process4.py

import numpy as np
import pickle
from datetime import timedelta, datetime
from bokeh.layouts import gridplot
from bokeh.plotting import figure, show, output_file
from bokeh.models import Span, Label
from bokeh.models.widgets import Paragraph
from bokeh.io import reset_output
import json
import glob




def generateReport(jsonFileName):
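	"""Render a Bokeh HTML report described by a JSON config file.

	The config provides: pwidth/pheight (plot size, defaults 400x500),
	perfMonFile (optional PerfMon CSV), locustFile (pickled load-test
	results), description, outFile and title.
	"""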
	with open(jsonFileName, "r") as f:
		data = json.load(f)

	pwidth = data.get('pwidth', 400)
	pheight = data.get('pheight', 500)
	perfMonFile = data.get('perfMonFile', '')
	dataFile = data.get('locustFile', '')


	# Optional PerfMon CSV: column 0 holds the timestamp, column 12 the
	# connected-anonymous-users counter.
	perfMon = {'timestamp': [], 'anonUsers': []}
	if perfMonFile != "":
		with open(perfMonFile, 'r') as f:
			next(f)  # skip the CSV header row
			for l in f:
				v = l.split(",")
				if len(v) >= 13:
					timeStr = v[0].replace("\"", '')
					# PerfMon logs local (PDT) time; add 7 hours to align with UTC.
					myTime = datetime.strptime(timeStr, "%m/%d/%Y %H:%M:%S.%f") + timedelta(hours=7)
					val = float(v[12].replace("\"", ''))
					# Store the timestamp as seconds since the Unix epoch.
					perfMon['timestamp'].append((myTime - datetime(1970, 1, 1)).total_seconds())
					perfMon['anonUsers'].append(val)

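	# Accumulators for the pickled results: throughput samples, successful
	# requests, and concurrent-download counts.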
	throughput = {'time':[], 'rate':[], 'mean':[], 'median':[]}
	req_success = {'timestamp':[],'req_len':[],'req_duration':[], 'mean':[], 'median':[], 'rate':[]}
	downloads = {'time':[], 'downloads':[]}

	if dataFile != "":
		f = open(dataFile, 'rb')
		results = pickle.load(f)
		f.close()
		print len(results['req_errors'])

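		# Throughput samples, converted to KB/s; the initial sample is discarded.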
		for r in results['throughput']:
			throughput['time'].append(r['time'])
			throughput['rate'].append(r['rate']/1024.0)
		if len(throughput['time'])>1:
			throughput['time'].pop(0)
			throughput['rate'].pop(0)

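		# Per-request records: duration in seconds, response size in KB, and the
		# resulting transfer rate in KB/s.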
		for r in results['req_success']:
			req_success['req_duration'].append(r['time']/1000.0)
			req_success['req_len'].append(r['len']/1024.0)
			req_success['timestamp'].append(r['timestamp'])
			req_success['rate'].append(req_success['req_len'][-1] / req_success['req_duration'][-1])


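		# Expand the download counts into step coordinates: hold each count until
		# the next sample, and extend the last count to the final request time.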
		if len(results['downloads']) > 0:
			for i in range(len(results['downloads'])):
				r = results['downloads'][i]
				downloads['time'].append(r['time'])
				downloads['downloads'].append(r['downloads'])
				if i < len(results['downloads'])-1:
					downloads['time'].append(results['downloads'][i+1]['time'])
					downloads['downloads'].append(r['downloads'])
			downloads['time'].append(max(req_success['timestamp']))
			downloads['downloads'].append(results['downloads'][-1]['downloads'])

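	# Earliest timestamp across all series, used to re-base every plot to t=0.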
	minTime = 9999999999999
	if len(perfMon['timestamp'] )>0:
		minTime = min(perfMon['timestamp'])
	if len(throughput['time'])>0:
		minTime = min(minTime, min(throughput['time']))
	if len(req_success['timestamp'])>0:
		minTime = min(minTime, min(req_success['timestamp']))
	if len(downloads['time'])>0:
		minTime = min(minTime, min(downloads['time']))

	#shift times
	for i in range(len(perfMon['timestamp'])):
		perfMon['timestamp'][i] -= minTime
	for r in range(len(throughput['time'])):
		throughput['time'][r] -= minTime
	for r in range(len(downloads['time'])):
		downloads['time'][r]-= minTime
	for r in range(len(req_success['timestamp'])):
		req_success['timestamp'][r] -= minTime


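	# First row of plots: request duration, per-request rate, and connected
	# users. All figures share the same size via kwargs (and, once set, the
	# same x range).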
	row1 = []
	kwargs={'plot_width':pwidth,'plot_height':pheight}
	if len(req_success['timestamp'])>0:
		# s5 = figure( x_axis_label='Size (KB)', y_axis_label='Duration (sec)', **kwargs)
		# s5.circle(req_success['req_len'],req_success['req_duration'])


		s1 = figure( y_axis_label='Duration (sec)', title='Request Duration', x_axis_label='Test Time (s)',**kwargs)
		
		# Running mean/median of request duration over the first i requests,
		# so the curves line up one-for-one with the timestamps.
		for i in range(1, len(req_success['req_duration']) + 1):
			mean = np.mean(req_success['req_duration'][0:i])
			req_success['mean'].append(mean)
			median = np.median(req_success['req_duration'][0:i])
			req_success['median'].append(median)

		#mean = np.mean(req_success['req_duration'])
		#median = np.median(req_success['req_duration'])
		#meanSpan = Span(location=mean, dimension='width', line_color='green',line_dash='dashed',line_width=1, name="Mean")
		#meanLabel = Label(x=s1.x_range.start, y=mean, text="Mean")
		#medianSpan = Span(location=median, dimension='width', line_color='blue',line_dash='dashed',line_width=2, name="Median")
		#s1.add_layout(meanSpan)
		#s1.add_layout(meanLabel)
		#s1.add_layout(medianSpan)

		s1.circle(req_success['timestamp'],req_success['req_duration'], size=4, legend='Request Duration', color='darkgrey', alpha=0.2)
		s1.line(req_success['timestamp'],req_success['mean'], legend='Mean', line_width=2, line_color="green", line_dash='dotted')
		s1.line(req_success['timestamp'],req_success['median'], legend='Median', line_width=2, line_color="blue", line_dash='dotted')
		if 'x_range' not in kwargs:
			# Share this x range with all later plots so they pan/zoom together.
			kwargs['x_range'] = s1.x_range

		s5 = figure(y_axis_label='Request Rate (KB/s)', x_axis_label='Test Time (s)', **kwargs)
		s5.circle(req_success['timestamp'], req_success['rate'], size=4, legend='Request Rate', color='darkgrey', alpha=0.2)


		row1.append(s1)
		row1.append(s5)

	if len(perfMon['timestamp'])>0:	
		s2 = figure(y_axis_label="Users", title='Connected Anonymous Users', x_axis_label='Test Time (s)', **kwargs)
		s2.line(perfMon['timestamp'],perfMon['anonUsers'])
		if 'x_range' not in kwargs:
			kwargs['x_range'] = s2.x_range
		row1.append(s2)

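	# Second row: concurrent downloads and overall throughput.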
	row2=[]
	if len(downloads['time'])>0:
		s3 = figure(y_axis_label='# Downloads', title='Concurrent Downloads', x_axis_label='Test Time (s)', **kwargs)
		s3.line(downloads['time'],downloads['downloads'])
		if 'x_range' not in kwargs:
			kwargs['x_range'] = s3.x_range
		row2.append(s3)

	if len(throughput['time'])>0:
		# Running mean/median of the download rate (used by the optional overlays below).
		for i in range(1, len(throughput['rate']) + 1):
			mean = np.mean(throughput['rate'][0:i])
			throughput['mean'].append(mean)
			median = np.median(throughput['rate'][0:i])
			throughput['median'].append(median)

		s4 = figure(y_axis_label='Download Rate (KB/s)', title='Throughput', x_axis_label='Test Time (s)',**kwargs)
		s4.line(throughput['time'],throughput['rate'])
		#s4.line(throughput['time'],throughput['mean'], legend='Mean', line_width=2, line_color="green", line_dash='dotted')
		#s4.line(throughput['time'],throughput['median'], legend='Median', line_width=2, line_color="blue", line_dash='dotted')
		if 'x_range' not in kwargs:
			kwargs['x_range'] = s4.x_range
		row2.append(s4)

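	# Third row: the free-text description from the config, as a Paragraph widget.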
	row3 = []
	p = Paragraph(text=data['description'],width=pwidth,height=pheight)
	row3.append(p)

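	# Assemble the grid from whichever rows actually have content.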
	grid = []
	if len(row1)>0: grid.append(row1)
	if len(row2)>0: grid.append(row2)
	grid.append(row3)

	print "Will save"
	output_file(data["outFile"], title=data["title"])

	show(gridplot(grid))  # open a browser
	reset_output()


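# Build a report for every matching test-configuration JSON. Each config is a
# flat object of the form (illustrative values only):
#   {"title": "...", "outFile": "...", "description": "...",
#    "locustFile": "...", "perfMonFile": "", "pwidth": 400, "pheight": 500}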
jsons = glob.glob('../results/P11*.json')
for j in jsons:
	generateReport(j)