# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
# mpl_toolkits.axes_grid is deprecated/removed; axisartist provides the same Subplot
from mpl_toolkits.axisartist.axislines import Subplot


def get_mean_std(data, size):
    """Return mean and std of latency samples for one packet size,
    after discarding samples more than 4 standard deviations from the mean."""
    s = data[data[:, 0] == size][:, 1]
    # remove 4x std outliers
    mean, std = np.mean(s), np.std(s)
    s = s[np.abs(s - mean) < 4 * std]
    return np.mean(s), np.std(s)


def read_data(fname):
    """Load latency samples and compute per-packet-size mean and std."""
    data = np.loadtxt(fname)
    ymean = []
    ystd = []
    for i in range(128, 4096 + 128, 128):
        m, s = get_mean_std(data, i)
        ymean.append(m)
        ystd.append(s)
    return ymean, ystd


xs = np.arange(128, 4096 + 128, 128)

dgma_cpu_mean, dgma_cpu_std = read_data('ipedirectgma.cpu.txt')
# dgma_gpu_mean, dgma_gpu_std = read_data('ipedirectgma.gpu.txt')
cam2_cpu_mean, cam2_cpu_std = read_data('ipecamera2.cpu.txt')
# cam2_gpu_mean, cam2_gpu_std = read_data('ipecamera2.gpu.txt')

print(dgma_cpu_mean[8])
print(cam2_cpu_mean[8])

# plt.rc('font', **dict(family='serif'))

fig = plt.figure(1, (4, 3))
ax = Subplot(fig, 111)
fig.add_subplot(ax)

plt.xlabel('Packet size (B)')
plt.ylabel('Latency (us)')

# ax.plot(xs, dgma_gpu_mean, 'x-', label='GPU memory (embedded)', color='#3b5b92')
# ax.plot(xs, cam2_gpu_mean, '.-', label='GPU memory (workstation)', color='#3b5b92')
ax.plot(xs, cam2_cpu_mean, '.-', markersize=4, label='Setup1', color='#028e2c')
ax.plot(xs, dgma_cpu_mean, 'x-', markersize=4, label='Setup2', color='#77DD77')

plt.xticks([128, 1024, 2048, 2048 + 1024, 4096])
# plt.yticks([2, 4, 6, 8])
plt.xlim(0, 4200)

# hide the top and right spines (axisartist-style axes)
ax.axis["right"].set_visible(False)
ax.axis["top"].set_visible(False)

plt.legend(loc='upper left', frameon=False)
plt.savefig('latency-cpu.pdf', dpi=300, bbox_inches='tight')

# A = np.vstack([xs, np.ones(len(xs))]).T
# mc, cc = np.linalg.lstsq(A, yscm)[0]
# mg, cg = np.linalg.lstsq(A, ysgm)[0]
# print(100000000 / (100000000 * mc + cc))
# print(100000000 / (100000000 * mg + cg))
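
# The commented-out least-squares block above references yscm/ysgm, which are
# not defined in this script. Below is a minimal runnable sketch of the same
# idea, assuming the fit was meant to run on the per-setup mean latencies
# computed above (cam2_cpu_mean / dgma_cpu_mean); that is an assumption, not
# necessarily the data the original fit used. The throughput expression
# appears to extrapolate the linear model latency(us) = m * size + c to a
# 100 MB transfer and report bytes per microsecond, i.e. MB/s.
A = np.vstack([xs, np.ones(len(xs))]).T
mc, cc = np.linalg.lstsq(A, np.asarray(cam2_cpu_mean), rcond=None)[0]
mg, cg = np.linalg.lstsq(A, np.asarray(dgma_cpu_mean), rcond=None)[0]
print(1e8 / (1e8 * mc + cc))  # estimated Setup1 throughput (MB/s) under the linear model
print(1e8 / (1e8 * mg + cg))  # estimated Setup2 throughput (MB/s) under the linear model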