|
@@ -1,380 +0,0 @@
|
|
|
-
|
|
|
-from scipy.optimize import curve_fit
|
|
|
-from scipy.stats import chisquare
|
|
|
-from scipy.stats import chi2_contingency
|
|
|
-from scipy.linalg import solve
|
|
|
-
|
|
|
-
|
|
|
-import numpy as np
|
|
|
-import os
|
|
|
-
|
|
|
-import warnings
|
|
|
-with warnings.catch_warnings():
|
|
|
- warnings.filterwarnings("ignore",category=FutureWarning)
|
|
|
-import h5py
|
|
|
-
|
|
|
-
|
|
|
-from time import time, sleep
|
|
|
-import psutil
|
|
|
-
|
|
|
-import pycuda.driver as cuda
|
|
|
-import pycuda.autoinit
|
|
|
-from pycuda.compiler import SourceModule
|
|
|
-import pycuda.gpuarray as gpuarray
|
|
|
-
|
|
|
-
|
|
|
-import filehelper as fh
|
|
|
-from PeakReconstuctionPlot import plot, plotFFTFile
|
|
|
-
|
|
|
-
|
|
|
-import scipy.constants
|
|
|
class ANKA:
    """Static machine parameters of the ANKA storage ring.

    All values are class attributes; the class is used as a plain
    constant namespace and is never instantiated.
    """

    # Ring circumference in meters.
    circumference = 110.4  # Meter
    # Harmonic number: number of RF buckets per revolution.
    h = 184

    # Revolution frequency [Hz] and period [s], assuming the beam
    # travels at the speed of light.
    frev = scipy.constants.c / circumference
    trev = 1 / frev

    # RF frequency [Hz] and period [s]: one RF cycle per bucket.
    frf = frev * h
    trf = 1 / frf
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
# Compile the CUDA kernels ("load" and "calculate") shipped next to this
# module in kernel.c.  Fixed: the original left the file handle open
# (open(...).read()); the context manager closes it promptly.
with open(os.path.join(os.path.dirname(__file__), "kernel.c"), "r") as _kernel_file:
    mod = SourceModule(_kernel_file.read())
|
|
|
-
|
|
|
def analyze_file(filename, calibrationData=None, verbose=False, bucket=-1, outDir="", plotdir="", hdf=False, newWaterfall=False, waterfallfile=""):
    """Analyze one raw ADC dump file on the GPU and store the fitted peaks.

    The raw file is memory-mapped, pushed through the CUDA kernels
    ``load`` (calibration + baseline extraction) and ``calculate``
    (least-squares parabola fit of the 4 ADC samples per peak), and the
    results are written next to the input file (or into *outDir*) as
    ``<name>_fit.npy``/``.hdf``.  Optionally a plot and an FFT waterfall
    entry are produced.

    Parameters
    ----------
    filename : str
        Path to the raw ``.out`` data file (extension assumed 4 chars).
    calibrationData : ndarray, optional
        Calibration table indexed as
        ``calibrationData[adc][delay][fine_delay] -> (offset, gain)``.
    verbose : bool
        Print timing and debug information.
    bucket : int
        Bucket to extract; -1 keeps all 184 buckets.
    outDir : str
        Output directory; "" stores results next to the input file.
    plotdir : str
        If non-empty (and a single bucket is selected), write a plot here.
    hdf : bool
        Store results as HDF5 instead of ``.npy``.
    newWaterfall : bool
        Start a fresh FFT waterfall file instead of appending.
    waterfallfile : str
        Target waterfall file (without extension); "" disables the FFT step.
    """
    print('=========================')
    if verbose: print('=========================')
    print(filename)
    np.set_printoptions(precision=4)
    np.set_printoptions(suppress=True)

    starttot = time()
    starttime = time()

    # Memory-map the raw file; filehelper locates payload bounds and the
    # optional header structure.
    file = np.memmap(filename, dtype='uint32', mode='r')
    start, stop, header = fh.read_file_specs(file, verbose)

    # Each 32-bit word carries two samples; 4 ADC samples form one peak.
    n = int((stop - start) * 2 // 4)

    if verbose: print(start, stop)
    if verbose: print(n)
    # Fixed: use 'is not None' instead of '!= None'.
    if verbose: print("Has Header: ", header is not None)

    if header is not None:
        datax = header['fine_delay_adc']
        skip_turns = header['skip_turns']
        delay = header['delay_th']
        if verbose:
            print('datax', datax)
            print('delay_th', delay)
    else:
        # Fallback sampling positions and settings for headerless files.
        datax = np.array([9, 33, 48, 84]) / 3
        skip_turns = 10
        delay = 3

    threads = 1024

    ngrid = (n // 2) // threads

    if verbose: print('set up GPU')

    # 5 output floats per peak; allocate GPU in/out buffers and upload data.
    out = np.zeros(int(n * 5), dtype=np.float32)
    gpu_out = cuda.mem_alloc(out.size * out.dtype.itemsize)
    data = np.array(file[start:stop], dtype=np.int32)
    gpu_in = cuda.mem_alloc(data.size * data.dtype.itemsize)

    cuda.memcpy_htod(gpu_in, data)

    baseline = np.zeros(n, dtype=np.float64)
    # Default calibration: offset 0, gain 1 for each of the four ADCs.
    calibration = np.array([[0, 0, 0, 0], [1, 1, 1, 1]], dtype=np.float32)
    if calibrationData is not None:
        for i, item in enumerate(datax):
            calibration[0][i] = calibrationData[i][delay][int(item)][0]
            calibration[1][i] = calibrationData[i][delay][int(item)][1]
    calibration = calibration.reshape(4 * 2)

    if verbose:
        print('Calibration ', calibration)
        print('bucket ', bucket)

    if verbose: print('load {} datapoints in {} threads on {} Blocks = {}'.format(n, threads, ngrid, threads*ngrid*2))
    func = mod.get_function('load')
    func(gpu_in, gpu_out, cuda.Out(baseline), cuda.In(calibration), block=(threads, 1, 1), grid=(ngrid, 1))

    # The 'load' output becomes the input of 'calculate'.
    gpu_in.free()
    gpu_in = gpu_out

    # Mean baseline over all buckets that produced a value.
    baseline = np.sum(baseline) / np.sum(baseline != 0)

    end = time()
    if verbose:
        print('=========================')
        print('Data Read in {}sec'.format(end-starttime))
        print('baseline {}'.format(baseline))

    start = time()
    ngrid = n // threads

    datax = np.array(datax * 3, dtype=np.float32)

    # Pseudo-inverse design matrix for the parabola fit: X = (x^T x)^-1 x^T.
    x = np.array([datax**2, datax, np.ones(len(datax))], dtype=np.float32).T
    X = (np.linalg.inv((x.T).dot(x)).dot(x.T)).astype(np.float32)

    gpu_matrix = X.reshape(4 * 3)
    result = np.array(np.zeros(n * 5), dtype=np.float32)

    gpu_out = cuda.mem_alloc(result.size * result.dtype.itemsize)

    bucket = np.int32(bucket)
    if verbose:
        print('calculate {} datapoints in {} threads on {} Blocks = {}'.format(n, threads, ngrid, threads*ngrid))
    func = mod.get_function('calculate')
    func(gpu_in, cuda.In(gpu_matrix), gpu_out, cuda.In(datax), cuda.In(baseline), bucket, block=(threads, 1, 1), grid=(ngrid, 1))

    cuda.memcpy_dtoh(result, gpu_out)

    end = time()
    if verbose: print('CPU sort')

    result = result.reshape(n, 5)

    gpu_in.free()
    gpu_out.free()

    if verbose:
        print('=========================')
        print('GPU time in {}sec'.format(end-start))

    end = time()
    if verbose:
        print('=========================')
        print('Analyzed in {}sec = {}µs/peak'.format(end-start, (end-start)*1000000/len(result)))
        print('saveing')

    # Redirect the output next to outDir while keeping the base filename.
    if outDir != "":
        x = filename.rfind(os.path.sep)
        filename = os.path.join(outDir, filename[x+1:])

    if bucket == -1:
        # Keep everything: reshape to (turns, 184 buckets, 5 values).
        savedat = np.resize(result, (n // 184, 184, 5))
    else:
        # Keep only every 184th entry starting at the selected bucket.
        savedat = np.array(result[bucket::184], dtype=np.float32)

    outfile = filename[:-4] + "_fit"
    if hdf:
        with h5py.File(outfile + ".hdf", 'w') as f:
            grp = f.create_group('header')
            # Fixed: headerless files previously crashed on header.keys().
            if header is not None:
                for k in header.keys():
                    grp[k] = header[k]
            grp2 = f.create_group('fit_info')
            grp2['baseline'] = baseline
            grp2['calibration'] = calibration
            grp2['bucket'] = bucket
            f.create_dataset('dataset', data=savedat, compression="gzip")
    else:
        np.save(outfile + ".npy", savedat)
        outfile2 = filename[:-4] + "_fit_info." + "npy"
        info = dict(baseline=baseline, skip_turns=skip_turns, delay=datax, delayth=delay, bucket=bucket, calibration=calibration)
        np.save(outfile2, info)

    end = time()

    if verbose: print('=========================')
    print('Total {:.2f}sec = {}µs/peak'.format((end-starttot), (end-starttot)*1000000/len(result)))
    print('{:10} Datasets Analyzed'.format(len(result)))
    if bucket == -1:
        print('{:10} Datasets stored'.format(len(result)*184))
    print('{:10} good'.format(len(savedat)))
    print('{:8.2f} baseline'.format(baseline))
    print("Results in " + outfile)

    ########################################################################################
    ### Plotting

    if plotdir != "" and bucket != -1:
        print('Plotting to ' + plotdir)
        start = time()
        x = filename.rfind(os.path.sep)
        title = filename[x+1:-4]
        filename = os.path.join(plotdir, filename[x+1:-4])
        plot(savedat, title, filename, skip_turns)

        print('Plot done {:.2f}sec'.format((time()-start)))

    if waterfallfile != "":
        print("FFT start")
        start = time()
        # BUG FIX: skip_turns was previously passed positionally into the
        # unused 'infos' parameter, leaving skippedTurns at its default 0
        # and producing a wrong FFT frequency axis.
        generateFFTfile(savedat, waterfallfile, None, skippedTurns=skip_turns, new=newWaterfall)
        print('FFT done {:.2f}sec'.format((time()-start)))
|
|
|
-
|
|
|
-
|
|
|
-# 8888888888 8888888888 88888888888 8888888888 d8b 888
|
|
|
-# 888 888 888 888 Y8P 888
|
|
|
-# 888 888 888 888 888
|
|
|
-# 8888888 8888888 888 8888888 888 888 .d88b.
|
|
|
-# 888 888 888 888 888 888 d8P Y8b
|
|
|
-# 888 888 888 888 888 888 88888888
|
|
|
-# 888 888 888 888 888 888 Y8b.
|
|
|
-# 888 888 888 888 888 888 "Y8888
|
|
|
-#
|
|
|
-#
|
|
|
-#
|
|
|
def generateFFTfile(data, filename, infos, skippedTurns=0, lim=(0, 120000), new=False):
    """Append the FFTs of one measurement to a waterfall HDF5 file.

    Computes the magnitude spectra of the fitted amplitude, centroid
    ("Schwerpunkt") and width ("Breite") time series and stores them as
    one numbered dataset each under the groups 'Amplitude',
    'Schwerpunkt' and 'Breite' of ``<filename>.hdf``.

    Parameters
    ----------
    data : ndarray
        Fit results of shape (turns, 5); columns 1-3 are transformed.
    filename : str
        Target file path without the ".hdf" extension.
    infos : object
        Unused; kept for interface compatibility with existing callers.
    skippedTurns : int
        Number of turns skipped between samples; scales the time step of
        the frequency axis.
    lim : tuple
        (low, high) frequency limits in Hz; spectra are cropped to this
        window (only evaluated when the file is created).
    new : bool
        Force creation of a fresh file; automatically set when the file
        does not exist yet.
    """
    if not os.path.isfile(filename + ".hdf"):
        new = True

    # Fixed: use a context manager so the HDF5 handle is closed even when
    # an exception occurs mid-write ('w' truncates, 'a' appends).
    with h5py.File(filename + ".hdf", 'w' if new else 'a') as f:
        # Frequency axis: one sample every (skippedTurns+1) revolutions.
        fftfreq = np.fft.rfftfreq(data[:, 1].shape[0], ANKA.trev * (skippedTurns + 1))
        f1 = np.abs(np.fft.rfft(data[:, 3]))
        f2 = np.abs(np.fft.rfft(data[:, 2]))
        f3 = np.abs(np.fft.rfft(data[:, 1]))

        if new:
            # Crop indices for the requested frequency window.
            imin = np.where(fftfreq > lim[0])[0][0]
            imax = np.where(fftfreq < lim[1])[0][-1]

            # Running dataset counter plus static metadata.
            f['indexctr'] = [0]
            grp = f.create_group('infos')
            grp['limHz'] = lim
            grp['limIndex'] = (imin, imax)

            grp.create_dataset('fftfreq', data=fftfreq[imin:imax], compression="gzip")
            g1 = f.create_group('Amplitude')
            g2 = f.create_group('Schwerpunkt')
            g3 = f.create_group('Breite')
        else:
            grp = f['infos']
            g1 = f['Amplitude']
            g2 = f['Schwerpunkt']
            g3 = f['Breite']
            imin = grp['limIndex'][0]
            imax = grp['limIndex'][1]

        indexctr = f['indexctr']
        ctr = indexctr[0]

        print('ctr', str(ctr))

        f1 = f1[imin:imax]
        f2 = f2[imin:imax]
        f3 = f3[imin:imax]

        # Store the cropped spectra under the next free index and advance
        # the persistent counter.
        g1.create_dataset(str(ctr), data=f1, compression="gzip")
        g2.create_dataset(str(ctr), data=f2, compression="gzip")
        g3.create_dataset(str(ctr), data=f3, compression="gzip")
        ctr += 1
        indexctr[0] = ctr
|
|
|
-
|
|
|
-# d8b
|
|
|
-# Y8P
|
|
|
-#
|
|
|
-# 88888b.d88b. 8888b. 888 88888b.
|
|
|
-# 888 "888 "88b "88b 888 888 "88b
|
|
|
-# 888 888 888 .d888888 888 888 888
|
|
|
-# 888 888 888 888 888 888 888 888
|
|
|
-# 888 888 888 "Y888888 888 888 888
|
|
|
-
|
|
|
if __name__ == '__main__':
    import argparse as ap

    # Command-line front end: analyze a single file (-f) or every .out
    # file in a directory (-d).
    parser = ap.ArgumentParser("Sequence Skript Converter")
    parser.add_argument('-f', type=str, default="", help="Set input file")
    parser.add_argument('-d', type=str, default="", help="Set input directory")
    parser.add_argument('-o', type=str, default="", help="Set Output Direktory if wished")
    parser.add_argument('-v', action='store_true', default=False, help="print debuginfos")
    parser.add_argument('-b', type=int, default=-1, help="Set if only one Bucket is filled")
    parser.add_argument('-c', type=str, default="", help="Calibration file")
    parser.add_argument('-wf', type=str, default="", help="Heatmap File (without extension!)")
    parser.add_argument('-w', type=int, default=2, help="Heatmap\n 1: new\n 2: append")
    parser.add_argument('-wp', action='store_true', default=False, help="Heatmap generate Plot")
    parser.add_argument('-p', type=str, default="", help="Set Output Direktory for Plot")
    parser.add_argument('-hdf', action='store_true', default=False, help="Save results as hdf5 file")

    args = parser.parse_args()

    starttot = time()

    np.set_printoptions(precision=4)
    np.set_printoptions(suppress=True)
    calibration = None

    # Create output directories on demand.
    if args.o != "":
        if not os.path.isdir(args.o):
            os.makedirs(args.o)
    if args.p != "":
        if not os.path.isdir(args.p):
            os.makedirs(args.p)

    if args.c != "":
        calibration = np.load(args.c)

    if args.f != "":
        filename = args.f
        if not os.path.isfile(filename):
            print("File '" + filename + "' not found - exit")
            exit()
        analyze_file(filename, calibration, args.v, args.b, args.o, plotdir=args.p, hdf=args.hdf, newWaterfall=(args.w == 1), waterfallfile=args.wf)
        # BUG FIX: 'args.wf is not ""' compared object identity, not
        # equality; use != for the emptiness check.
        if args.wp and args.wf != "":
            plotFFTFile(args.wf + '.hdf', args.wf + '.png', filename)

    elif args.d != "":
        filelist = os.listdir(args.d)
        print('==================================================')
        print("Analyzing {} with {} Files".format(args.d, len(filelist)))
        start = time()
        for i, file in enumerate(filelist):
            if '.out' in file:
                # Only the first file starts a fresh waterfall.
                analyze_file(os.path.join(args.d, file), calibration, args.v, args.b, args.o, plotdir=args.p, hdf=args.hdf, newWaterfall=(i == 0), waterfallfile=args.wf)
        stop = time()

        print('==================================================')
        print('Analyzed in {}sec'.format(stop-start))
        if args.wp and args.wf != "":
            # BUG FIX: the extension was missing its dot ("wfhdf" instead
            # of "wf.hdf"), unlike the single-file branch above.
            plotFFTFile(args.wf + '.hdf', args.wf + '.png', args.d)

    else:
        print("No input defined")
        print("use -f to input a single file")
        print("use -d to input a directory (every .out file will be read in)")
|