DataSet.py 25 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709
  1. import logging
  2. import time
  3. import os
  4. import math
  5. import numpy as np
  6. from numpy.polynomial.polynomial import polyval
  7. import h5py
  8. import traceback
  9. try:
  10. #Compatibility Python3 and Python 2
  11. from .CalibrationHandle import theCalibration
  12. from .constants import KARA
  13. except:
  14. from CalibrationHandle import theCalibration
  15. from constants import KARA
  16. HEADER_SIZE_BYTES = 32
  17. RAW_FILE = 1
  18. RAW_FILE_NPY = 2
  19. TIMESCAN = 3
class DataSet(object):
    """
    Decoder for KAPTURE raw ADC data.

    Most useful functions:
        train(adc, frm, to, calibrate)
        heatmap(adc, frm, to)
        fft(adc, frm, to, drop_first_bin, nobunching)
        getFromLog()
    ! Be aware: parameter ADC is in range of 0 to 7
    """
    def __init__(self, filename=None, decodedData=None, rawData=None, stringData=None, delays=None, shiftFMC2=0, tRev=KARA.trev, bunchesPerTurn=KARA.h, calibrationFile=""):
        """
        Initialise the dataset
        use one of:
        :param filename: string of the rawfile to open. If available it reads automatically the meta information from the Logfile
        :param decodedData: array of already decoded Data
        :param rawData: filedescriptor (eg. np.memmap) or array of rawdata
        :param stringData: handling rawdata given as string (eg. from bif._bif_read_data)
        :param delays: dict containing the delaysettings. default is read from LogFile or if not present {'c330':0, 'c25':0, 'c25b':4, 'f':[0,0,0,0,0,0,0,0]}
        :param shiftFMC2: compensate Bucketmissmatch between channel 1-4 and 5-8 (can later be done by setShiftFMC2)
        :param tRev: default KARA.trev
        :param bunchesPerTurn: default KARA.h
        :param calibrationFile: define the calibration to use. by default it looks for "calibration.hdf" in the same dir as the filename
        :return: -
        """
        self.fileName = filename
        if filename is not None:
            self.path = os.path.dirname(filename)
        self.type = 0                    # one of RAW_FILE / RAW_FILE_NPY / TIMESCAN, set on load
        self.header = None               # parsed binary header (see parseHeader)
        self.adcNumber = 4               # number of ADC channels in the data
        self.adcSwap = False             # compatibility for old data
        self.log = None                  # matching entry of the measurement log file
        self.stepper = None              # stage step read from the log file
        self.invert = False              # if True, decodeData mirrors samples around mid-scale
        self.datax = None                # measured x positions (ps) from the log file, if present
        self.isAcquisition = False
        self.skippedTurns = 0
        self.tRev = tRev
        self.bunchesPerTurn = bunchesPerTurn
        self.shiftFMC2 = shiftFMC2
        self.array = []                  # decoded samples, one row per bunch sample
        self.frm = 0                     # currently decoded window (bunch indices)
        self.to = 0
        self.v = False                   # verbose flag
        self._heatmaps = {}              # cache: adc -> heatmap
        self._ffts = {}                  # cache: adc -> [frm, to, fft result]
        self._fftsnobunching = False     # mode the FFT cache was built with
        self._f = 0
        self._t = 1
        self.c330 = 0                    # coarse 330 ps delay setting
        self.c25 = 0                     # coarse 25 ps delay setting
        self.c25b = 4                    # 25 ps delay of the second FMC
        self.f = []                      # per-ADC fine delays
        self.dataRead = False
        self.noFile = False              # True when data came from memory, not a file
        self.calibId = "current"
        if calibrationFile != "":
            self.calibId = theCalibration.openFile(calibrationFile)
        else:
            if filename is not None:
                # default: calibration file sitting next to the data file
                self.calibId = theCalibration.openFile(os.path.join(self.path,'calibration.hdf'))
            else:
                print('calibrationFile not defined - no calibration possible')
        if delays is not None:
            try:
                self.c330 = delays['c330']
                self.c25 = delays['c25']
                self.c25b = delays['c25b']
                self.f = delays['f']
            except:
                # incomplete delay dict -> keep the defaults set above
                pass
        # exactly one of the four data sources is used, in this priority:
        if decodedData is not None:
            self.array = decodedData
            self.adcNumber = len(decodedData[1])-1
            self.dataRead = True
            self.noFile = True
            print('load decodedData with ADCs', self.adcNumber)
        elif stringData is not None:
            logging.vinfo("Read data directly from device.")
            logging.vinfo("Read %i bytes of data" % len(stringData))
            rawData = np.fromstring(stringData, dtype=np.uint32)
            self.loadFromRawData(rawData)
            self.dataRead = True
            self.noFile = True
        elif rawData is not None:
            self.loadFromRawData(rawData)
            self.dataRead = True
            self.noFile = True
        elif filename is not None:
            if not os.path.isfile(self.fileName):
                print('file {} does not exist'.format(self.fileName))
                return
            else:
                # read meta information (delays etc.) from the log file
                self.getFromLog()
        else:
            print('DataSet nothing given!')
            return
  117. def setShiftFMC2(self, shiftFMC2):
  118. self.shiftFMC2 = shiftFMC2
  119. self.to = 0 #force new decode
  120. def setDelays(self, delays):
  121. try:
  122. self.c330 = delays['c330']
  123. self.c25 = delays['c25']
  124. self.c25b = delays['c25b']
  125. self.f = np.array([float(v) for v in delays['f'][1:-1].split(',')])
  126. except:
  127. traceback.print_exc()
  128. pass
  129. def fineDelays(self):
  130. #print(self.header)
  131. if len(self.f):
  132. return self.f
  133. if self.header:
  134. return self.header['fine_delay_adc']
  135. else:
  136. return None
  137. def getCalibratedDelay(self):
  138. if self.datax is not None:
  139. return self.datax*1e-12
  140. out = []
  141. for i,v in enumerate(self.f):
  142. out.append(theCalibration.calibrateX(i, self.c330, self.c25, v, self.c25b))
  143. return np.array(out)
  144. def numOfBunches(self):
  145. return self.array.shape[0]
  146. def numOfTurns(self):
  147. return self.numOfBunches() / self.bunchesPerTurn
  148. def heatmap(self, adc=1, frm=0, to=-1, bunch_frm=0, bunch_to=-1):
  149. newone = self.loadFromFile(0, to*self.bunchesPerTurn)
  150. if isinstance(adc, list):
  151. adc = adc[0]
  152. if not 0 <= adc <= self.adcNumber:
  153. raise ValueError('adc must be in [0,{:}]'.format(self.adcNumber-1))
  154. l = len(self.array)
  155. l = (l//self.bunchesPerTurn)*self.bunchesPerTurn
  156. if not adc in self._heatmaps or newone:
  157. heatmap = self.array[:l,adc].reshape(-1, self.bunchesPerTurn).transpose()
  158. self._heatmaps[adc] = heatmap
  159. return self._heatmaps[adc][bunch_frm:bunch_to, frm:to]
    def fft(self, adc=0, frm=0, to=-1, drop_first_bin=True, nobunching=False):
        """
        :param adc: single int in [0:7] or list.
        :param nobunching: False - generate img with fft for every bunch; True - generate one fft without splitting into bunches
        :return: if param adc is a list it returns a list with length 8 in which the requested elements contain the array with the fft
        """
        if nobunching:
            # nobunching mode always goes through the list branch below
            if not isinstance(adc, list):
                adc=[adc]
        if self._fftsnobunching != nobunching:
            # mode changed -> the cached FFTs are no longer comparable
            self._fftsnobunching = nobunching
            self._ffts = {}
        if isinstance(adc, list):
            out = [[],[],[],[], [],[],[],[]]
            for i in adc:
                if i in self._ffts:
                    # reuse the cached FFT when it covers the same range
                    if self._ffts[i][0] == frm and self._ffts[i][1] == to:
                        out[i]=self._ffts[i][2]
                        continue
                if self._fftsnobunching:
                    # one long 1D trace over the whole train
                    heatmap = self.train(i,frm,to)
                else:
                    heatmap = self.heatmap(i, frm, to)
                if drop_first_bin:
                    if self._fftsnobunching:
                        # 1D result -> drop the DC bin only
                        self._ffts[i] = [frm, to, self._fft(heatmap, self._fftsnobunching)[1:] ]
                    else:
                        # 2D result -> drop the DC bin of every bunch row
                        self._ffts[i] = [frm, to, self._fft(heatmap, self._fftsnobunching)[:, 1:] ]
                else:
                    self._ffts[i] = [frm, to, self._fft(heatmap, self._fftsnobunching)]
                out[i]=self._ffts[i][2]
            return out
        else:
            # single-ADC fast path (never cached)
            heatmap = self.heatmap(adc, frm, to)
            if drop_first_bin:
                return self._fft(heatmap)[:, 1:]
            else:
                return self._fft(heatmap)
  201. def fftMaxFreq(self):
  202. mult=1.0
  203. if self._fftsnobunching:
  204. mult = 184.0
  205. return ((self.numOfTurns()* mult) // 2 + 1) * self.fftFreqDist()
  206. def fftFreqDist(self):
  207. mult=1.0
  208. #if self._fftsnobunching:
  209. # mult = 1.0/184.0
  210. return 1.0/(self.numOfTurns() * (self.skippedTurns + 1) * self.tRev * mult)
    def train(self, adc=0, frm=0, to=-1, calibrate=False):
        """
        params: adc: single int in [0:7] or list. if adc is a list (eg. [1,3]) it returns a list of length 8 with the requested elements filled
        """
        self.loadFromFile(frm, to)
        # self.array holds the window [self.frm:self.to]; map the requested
        # absolute bunch range onto indices inside that window.  The last
        # column (bunch counter) is stripped with :-1.
        if to != self.to:
            to -= self.frm
            pdata = self.array[frm-self.frm:to, :-1]
        else:
            pdata = self.array[frm-self.frm:, :-1]
        if isinstance(adc, list):
            data = [[],[],[],[], [],[],[],[]]
            for item in range(len(data)):
                if item in adc:
                    if calibrate:
                        data[item] = theCalibration.calibrateY(pdata[:,item], item, self.calibId)
                    else:
                        data[item] = pdata[:,item]
                else:
                    # channels that were not requested are zero-filled
                    data[item] = np.zeros(len(pdata))
            return np.array(data)
        if calibrate:
            return theCalibration.calibrateY(pdata[:,adc], adc, self.calibId)
        return pdata[:,adc]
  238. def combined(self, adc=0, frm=0, to=-1, calibrate=False, turnbyturn=False, mean=False, workingChannels=[0,1,2,3,4,5,6,7]):
  239. """
  240. generates one array with all adc
  241. :param adc: select the adc that will be return solo
  242. :param turnbyturn: default False. if set to one it repeats the x value with the maximum bunchnumber. so to overlay the turns
  243. :param mean: only used with turnbyturn set to True. If set to 1 it calculates the mean of all used turns
  244. :param working_channels: to dissable channels in the Combined data (List)
  245. :return: 2D List [0] contains X data and Y data of all. [1] only for selected adc
  246. """
  247. if len(workingChannels) < 2:
  248. raise ValueError('working_channels must have at least 2 channels; {}'.format(workingChannels))
  249. if turnbyturn:
  250. if to != -1:
  251. to = to//184*184
  252. selector = [0,1,2,3]
  253. if self.adcNumber > 4:
  254. #selector = [0,1,2,3,6,7] #currently not all chanels are working
  255. selector = workingChannels
  256. array = self.train(adc=selector, frm=frm,to=to, calibrate=calibrate).T
  257. array = array[:, np.array(selector)]
  258. if isinstance(adc, list):
  259. adc = adc[0]
  260. finedelays = np.array(self.fineDelays())
  261. if finedelays is None:
  262. finedelays = np.array([0,0,0,0])
  263. if self.adcNumber >4:
  264. finedelays = np.repeat(finedelays, 2)
  265. if calibrate:
  266. for i in range(self.adcNumber):
  267. finedelays[i] = (theCalibration.calibrateX(i, self.c330, self.c25, finedelays[i], self.c25b, self.calibId) - theCalibration.calibrateX(0, self.c330, self.c25, 0, self.c25b, self.calibId) )*1e12
  268. else:
  269. finedelays = finedelays*3
  270. if self.datax is not None:
  271. finedelays = self.datax - np.min(self.datax)
  272. finedelays = finedelays/100.0
  273. if not turnbyturn:
  274. a = np.array(np.reshape(np.repeat(np.arange(0, len(array)), len(selector)),-1),dtype=np.float)
  275. else:
  276. a = np.array(np.reshape(np.tile(np.repeat(np.arange(0, 184), len(selector)), len(array)//184),-1),dtype=np.float)
  277. b = np.reshape(np.repeat([finedelays[np.array(selector)]], len(array),0), -1)
  278. orig_xs = a+b
  279. # Remove bunch number and flatten array
  280. array = array.reshape((-1, 1))
  281. array = array.flatten()
  282. if turnbyturn and mean:
  283. array = np.mean(array.reshape((-1, 184, len(selector))),0)
  284. orig_xs = np.mean(orig_xs.reshape((-1, 184, len(selector))),0)
  285. array = array.reshape(-1)
  286. orig_xs = orig_xs.reshape(-1)
  287. #print(adc)
  288. ret = [np.array([orig_xs, array])]
  289. if adc not in selector:
  290. ret.append(np.array([0,0]))
  291. return ret
  292. if len(selector) == 6:
  293. if adc > 4:
  294. adc -= 2
  295. while adc not in selector:
  296. adc -= 1
  297. if adc < 0:
  298. adc = self.adcNumber
  299. ret.append(np.array([orig_xs[adc::len(selector)], array[adc::len(selector)]]))
  300. #print(ret)
  301. return ret
  302. def loadFromRawData(self, rawData):
  303. print('loadfromrawdata')
  304. #self.printData(rawData[:32+32])
  305. #print('-------------------')
  306. headerInfo = self.dataHasHeader(rawData)
  307. if True in headerInfo:
  308. #logging.vinfo("Header detected.")
  309. # We read words of 4 bytes each
  310. self.header = self.parseHeader(rawData, headerInfo)
  311. spliceWords = HEADER_SIZE_BYTES // 4
  312. rawData = rawData[spliceWords:]
  313. else:
  314. #logging.vinfo("No Header detected.")
  315. self.header = None
  316. self.array = self.decodeData(rawData)
  317. self.dataRead=True
  318. def printData(self, data):
  319. try:
  320. for line in range(len(data)//4):
  321. print('0x{:08x}: 0x{:08x} 0x{:08x} 0x{:08x} 0x{:08x}'.format(line*16, data[line*4+0], data[line*4+1], data[line*4+2], data[line*4+3]))
  322. except:
  323. traceback.print_exc()
    def loadFromFile(self, frm, to):
        """Lazily decode bunches [frm:to] of the file into self.array.

        The decoded window is cached; the file is only decoded again when
        the requested range is not contained in the current window.

        :return: True if self.array was (re)decoded, False if cached
        """
        if self.noFile:
            return False
        if self.dataRead == False:
            # first access: open the file and detect its layout
            if '.npy' in self.fileName:
                self.type = RAW_FILE_NPY
                self.array = np.load(self.fileName)
            elif '.out' in self.fileName:
                self.type = RAW_FILE
                self.fp = np.memmap(self.fileName, dtype='uint32')
                headerInfo = self.dataHasHeader(self.fp)
                if True in headerInfo:
                    # We read words of 4 bytes each; parseHeader may widen
                    # HEADER_SIZE_BYTES for KAPTURE-2 before it is used here
                    self.header = self.parseHeader(self.fp, headerInfo)
                    spliceWords = HEADER_SIZE_BYTES // 4
                    self.fp = self.fp[spliceWords:]
                else:
                    self.header = None
                    self.getFromLog()
                self.adcNumber = self.dataAdcCount(self.fp)
            self.dataRead=True
        if to == -1:
            # "until the end": only decode if the previous window was bounded
            if self.to != to:
                if self.type == RAW_FILE:
                    # 2 samples per word -> frm bunches = frm*adcNumber/2 words
                    self.array = self.decodeData(self.fp[frm*self.adcNumber//2:])
                self.to = to
                return True
            return False
        # decode 3 extra turns past `to` so small range changes stay cached
        tto = to+184*3
        if self.frm > frm or self.to < tto:
            if self.type == RAW_FILE:
                self.array = self.decodeData(self.fp[frm*self.adcNumber//2:tto*self.adcNumber//2])
            self.frm = frm
            self.to = tto
            return True
        return False
    def decodeData(self, data):
        """Decode raw 32-bit words into a (samples x adcNumber+1) uint16 array.

        Each word carries two 12-bit samples (low 12 bits / next 12 bits)
        plus part of the bunch counter in the top byte.  The last column of
        the result holds the bunch number.
        """
        self.adcNumber = self.dataAdcCount(data)
        try:
            # 0xDEADDEAD marks the end of the valid data
            end = np.where(data==0xDEADDEAD)[0][0]
            data = data[:end]
        except Exception as e:
            # no end marker found -> use the whole buffer
            pass
        # Make sure we read multiple of adcNumber
        data = data[:self.adcNumber * int((math.floor(data.size // self.adcNumber)))]
        bunch_low = data & 0xfff
        bunch_high = np.right_shift(data, 12) & 0xfff
        bunch_number = np.right_shift(data, 24) & 0xfff
        bunch_low = bunch_low.reshape(-1, self.adcNumber)
        bunch_high = bunch_high.reshape(-1, self.adcNumber)
        if self.invert:
            # mirror samples around mid-scale (2048)
            bunch_high = 2048 - (bunch_high-2048)
            bunch_low = 2048 - (bunch_low-2048)
        # interleave low/high samples; append the bunch counter as a column
        result = np.empty((bunch_low.shape[0] + bunch_high.shape[0], self.adcNumber+1), dtype=np.uint16)
        result[0::2,:self.adcNumber] = bunch_low
        result[1::2,:self.adcNumber] = bunch_high
        result[0::2, self.adcNumber] = bunch_number[::self.adcNumber]
        result[1::2, self.adcNumber] = bunch_number[::self.adcNumber] + 1
        if self.shiftFMC2:
            # shift ADC 5-8 against ADC 1-4 to compensate a bucket mismatch;
            # the shifted columns get shorter, so all are cut to equal length
            if self.v: print('shift FMC2 by ', self.shiftFMC2)
            tmp = []
            for i in range(self.adcNumber+1):
                if self.shiftFMC2 > 0:
                    if i < 4:
                        tmp.append(result[self.shiftFMC2:,i])
                    else:
                        tmp.append(result[:-self.shiftFMC2,i])
                else:
                    shift = self.shiftFMC2*(-1)
                    if i < 4:
                        tmp.append(result[:-shift:,i])
                    else:
                        tmp.append(result[shift:,i])
            result = np.array(tmp, dtype=np.uint16).T
        # crop to an integer number of turns
        result = result[:int(self.bunchesPerTurn * (math.floor(result.shape[0] // self.bunchesPerTurn))), :]
        if self.v: print('decode_data ', result.shape)
        return result
  415. def dataAdcCount(self, data):
  416. bunch_number = np.right_shift(data, 24) & 0xfff
  417. ctr = 0
  418. b0 = bunch_number[0]
  419. for i in range(10):
  420. if b0 == bunch_number[i]:
  421. ctr += 1
  422. else:
  423. break
  424. if ctr < 4:
  425. ctr = 4
  426. if self.v: print('ADC number ', ctr)
  427. if ctr < 4:
  428. ctr = 4
  429. #return 8
  430. return ctr
  431. def dataHasHeader(self, data):
  432. possible_header = data[0:HEADER_SIZE_BYTES//4]
  433. kapture2 = (possible_header[0] & 0xFF000000) != 0
  434. back = possible_header[-1] & 0xF8888888 == 0xF8888888
  435. front = possible_header[0] & 0xF8888888 == 0xF8888888
  436. if self.v: print("has head", (front, back, kapture2))
  437. return (front, back, kapture2)
    def parseHeader(self, data, header_info, verbose=False):
        """
        Parse the binary file header into a dict.
        Not supported for KAPTURE-2
        :param header_info: tuple (front, back, kapture2) from dataHasHeader()
        :return: dict of parsed fields (possibly empty on failure)
        """
        global HEADER_SIZE_BYTES
        if header_info[2] == True:
            # KAPTURE-2 headers are twice as long; widen the module-level
            # constant so subsequent splicing skips the right amount
            HEADER_SIZE_BYTES = 64
        header = data[0:HEADER_SIZE_BYTES//4]
        parsed = dict()
        if verbose: print('parsing header')
        try:
            assert header[0] == 0xf8888888 or header[7] == 0xf8888888, 'Field 8 is saved for data'
            assert header[1] == 0xf7777777 or header[6] == 0xf7777777, 'Field 7 is saved for data'
            assert header[2] == 0xf6666666 or header[5] == 0xf6666666, 'Field 6 is saved for data'
            if header[0] == 0xf8888888 and header[1] == 0xf7777777 and header[2] == 0xf6666666:
                # markers found at the front -> words are in reversed order
                header = header[::-1]
            # TODO: what kind of timestamp is this? counts with appr. 25MHz up?!
            parsed['timestamp'] = header[4]
            assert header[3] >> 8 == 0, 'Highest 6 x 4 bits of field 3 is supposed to be zero'
            parsed['delay_adc4'] = header[3] & 0xf
            parsed['delay_adc3'] = header[3] >> 4 & 0xf
            assert header[2] >> 16 == 0, 'Highest 4 x 4 bits of field 2 is supposed to be zero'
            parsed['delay_adc2'] = header[2] & 0xf
            parsed['delay_adc1'] = header[2] >> 4 & 0xf
            parsed['delay_th'] = header[2] >> 8 & 0xf
            parsed['delay_fpga'] = header[2] >> 12 & 0xf
            # one byte of fine delay per ADC
            parsed['fine_delay_adc'] = np.array([header[1] & 0xff,
                                                 header[1] >> 8 & 0xff,
                                                 header[1] >> 16 & 0xff,
                                                 header[1] >> 24 & 0xff])
            assert header[0] >> 28 == 0xF, 'Highest 4 bits of field 0 is supposed to be 0xF'
            parsed['skip_turns'] = header[0] & 0xfffffff
            if verbose: print(parsed)
        except Exception as e:
            # malformed header -> return whatever was parsed so far
            pass
        return parsed
  476. def readLogfile(self):
  477. """
  478. return: dict with all information present in the Logfile
  479. """
  480. if self.noFile:
  481. return None
  482. logFile = os.path.join(os.path.dirname(self.fileName), 'Measurement_board_6028.log')
  483. if not os.path.isfile(logFile):
  484. return None
  485. log = self._readLogfile(logFile)
  486. file = os.path.basename(self.fileName)
  487. for entry in log:
  488. try:
  489. if entry['Filename'] == file:
  490. return entry
  491. except:
  492. pass
  493. return None
    def _readLogfile(self, file):
        """Parse the measurement log file.

        Entries are separated by '---'.  Each line is either a 'key: value'
        pair or a bare text line, which is stored under a running numeric
        key ('0', '1', ...).

        :return: list of dicts, one per log entry
        """
        defaultKeys = ['Number of Turns', 'Number of Skipped Turns', 'T/H Delay', '25ps Delay', 'ADC 1 Delay', 'ADC 2 Delay', 'ADC 3 Delay', 'ADC 4 Delay', 'ADC Delays', "25ps Delay 2", "T/H Delay 2", 'Stage Step']
        out = []
        with open(file,'r') as f:
            log = f.read().split('---')[1:]
            for entry in log:
                tmp={}
                ptr = 0
                # pre-fill placeholders so later lookups never fail
                for item in defaultKeys:
                    tmp[item] = '?'
                    if item == '25ps Delay 2':
                        tmp[item] = '4'    # default used before it was logged
                for item in entry.split('\n')[1:]:
                    # NOTE(review): split(':') discards text after a second
                    # colon in the value -- preserved as-is
                    t = item.split(':')
                    if len(t) == 1:
                        # bare line (eg. "Acquisition") under a numeric key
                        tmp[str(ptr)] = t[0].strip('\t')
                        ptr+=1
                    else:
                        tmp[str(t[0]).strip('\t')] = t[1].strip(' ')
                out.append(tmp)
        return out
    def getFromLog(self):
        """
        used to get information from the Logfile.
        :return: True when a matching log entry was found and parsed,
            False otherwise
        """
        print('getFromLog')
        self.log = self.readLogfile()
        if self.log == None:
            print('no Log found')
            return False
        try:
            self.isAcquisition=False
            try:
                # bare log lines are stored under numeric keys '0', '1', ...
                if "Acquisition" in self.log['0']:
                    self.isAcquisition = True
                elif "Acquisition" in self.log['1']:
                    self.isAcquisition = True
            except:
                pass
            try:
                self.skippedTurns = int(self.log['Number of Skipped Turns'])
            except:
                pass
            self.c330 = int(self.log['T/H Delay'])
            self.c25 = int(self.log['25ps Delay'])
            # 'ADC Delays' is a stringified list, eg. '[0.0, 1.0, ...]'
            self.f = np.array([float(v) for v in self.log['ADC Delays'][1:-1].split(',')])
            try:
                self.datax = np.array([float(v) for v in self.log['datax'][1:-1].split(',')])
            except:
                self.datax = None
                pass
            self.adcNumber = len(self.f)
            # the timestamp embedded in the file name decides whether the
            # ADC halves must be swapped (presumably a hardware change at
            # unix time 1543859487 -- TODO confirm)
            file = self.log['Filename'].split('_')[-1]
            self.swap_adc = float(file[:-4]) < 1543859487
            if self.swap_adc and self.adcNumber > 4:
                if self.v: print('swapping ADC', self.log['Filename'])
                t = np.array(self.f[4:])
                self.f[4:] = self.f[:4]
                self.f[:4] = t
            self.c25b = int(self.log['25ps Delay 2'])
            try:
                self.stepper = int(self.log['Stage Step'])
            except:
                self.stepper = None
            try:
                self.workingChannels = np.array([float(v) for v in self.log['Working Channels'][1:-1].split(',')])
            except:
                self.workingChannels = None
            try:
                self.shiftFMC2 = int(self.log['shiftFMC2'])
            except:
                pass
            return True
        except:
            # entry incomplete/malformed -> report failure, keep partial data
            pass
        return False
  573. def _pad_array(self, array):
  574. #return array
  575. height, width = array.shape
  576. pwidth = int(2**np.floor(np.log2(width)))
  577. padded = np.zeros((height, pwidth))
  578. padded[:, :width] = array[:, :pwidth]
  579. return padded
  580. def _fft(self, array, is1D=False):
  581. start = time.time()
  582. print('fft', array.shape)
  583. if is1D:
  584. freqs = np.fft.rfft(array)
  585. else:
  586. freqs = np.fft.rfft(self._pad_array(array))
  587. #logging.debug("np fft: {} s".format(time.time() - start))
  588. print('FFT done {:.2f} s'.format(time.time() - start))
  589. return freqs