--- /dev/null
+# Render the per-channel TDC fine-time calibration curves into a PDF.
+# Input: data/calib.txt (one column per channel), written by the
+# calibration Python script.
+set terminal pdf
+set output "data/calib.pdf"
+
+set xlabel "Fine-Time"
+set ylabel "Calibrated Fine-Time [ns]"
+set key left;
+# Raw fine-time counter spans 0..511; calibrated times span 0..5 ns.
+set xrange [-1:512]
+set yrange [-0.1 : 5.1]
+
+plot \
+    'data/calib.txt' using 1 with line title "Ch 0 Timing Trg", \
+    'data/calib.txt' using 2 with line title "Ch 1 DLM sensed", \
+    'data/calib.txt' using 3 with line title "Ch 2 Pulser", \
+    'data/calib.txt' using 4 with line title "Ch 3 Timing Trg (CBM)", \
+    'data/calib.txt' using 5 with line title "Ch 4 DLM Ref";
+
+# Second plot: raw fine-time occupancy histograms for the same channels.
+reset
+set terminal pdf
+set xrange [-1:512]
+
+set output "data/histogram.pdf"
+
+set ylabel "Frequency"
+
+plot \
+    'data/histogram.txt' using 1 with line title "Ch 0 Timing Trg", \
+    'data/histogram.txt' using 2 with line title "Ch 1 DLM sensed", \
+    'data/histogram.txt' using 3 with line title "Ch 2 Pulser", \
+    'data/histogram.txt' using 4 with line title "Ch 3 Timing Trg (CBM)", \
+    'data/histogram.txt' using 5 with line title "Ch 4 DLM Ref";
\ No newline at end of file
--- /dev/null
+import datetime
+import glob
+from hldLib import *
+import numpy as np
+from stats import *
+import os
+
+noEvents = 10000000   # maximum number of events to read from the HLD file
+counter = 0
+channels = 5          # number of TDC channels to histogram
+# One row per channel, 1024 fine-time bins (10-bit fine-time counter).
+histogram = np.zeros( (channels, 1024), dtype="uint" )
+
+def analyseCTSSSE(ssEvt, evt, counter):
+    """Sub-sub-event callback: histogram the raw TDC fine-times per channel.
+
+    ssEvt   -- (header dict, data words) tuple of the CTS sub-sub-event
+    evt     -- full event (unused here)
+    counter -- running event index (unused here)
+    """
+    # Only `histogram` and the `channels` constant are actually read here;
+    # the globals below appear to be copied from the sibling analysis
+    # scripts and are unused in this function.
+    global epochTimeInNs, eventTimeInCBM
+    global calibDataTDC
+    global idle
+    global netDLMChan, pinDLMChan
+
+    # Strip the CTS block and the sync words; the remainder is TDC data.
+    cts, remain = extractCTS(ssEvt[1])
+    sync, tdc = extractSync(remain)
+    assert(len(sync) in {1, 5, 11})
+
+    for w in tdc:
+        if w & 0x80000000:  # TDC time-data words have the MSB set
+            data = tdcTimeData(w)
+            assert(data['channelNo'] < channels)
+            # Fine-time 0x3ff is excluded -- presumably an
+            # invalid/overflow marker; confirm against the TDC docs.
+            if data['fineTime'] != 0x3ff:
+                histogram[ data['channelNo'], data['fineTime'] ] += 1
+
+files = glob.glob("/local/mpenschuck/hldfiles/*.hld")
+files.sort()
+print "Read file ", files[-2]
+callbacks = {0x7005: analyseCTSSSE}
+counter = iteratorOverHld(files[-1], callbacks, noEvents)
+
+print "Processed %d events, found %d fineTimes (%d in smallest channel)" % (counter, np.sum(histogram), np.min(np.sum(histogram, axis=1)))
+
+calib = 5.* np.cumsum(histogram, axis=1).astype('float') / np.sum(histogram, axis=1)[:,np.newaxis]
+calib *= 5.0 / calib[:,-1].reshape(-1,1)
+
+np.save("calib.npy", calib)
+np.savetxt("data/calib.txt", calib.T)
+np.savetxt("data/histogram.txt", histogram.T)
\ No newline at end of file
--- /dev/null
+# Histogram of DLM latency deviations for one measurement block.
+# The label file is rewritten by the DLM analysis script before each run.
+set terminal png
+set output "data/dlm_jitter.png"
+
+load "data/dlm_jitter.label"
+set label labelText at graph 0.05,0.95
+
+set xlabel "Deviation from average"
+set ylabel "Frequency"
+
+set title "Jitter of DLM"
+
+# Column 2 (deviation, ns) is scaled to ps and binned in 10 ps steps.
+binwidth=10
+bin(x,width)=width*floor(x/width)
+
+plot 'data/dlm_jitter.txt' using (bin($2*1000,binwidth)):(1.0) smooth freq with boxes notitle
\ No newline at end of file
--- /dev/null
+import datetime
+import glob
+from hldLib import *
+import numpy as np
+from stats import *
+import os
+
+noEvents = 10000000   # maximum number of events to process
+counter = 0
+# TDC channels carrying the DLM: sensed via network and via pin.
+netDLMChan = 1
+pinDLMChan = 4
+
+epoch = 0
+epochTimeInNs = 0.0   # running TDC epoch offset in ns
+
+# Per-channel fine-time calibration table produced by the calibration script.
+calibDataTDC = np.load("calib.npy")
+
+latencies = [[]]      # list of measurement blocks; each collects matched intervals (ns)
+netTimeBuf = []       # buffered hit times (ns) on the network DLM channel
+pinTimeBuf = []       # buffered hit times (ns) on the pin DLM channel
+idle = 0              # consecutive events with a silent DLM channel
+
+def analyseCTSSSE(ssEvt, evt, counter):
+    """Collect DLM latencies: buffer hits on the network and pin channels,
+    and once the link has been idle for 1000 events, match every pin hit
+    to its nearest network hit and close the current measurement block.
+
+    NOTE(review): the nesting below is reconstructed from context; the
+    original indentation is not preserved in this patch view -- verify
+    against the repository copy.
+    """
+    global epochTimeInNs, eventTimeInCBM
+    global calibDataTDC
+    global idle
+    global netDLMChan, pinDLMChan
+    global netTimeBuf, pinTimeBuf
+
+    cts, remain = extractCTS(ssEvt[1])
+    sync, tdc = extractSync(remain)
+    assert(len(sync) in {1, 5, 11})
+
+    # Calibrated hit times per channel (ns); epoch offset carries across events.
+    channels, epochTimeInNs = interpretTDCData(tdc, calibDataTDC, epochTimeInNs)
+
+    netTimeBuf += channels.get(netDLMChan, [])
+    pinTimeBuf += channels.get(pinDLMChan, [])
+
+    if 0==len(channels.get(netDLMChan, [])) or 0==len(channels.get(pinDLMChan, [])):
+        # At least one DLM channel was silent this event; count it as idle,
+        # but only once both buffers have collected some data.
+        if len(netTimeBuf) and len(pinTimeBuf):
+            idle += 1
+
+            if idle >= 1000:
+                netTime = np.array( netTimeBuf )
+
+                # Match each pin hit to the closest network hit; keep only
+                # plausible intervals (< 5000 ns).
+                for pinTime in pinTimeBuf:
+                    interval = np.min( np.abs( netTime - pinTime ) )
+                    if interval < 5000:
+                        latencies[-1].append( interval )
+
+                idle = 0
+                print len(pinTimeBuf), len(netTimeBuf)
+                print "start new block, old contained %d matches with avg %f" % (len(latencies[-1]), np.average(latencies[-1]))
+                latencies.append([])
+                netTimeBuf = []
+                pinTimeBuf = []
+
+    else:
+        # Both channels fired: link is active again, reset the idle counter.
+        idle = 0
+
+
+# Pick the newest HLD file and run the DLM latency analysis over it.
+files = glob.glob("/local/mpenschuck/hldfiles/*.hld")
+files.sort()
+#files.pop()
+#files.pop()
+print "Read file ", files[-1]
+callbacks = {0x7005: analyseCTSSSE}
+counter = iteratorOverHld(files[-1], callbacks, noEvents)
+
+print "Processed %d events, found %d matches in %d blocks" % (counter, sum((len(x) for x in latencies)), len(latencies))
+
+# Per-block statistics rows: [index, #matches, avg latency, std latency].
+stats = np.zeros ( (len(latencies),4) )
+i = 0
+for block in latencies:
+    # Skip blocks with too few matches for meaningful statistics.
+    if len(block) < 500: continue
+    times = np.array( block )
+    avgInterval = np.average(times)
+    stdInterval = np.std(times)
+
+    stats[i] = [i, times.shape[0], avgInterval, stdInterval]
+
+    print "Avg: %f ns, Std: %f ns" % (avgInterval, stdInterval)
+    # Dump this block's data plus a gnuplot label, then render its histogram.
+    np.savetxt("data/dlm_jitter.txt", np.vstack([times, times - avgInterval]).T )
+    text_file = open("data/dlm_jitter.label", "w")
+    text_file.write('labelText = "Events: %d\\nAvg: %.2f ns\\nRMS: %.2f ps"' % (times.shape[0], avgInterval, stdInterval * 1000) )
+    text_file.close()
+
+    os.system("gnuplot dlm_jitter.gp")
+    i+=1
+    # Rendered plots are numbered starting at 1 (i incremented just above).
+    os.system("mv data/dlm_jitter.png data/dlm_jitter%04d.png" % i)
+
+np.savetxt("data/dlm_jitter_blocks.txt", stats[:i])
--- /dev/null
+# Average DLM latency per measurement block, with std-dev error bars.
+# Input columns (dlm_jitter_blocks.txt): index, #matches, avg, std.
+set terminal pdf
+set output "data/dlm_jitter_blocks.pdf"
+
+set xlabel "Run"
+set ylabel "Average DLM Latency [ns]"
+
+#set xrange [0:10]
+
+# x = 1-based block index, y = average latency, error bar = std deviation.
+plot 'data/dlm_jitter_blocks.txt' using ($1+1):3:4 with errorbar title "DLM Latency"
\ No newline at end of file
--- /dev/null
+# Histogram of event-interval deviations; the label file is written by
+# the event-jitter analysis script before this runs.
+set terminal png
+set output "data/event_jitter.png"
+
+load "data/event_jitter.label"
+set label labelText at graph 0.05,0.95
+
+set xlabel "Deviation from average [ps]"
+set ylabel "Frequency"
+
+# Column 2 (deviation, ns) is scaled to ps and binned in 10 ps steps.
+binwidth=10
+bin(x,width)=width*floor(x/width)
+plot 'data/event_jitter.txt' using (bin($2*1000,binwidth)):(1.0) smooth freq with boxes notitle
\ No newline at end of file
--- /dev/null
+from hldLib import *
+import glob
+import numpy as np
+
+# set-up constants and fetch calibration data
+noEvents = 1000000
+calibDataTDC = np.load("calib.npy")   # per-channel fine-time calibration (ns)
+# Reference-time channels: TRB timing trigger and the CBM-derived trigger.
+trbRefTimeTDCChan = 0
+cbmRefTimeTDCChan = 3
+
+# interpret the HLD file
+epochTimeInNs = 0.0   # running TDC epoch offset, carried across events
+eventTimeInCBM = []   # per-event time in the CBM domain (ns)
+def analyseCTSSSE(ssEvt, evt, counter):
+ global epochTimeInNs, calibDataTDC, eventTimeInCBM
+ global trbRefTimeTDCChan, cbmRefTimeTDCChan
+
+ cts, remain = extractCTS(ssEvt[1])
+ sync, tdc = extractSync(remain)
+ assert(len(sync) in {1, 5, 11})
+
+ channels, epochTimeInNs = interpretTDCData(tdc, calibDataTDC, epochTimeInNs)
+
+ if (not channels[trbRefTimeTDCChan] or \
+ not channels[cbmRefTimeTDCChan] or \
+ len(channels[trbRefTimeTDCChan]) != 1 or \
+ len(channels[cbmRefTimeTDCChan]) != 1):
+ return
+
+ eventTimeInCBM.append( 8.0 * sync[2] + channels[trbRefTimeTDCChan][0] - channels[cbmRefTimeTDCChan][0] )
+
+# get file and iterate over it
+files = glob.glob("/local/mpenschuck/hldfiles/ct14290170201.hld")
+files.sort()
+print "Read file ", files[-1]
+# NOTE(review): the other scripts register on sub-sub-event id 0x7005;
+# 0xf3c0 here presumably addresses a different endpoint -- confirm.
+callbacks = {0xf3c0: analyseCTSSSE}
+counter = iteratorOverHld(files[-1], callbacks, noEvents)
+
+# calculate slope and jitter
+print "Found %d events" % counter
+eventTimeInCBM = np.array(eventTimeInCBM)
+# First differences: interval between consecutive accepted events (ns).
+timeBetweenEvents = eventTimeInCBM[1:] - eventTimeInCBM[:-1]
+avgInterval = np.average(timeBetweenEvents)
+stdInterval = np.std(timeBetweenEvents)
+
+# dump values computed
+print "Avg: %f ns, Std: %f ns" % (avgInterval, stdInterval)
+np.savetxt("data/event_jitter.txt", np.vstack([timeBetweenEvents, timeBetweenEvents - avgInterval]).T )
+text_file = open("data/event_jitter.label", "w")
+text_file.write('labelText = "Events: %d\\nAvg: %.2f ns\\nFreq: %.2f KHz\\nRMS: %.2f ps"\n' % (counter, avgInterval, 1e6 / avgInterval, stdInterval * 1000) )
+text_file.close()
--- /dev/null
+import struct
+import datetime
+
+# iterates over an HLD file, yielding events of the following structure:
+# evt = array(
+# 0: dict(size, triggerType, evtNumFile, timestamp, runNum)
+# 1: array( #of sub-events
+# array(
+# 0: dict(size, subEvtId, trgNum, trgCode)
+# 1: array ( #of sub-sub-events
+# array(
+# 0: dict(size, subsubEvtId)
+# 1: array( data-words )
+# )))))
+def eventIterator(f):
+    """Generator over the events of an open HLD file `f`.
+
+    Yields (hdrDec, subEvents) tuples with the structure documented in
+    the comment above.  Handles both little- and big-endian files and
+    the 8-byte padding between events.
+    """
+    f.seek(0)
+    pos = 0
+
+    while(1):
+        # we might need to skip some padding
+        assert(f.tell() <= pos)
+        f.seek(pos)
+
+        evtHdrBuf = f.read(8*4)
+        if (len(evtHdrBuf) != 8*4):
+            break #eof
+
+        # Endianness heuristic: an implausibly large second word means we
+        # unpacked with the wrong byte order -- retry big-endian.
+        hdr = struct.unpack("<" + ("I" * 8), evtHdrBuf)
+        if (hdr[1] > 0xf0000): hdr = struct.unpack(">" + ("I" * 8), evtHdrBuf)
+
+        evtSize = hdr[0]
+
+        # Events are padded to 8-byte boundaries in the file.
+        pos += evtSize
+        if (evtSize % 8):
+            pos += 8 - (evtSize % 8)
+
+        # decode hdr
+        hdrDec = {
+            'size': evtSize,
+            'triggerType': hdr[2] & 0xffff,
+            'evtNumFile': hdr[3],
+            # Byte-packed date (year, month, day) and time (h, m, s).
+            # NOTE(review): the year is a single byte with no century
+            # offset added -- confirm against the HLD format spec.
+            'timestamp': datetime.datetime(
+                (hdr[4] >> 16) & 0xff, (hdr[4] >> 8) & 0xff, (hdr[4] >> 0) & 0xff,
+                (hdr[5] >> 16) & 0xff, (hdr[5] >> 8) & 0xff, (hdr[5] >> 0) & 0xff ),
+            'runNum': hdr[6]
+        }
+
+        # load payload
+        subEvents = []
+        innerPos = 8*4
+        while(innerPos < evtSize):
+            subEvtHdrBuf = f.read(4 * 4)
+            # Sub-events carry their own endianness; remember it for the
+            # sub-sub-event payloads below.
+            endian = "<"
+            subEvtHdr = struct.unpack("<" + ("I" * 4), subEvtHdrBuf)
+            if (subEvtHdr[1] > 0xf0000):
+                subEvtHdr = struct.unpack(">" + ("I" * 4), subEvtHdrBuf)
+                endian = ">"
+
+            subEvtSize = subEvtHdr[0]
+            subEvtHdrDec = {
+                'size': subEvtSize,
+                'subEvtId': subEvtHdr[2] & 0xffff,
+                'trgNum': (subEvtHdr[3] >> 8) & 0xffffff,
+                'trgCode': subEvtHdr[3] & 0xff
+            }
+
+            subsubEvents = []
+            ssPos = 4*4
+            while(ssPos < subEvtSize):
+                sseHdrBuf = f.read(4)
+                sseHdr = struct.unpack(endian + "I", sseHdrBuf)
+                # Header word: upper 16 bits = payload size in 32-bit words,
+                # lower 16 bits = sub-sub-event id.
+                sseSize = ((sseHdr[0] >> 16) & 0xffff) * 4
+                sseHdrDec = {
+                    'size': sseSize,
+                    'subsubEvtId': sseHdr[0] & 0xffff
+                }
+
+                sseBuf = f.read(sseSize)
+                sseCont = struct.unpack(endian + ("I" * (sseSize/4)), sseBuf)
+                subsubEvents.append( (sseHdrDec, sseCont) )
+
+                ssPos += sseSize + 4
+
+            subEvents.append( (subEvtHdrDec, subsubEvents) )
+
+            innerPos += subEvtSize
+
+        yield (hdrDec, subEvents)
+
+def dumpEvt(evt):
+    """Return a human-readable, indented multi-line dump of a full event."""
+    res = str(evt[0]) + "\n"
+    for subEvt in evt[1]:
+        res += " " + dumpSubEvt(subEvt).replace("\n", "\n ") + "\n"
+    return res
+
+def dumpSubEvt(sEvt):
+    """Return a human-readable dump of a sub-event and its sub-sub-events."""
+    h = sEvt[0]
+    res = "subEvtId: 0x%04x, trgNum: % 9d, trgCode: % 4d, size: % 4d\n" % (h['subEvtId'], h['trgNum'], h['trgCode'], h['size'])
+    for ssEvt in sEvt[1]:
+        res += " " + dumpSubSubEvt(ssEvt).replace("\n", "\n ") + "\n"
+    return res
+
+def dumpSubSubEvt(ssEvt):
+    """Return a human-readable dump of a sub-sub-event header and its words."""
+    res = "ID: 0x%04x, Size: %d\n" % (ssEvt[0]['subsubEvtId'], ssEvt[0]['size'])
+    res += dumpArray(ssEvt[1])
+    return res
+
+def dumpArray(arr):
+    """Format a word array as hex, 8 words per line, with 1-based indices."""
+    res = ""
+    for i in range(0, len(arr)):
+        if i != 0 and i % 8 == 0: res += "\n"
+        res += " [% 3d] 0x%08x" % (i+1, arr[i])
+
+    return res
+
+def iteratorOverHld(filename, cbs, eventNum = -1):
+    """Iterate over the HLD file `filename`, dispatching sub-sub-events.
+
+    filename -- path of the HLD file
+    cbs      -- dict mapping subsubEvtId -> callable, or -> iterable of
+                callables; each is invoked as cb(ssEvt, evt, eventIndex)
+    eventNum -- stop after this many non-empty events if > 0
+
+    Returns the number of (non-empty) events visited.
+    """
+    i = 0
+    with open(filename, "rb") as hld:
+        for evt in eventIterator(hld):
+            if not len(evt[1]): continue
+            for sEvt in evt[1]:
+                for ssEvt in sEvt[1]:
+                    ssid = ssEvt[0]['subsubEvtId']
+                    if ssid in cbs:
+                        if callable( cbs[ssid] ):
+                            cbs[ssid](ssEvt, evt, i)
+                        else:
+                            # A non-callable entry is a collection of callbacks.
+                            for func in cbs[ssid]:
+                                func(ssEvt, evt, i)
+
+            i += 1
+            if (0 < eventNum <= i):
+                break
+
+    return i
+
+def extractCTS(ssEvtData):
+    """Split off the CTS block: returns (ctsWords, remainingWords).
+
+    The block length is derived from bit fields of the first word; each
+    field counts sub-blocks with a fixed word multiplier.
+    NOTE(review): field meanings inferred from the readout format --
+    confirm against the TRB3/CTS documentation.
+    """
+    hdr = ssEvtData[0]
+    # Header word plus: bits 16-19 (x2 words each), bits 20-24 (x2),
+    # bit 25 (x2), bit 26 (x3), bit 27 (x1).
+    length = 1 + \
+        ((hdr >> 16) & 0xf) * 2 + \
+        ((hdr >> 20) & 0x1f) * 2 + \
+        ((hdr >> 25) & 0x1) * 2 + \
+        ((hdr >> 26) & 0x1) * 3 + \
+        ((hdr >> 27) & 0x1) * 1
+
+    # Trailer format in bits 28-29: type 1 adds one word, type 2 adds four.
+    if (((hdr >> 28) & 0x3) == 0x1): length += 1
+    if (((hdr >> 28) & 0x3) == 0x2): length += 4
+
+    return (ssEvtData[:length], ssEvtData[length:])
+
+def extractSync(ssEvtData):
+    """Split off the sync block following the CTS data.
+
+    Returns (syncWords, remainingWords); the block is 1, 5 or 11 words
+    long depending on the packing type encoded in bits 26-27 of the
+    first word (bits 28-31 must be 0x1).
+    """
+    hdr = ssEvtData[0]
+    assert(hdr >> 28 == 0x1)
+    packType = (hdr >> 26) & 0x3
+    length = 1
+    if (packType == 1): length += 4
+    if (packType == 3): length += 10
+
+    return (ssEvtData[:length], ssEvtData[length:])
+
+
+def extractTDC(ssEvtData):
+    """Split off a TDC block whose first word holds its payload length.
+
+    NOTE(review): unused in this file and looks suspect:
+      * the assert reads `length >= len(ssEvtData)`; a sanity check
+        would be expected the other way round (header + payload must
+        fit into the available data);
+      * the returned remainder starts at index `length`, overlapping
+        the last word of the returned block (extractCTS/extractSync
+        return non-overlapping slices).
+    Confirm the intended semantics before using it.
+    """
+    length = ssEvtData[0]
+    assert(length >= len(ssEvtData))
+    return (ssEvtData[:length+1], ssEvtData[length:])
+
+def tdcTimeData(w):
+    """Decode a 32-bit TDC time-data word; returns None unless bit 31 is set.
+
+    Bit layout: [31] time-data marker, [28:22] channelNo (7 bit),
+    [21:12] fineTime (10 bit), [11] edge, [10:0] coarseTime (11 bit).
+    """
+    if not (w & 0x80000000):
+        return None
+
+    return {
+        "coarseTime": (w >> 0) & 0x7ff,
+        "edge": (w >> 11) & 1,
+        "fineTime": (w >> 12) & 0x3ff,
+        "channelNo": (w >> 22) & 0x7f
+    }
+
+
+def interpretTDCData(data, calibDataTDC, epochTimeInNs = 0):
+    """Convert raw TDC words into calibrated hit times.
+
+    data          -- iterable of raw 32-bit TDC words
+    calibDataTDC  -- calibration table indexed [channel, fineTime] -> ns
+    epochTimeInNs -- epoch offset carried over from the previous event
+
+    Returns (channels, epochTimeInNs): `channels` maps channel number to
+    a list of hit times in ns; only words with edge == 1 are kept.
+    """
+    channels = {}
+    for w in data:
+        if w & 0x80000000:  # time-data word
+            tdcData = tdcTimeData(w)
+            chan = tdcData["channelNo"]
+            if tdcData["edge"] == 1:# and tdcData["fineTime"] != 0x3ff:
+                fineTimeInNs = calibDataTDC[tdcData["channelNo"], tdcData["fineTime"]]
+                assert( 0 <= fineTimeInNs <= 5.0 )
+                # One coarse tick = 5 ns; the fine time measures into the
+                # coarse interval and is therefore subtracted.
+                coarseTimeInNs = tdcData["coarseTime"] * 5.0
+                tdcTime = coarseTimeInNs - fineTimeInNs + epochTimeInNs
+
+                if not chan in channels: channels[chan] = []
+                channels[chan].append(tdcTime)
+
+
+        elif (w >> 29) == 3: # epoch counter
+            # One epoch = 2048 coarse ticks * 5 ns = 10240 ns.
+            epoch = w & 0x0fffffff
+            epochTimeInNs = epoch * 10240.0
+
+        elif (w >> 29) == 1: # tdc header
+            pass
+        elif (w >> 29) == 2: # debug
+            pass
+        else:
+            print "Unknown TDC word type: 0x%08x" % w
+
+    return channels, epochTimeInNs
--- /dev/null
+import datetime
+import glob
+from hldLib import *
+import numpy as np
+from stats import *
+import os
+
+noEvents = 1000000   # maximum number of events to process
+counter = 0
+
+timestamps = [] # in us
+lastId = None        # last unwrapped trigger id seen
+trgOffset = 0        # accumulated 16-bit trigger-counter overflow offset
+lastRegNum = None    # last record number seen (used to skip duplicates)
+
+def analyseMBS(ssEvt, evt, counter):
+    """Collect MBS timestamps (us), unwrapping the 16-bit trigger counter.
+
+    Word usage: word 0 = [31] error flag, [24] 'includes time' flag,
+    [23:0] record number; word 1 = fine time (5 ns ticks, presumably);
+    word 2 = [15:0] trigger id.
+    NOTE(review): word meanings inferred from use -- verify against the
+    MBS sub-event format description.
+    """
+    global timestamps
+    global lastRegNum, lastId, trgOffset
+
+    regNum = ssEvt[1][0] & 0x00ffffff
+
+    # Skip duplicate records.
+    if lastRegNum == regNum: return
+    lastRegNum = regNum
+
+    inclTime = ssEvt[1][0] & 0x01000000
+    error = ssEvt[1][0] & 0x80000000
+
+    if (error): print "Error in regNum", regNum
+    assert(inclTime)
+
+    # Unwrap the 16-bit trigger counter into a monotonically growing id.
+    trgId = trgOffset + (ssEvt[1][2]&0xffff)
+    if lastId != None and lastId > trgId:
+        trgOffset += 0x10000
+        trgId += 0x10000
+
+    lastId = trgId
+    #print ssEvt[1][1]*5e-3
+
+    # Timestamp in us: assumes a 20 us trigger period plus the fine part
+    # (5 ns ticks scaled to us) -- TODO confirm the 20 us period.
+    ts=20*trgId + ssEvt[1][1]*5e-3
+    #print ts
+
+    timestamps.append( ts )
+
+
+# Pick the newest HLD file and collect MBS timestamps from it.
+files = glob.glob("/local/mpenschuck/hldfiles/*.hld")
+files.sort()
+print "Read file ", files[-1]
+callbacks = {0xf30a: analyseMBS}
+counter = iteratorOverHld(files[-1], callbacks, noEvents)
+
+print "Processed %d events, found %d timestampts" % (counter, len(timestamps))
+
+timestamps = np.array(timestamps)
+# First differences: time between consecutive triggers (us).
+sTimestamps = slope(timestamps)
+
+dev = sTimestamps - np.average(sTimestamps)
+
+# Print: 1e3/30.52 (presumably the expected period -- confirm), then the
+# measured average, std deviation, and maximum positive deviation.
+print 1e3/30.52, np.average(sTimestamps), np.std(sTimestamps), np.max(dev)
--- /dev/null
+import numpy as np
+
+def slopeWithoutAvg(X):
+    """Return the first differences of X with their mean subtracted."""
+    # NOTE(review): the local name shadows the module-level slope() below.
+    slope = X[1:]-X[:-1]
+    return slope - np.average(slope)
+
+def slope(X):
+    """Return the first differences X[i+1] - X[i] (length len(X) - 1)."""
+    return X[1:] - X[:-1]
\ No newline at end of file