diff --git a/AnalysisParameters.py b/AnalysisParameters.py new file mode 100644 index 0000000..ca12b5a --- /dev/null +++ b/AnalysisParameters.py @@ -0,0 +1,20 @@ +import numpy as np + +def init(): + global coefficients + global MinimalFittingLimit + global UpwardsOn + global PlotBuffer + global LimitForCorrelation + + coefficients = {} + #Channel 1: Axopatch + coefficients['i1'] = {'a': 0.9999, 'E': 0, 'S': 5, 'eventlengthLimit': 5e-3} + #Channel 2: Femto + coefficients['i2'] = {'a': 0.9999, 'E': 0, 'S': 5, 'eventlengthLimit': 5e-3} + + MinimalFittingLimit = 1e-3 + LimitForCorrelation = 10e-3 + UpwardsOn = 0 + + PlotBuffer = 500 diff --git a/CUSUM.py b/CUSUM.py new file mode 100644 index 0000000..dd835b5 --- /dev/null +++ b/CUSUM.py @@ -0,0 +1,131 @@ +import numpy as np +import pyqtgraph as pg +from timeit import default_timer as timer + +def detection(self, data, dt, threshhold , minlength , maxstates): + + s = timer() + logp = 0 #instantaneous log-likelihood for positive jumps + logn = 0 #instantaneous log-likelihood for negative jumps + cpos = np.zeros(len(data), dtype='float64') #cumulative log-likelihood function for positive jumps + cneg = np.zeros(len(data), dtype='float64') #cumulative log-likelihood function for negative jumps + gpos = np.zeros(2, dtype='float64') #decision function for positive jumps + gneg = np.zeros(2, dtype='float64') #decision function for negative jumps + edges = np.array([0], dtype='int64') #initialize an array with the position of the first subevent - the start of the event + real_start = np.array([], dtype='int64') #initialize an array with the position of the first subevent - the start of the event + real_end = np.array([], dtype='int64') #initialize an array with the position of the first subevent - the start of the event + real_Depth = np.array([], dtype='int64') #initialize an array with the position of the first subevent - the start of the event + + anchor = 0 #the last detected change + length = len(data) + self.var = np.std(data) + h = threshhold / self.var + k = 1000 + nStates = 0 + varM = data[0] + varS = 0 + mean = data[0] + print('length data =' + str(length)) + v = np.zeros(length, dtype='float64') + while k < length-100: + k += 1 + if nStates == 0: + variance = np.var(data[anchor:k]) # initial params for pattern region + mean = np.mean(data[anchor:k]) + if variance == 0: break + logp = threshhold/variance * (data[k] - mean - threshhold/2.) #instantaneous log-likelihood for current sample assuming local baseline has jumped in the positive direction + logn = -threshhold/variance * (data[k] - mean + threshhold/2.) #instantaneous log-likelihood for current sample assuming local baseline has jumped in the negative direction + cpos[k] = cpos[k-1] + logp #accumulate positive log-likelihoods + cneg[k] = cneg[k-1] + logn #accumulate negative log-likelihoods + gpos[1] = max(gpos[0] + logp, 0) #accumulate or reset positive decision function + gneg[1] = max(gneg[0] + logn, 0) #accumulate or reset negative decision function + if (gpos[1] > h or gneg[1] > h): + + if (gpos[1] > h): #significant positive jump detected + jump = anchor + np.argmin(cpos[anchor:k+1]) #find the location of the start of the jump + if jump - edges[nStates] > minlength and np.abs(data[jump+minlength]-data[jump]) >threshhold/4: + edges = np.append(edges, jump) + nStates += 1 + #print('EVENT!!!!! at ='+str(self.t[jump])) + anchor = k# no data meaning at bad points! + # away from bad point more! 
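+                # Re-arm the detector after logging this edge: the statistics below are
+                # cleared so the next change point is judged against data accumulated
+                # from the new anchor, not against the pre-jump baseline.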
+ cpos[0:len(cpos)] = 0 #reset all decision arrays + cneg[0:len(cneg)] = 0 + gpos[0:len(gpos)] = 0 + gneg[0:len(gneg)] = 0 + if (gneg[1] > h): #significant negative jump detected + jump = anchor + np.argmin(cneg[anchor:k+1]) + if jump - edges[nStates] > minlength and np.abs(data[jump+minlength]-data[jump]) >threshhold/4: + edges = np.append(edges, jump) + nStates += 1 + #print('EVENT!!!!! at ='+str(self.t[jump] )) + anchor = k # no data meaning at bad points! + # away from bad point more! + cpos[0:len(cpos)] = 0 #reset all decision arrays + cneg[0:len(cneg)] = 0 + gpos[0:len(gpos)] = 0 + gneg[0:len(gneg)] = 0 + + + + gpos[0] = gpos[1] + gneg[0] = gneg[1] + if maxstates > 0: + if nStates > maxstates: + print('too sensitive') + nStates = 0 + k = 0 + threshhold = threshhold*1.1 + h = h*1.1 + logp = 0 #instantaneous log-likelihood for positive jumps + logn = 0 #instantaneous log-likelihood for negative jumps + cpos = np.zeros(len(data), dtype='float64') #cumulative log-likelihood function for positive jumps + cneg = np.zeros(len(data), dtype='float64') #cumulative log-likelihood function for negative jumps + gpos = np.zeros(2, dtype='float64') #decision function for positive jumps + gneg = np.zeros(2, dtype='float64') #decision function for negative jumps + edges = np.array([0], dtype='int64') #initialize an array with the position of the first subevent - the start of the event + anchor = 0 #the last detected change + length = len(data) + mean = data[0] + nStates = 0 + mean = data[0] + edges = np.append(edges, len(data)-1) #mark the end of the event as an edge + nStates += 1 + + + cusum = dict() + print('Events = ' + str(self.t[edges])) + for i in range(len(edges)-1): + if edges[i+1] - edges[i] < int(0.05 * self.outputsamplerate): + real_start = np.append(real_start, edges[i]) + real_end = np.append(real_end, edges[i+1]) + real_Depth = np.append(real_Depth, np.mean(data[edges[i]:edges[i+1]])) + cusum['Real_Start'] = real_start + cusum['Real_End'] = real_end + cusum['Real_Depth'] = real_Depth + print('Real Start =' + str(self.t[cusum['Real_Start']] ) ) + print('Real End =' + str(self.t[cusum['Real_End']] ) ) + cusum['CurrentLevels'] = [np.average(data[edges[i]+minlength:edges[i+1]]) for i in range(nStates)] #detect current levels during detected sub-event + print('Length of time = ' + str(len(self.t))) + print('Edges[-1] = ' + str(edges[-1])) + cusum['EventDelay'] = edges #locations of sub-events in the data + cusum['Threshold'] = threshhold #record the threshold used + print('Event = '+str( cusum['EventDelay'])) + cusum['jumps'] = np.diff(cusum['CurrentLevels']) + + #self.__recordevent(cusum) + e = timer() + print('cusum took = ' + str(e-s) + 's') + return cusum + +def print_fitting(self, cusum): + self.p1.plot(self.t[0: cusum['EventDelay'][1]], np.repeat(np.array([cusum['CurrentLevels'][0]]), len(self.t[0:cusum['EventDelay'][1]])), pen=pg.mkPen(color=(173, 27, 183), width=3)) + for number_cusum in np.arange(1,len(cusum['EventDelay'])-1 ,1): + self.p1.plot(self.t[cusum['EventDelay'][number_cusum]:cusum['EventDelay'][number_cusum+1]], np.repeat(np.array([cusum['CurrentLevels'][number_cusum]]), len(self.t[cusum['EventDelay'][number_cusum]:cusum['EventDelay'][number_cusum+1]])), pen=pg.mkPen(color=(173, 27, 183), width=3)) + self.p1.plot(np.linspace(self.t[cusum['EventDelay'][number_cusum]],self.t[cusum['EventDelay'][number_cusum]] + 0.00001 ,100),np.linspace(cusum['CurrentLevels'][number_cusum-1],cusum['CurrentLevels'][number_cusum],100), pen=pg.mkPen(color=(173, 27, 183), width=3)) + 
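+    # Finally, draw the last fitted level from the second-to-last edge to the end of the trace.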
self.p1.plot(self.t[cusum['EventDelay'][-2]:-1], np.repeat(np.array([cusum['CurrentLevels'][-1]]), len(self.t[cusum['EventDelay'][-2]:-1])), pen=pg.mkPen(color=(173, 27, 183), width=3)) + self.p1.autoRange() + + + + diff --git a/Camera.py b/Camera.py new file mode 100644 index 0000000..70945c9 --- /dev/null +++ b/Camera.py @@ -0,0 +1,24 @@ +import numpy as np +import cv2 + +cap = cv2.VideoCapture(1) + +print(cap.get(3)) +print(cap.get(4)) +ret = cap.set(3, cap.get(3)*2) +ret = cap.set(4, cap.get(4)*2) +while(True): + # Capture frame-by-frame + ret, frame = cap.read() + + # Our operations on the frame come here + #gray = cv2.cvtColor(frame, cv2.COLOR_BGR) + + # Display the resulting frame + cv2.imshow('frame', frame) + if cv2.waitKey(1) & 0xFF == ord('q'): + break + +# When everything done, release the capture +cap.release() +cv2.destroyAllWindows() \ No newline at end of file diff --git a/ConcatenateEvents.py b/ConcatenateEvents.py new file mode 100644 index 0000000..5d16bee --- /dev/null +++ b/ConcatenateEvents.py @@ -0,0 +1,254 @@ +## Cross-correlation of two channels +import AnalysisParameters as pm +import numpy as np +import scipy +import scipy.signal as sig +import Functions as fu +import os +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.font_manager import FontProperties +import pandas as pd +import MiscParameters as pm +from matplotlib.ticker import EngFormatter +import h5py +from tkinter.filedialog import askopenfilenames +from matplotlib import rc +rc('mathtext', default='regular') +pm.init(LoadFiles = 0) + +baseline = 100 +fitOn = 1 +percentage = 1 +plotPoints=0 + +filenames = {'/Users/migraf/SWITCHdrive/PhD/BPS/Poster Resources/Data/17B_1MKCl_IV_80mer_both_Ch25_8_OriginalDB.hdf5'} +#filenames = askopenfilenames() + +for filename in filenames: + print(filename) + f = h5py.File(filename, 'r') + out = fu.OpenFile(f['General/FileName'].value) + file = os.sep + str(os.path.split(filename)[1][:-5]) + + # Graphene Or Not?? 
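+    # Non-graphene files carry only the ionic channel (i1/v1), so their events are
+    # plotted page by page below; files with a transverse channel are handled in the
+    # else-branch, which concatenates events from both channels per case.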
+ if not out['graphene']: + ## Only need to plot i1 and i1_Up and v1 + toPlot = ['i1', 'i1_Up'] + for k in toPlot: + NumberOfEvents=f['LowPassSegmentation/' + k + '/NumberOfEvents'].value + if NumberOfEvents is not 0: + pp = PdfPages(pm.OutputFolder + file + '_Only_' + k + '_EventPlots.pdf') + fig = plt.figure() + ax1 = fig.add_subplot(211) + ax2 = fig.add_subplot(212, sharex=ax1) + ax1.set_title('Ionic Current Trace "' + k + '"') + ax2.set_title('Ionic Voltage') + ax1.set_ylabel('Current [nA]') + ax2.set_ylabel('Voltage [V]') + ax1.set_xlabel('Time [s]') + ax2.set_xlabel('Time [s]') + ax1.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax2.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax1.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax2.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + line1, = ax1.plot([]) + line2, = ax2.plot([], 'c') + if fitOn: + line1f, = ax1.plot([], 'r') + for i in np.linspace(0, NumberOfEvents-1, NumberOfEvents*percentage, dtype=np.uint64): + startp_i1 = np.int64( + f['LowPassSegmentation/' + k + '/' + 'StartPoints'].value[i] - baseline) + endp_i1 = np.int64( + f['LowPassSegmentation/' + k + '/' + 'EndPoints'].value[i] + baseline) + if startp_i1 < 0: + startp_i1 = 0 + if endp_i1 > len(out['i1']): + endp_i1 = len(out['i1']) + if fitOn: + if plotPoints: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i], + f['LowPassSegmentation/' + k + '/AllEvents/' + '{:09d}'.format(i)].value[i], + np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i]]) + else: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i], + np.ones(endp_i1 - startp_i1 - 2 * baseline) * + f['LowPassSegmentation/' + k + '/FitLevel'].value[i], + np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i]]) + line1f.set_data(np.arange(startp_i1, endp_i1)/out['samplerate'], fit1) + line1.set_data(np.arange(startp_i1, endp_i1)/out['samplerate'], out['i1'][startp_i1:endp_i1]) + line2.set_data(np.arange(startp_i1, endp_i1)/out['samplerate'], out['v1'][startp_i1:endp_i1]) + + ax1.set_title('{} Event {}\n Ionic Current Trace'.format(filter, i)) + ax1.relim() + ax2.relim() + ax1.autoscale_view(True, True, True) + ax2.autoscale_view(True, True, True) + fig.canvas.draw() + pp.savefig(fig) + print('{}: {} out of {} saved!'.format(toPlot, str(i), NumberOfEvents-1)) + pp.close() + else: + toPlot = [] + #Initialize all the cases + toPlot.append(['i1', '', np.uint64(f['LowPassSegmentation/i1/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i1/OnlyIndexes'].value)]) + #toPlot.append(['i1_Up', '', np.uint64(f['LowPassSegmentation/i1_Up/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i1_Up/OnlyIndexes'].value)]) + #toPlot.append(['', 'i2', np.uint64(f['LowPassSegmentation/i2/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i2/OnlyIndexes'].value)]) + #toPlot.append(['', 'i2_Up', np.uint64(f['LowPassSegmentation/i2_Up/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i2_Up/OnlyIndexes'].value)]) + #toPlot.append(['i1', 'i2', np.uint64(f['LowPassSegmentation/i1/CommonIndexesWithi2'].value), np.uint64(f['LowPassSegmentation/i2/CommonIndexesWithi1'].value)]) + #toPlot.append(['i1_Up', 'i2', np.uint64(f['LowPassSegmentation/i1_Up/CommonIndexesWithi2'].value), np.uint64(f['LowPassSegmentation/i2/CommonIndexesWithi1_Up'].value)]) + #toPlot.append(['i1_Up', 'i2_Up', 
np.uint64(f['LowPassSegmentation/i1_Up/CommonIndexesWithi2_Up'].value), np.uint64(f['LowPassSegmentation/i2_Up/CommonIndexesWithi1_Up'].value)]) + #toPlot.append(['i1', 'i2_Up', np.uint64(f['LowPassSegmentation/i1/CommonIndexesWithi2_Up'].value), np.uint64(f['LowPassSegmentation/i2_Up/CommonIndexesWithi1'].value)]) + + + line1data = np.array([]) + line2data = np.array([]) + line3data = np.array([]) + line4data = np.array([]) + line1fdata = np.array([]) + line2fdata = np.array([]) + for cases in toPlot: + NumberOfEvents = len(cases[2]) + for i in np.linspace(0, NumberOfEvents-1, NumberOfEvents*percentage, dtype=np.uint64): + if not cases[0] == '': + startp_i1 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'StartPoints'].value[cases[2][i]] - baseline) + endp_i1 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'EndPoints'].value[cases[2][i]] + baseline) + else: + startp_i1 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'StartPoints'].value[cases[3][i]] - baseline) + endp_i1 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'EndPoints'].value[cases[3][i]] + baseline) + if not cases[1] == '': + startp_i2 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'StartPoints'].value[cases[3][i]] - baseline) + endp_i2 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'EndPoints'].value[cases[3][i]] + baseline) + else: + startp_i2 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'StartPoints'].value[cases[2][i]] - baseline) + endp_i2 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'EndPoints'].value[cases[2][i]] + baseline) + + if startp_i1 < 0: + startp_i1 = 0 + if startp_i2 < 0: + startp_i2 = 0 + if endp_i1 > len(out['i1']): + endp_i1 = len(out['i1']) + if endp_i2 > len(out['i2']): + endp_i2 = len(out['i2']) + + i1 = out['i1'][startp_i1:endp_i1] + v1 = out['v1'][startp_i1:endp_i1] + i2 = out['i2'][startp_i2:endp_i2] + v2 = out['v2'][startp_i2:endp_i2] + + diff = len(i1)-len(i2) + if diff>0: #i1 is longer + i1 = i1[0:len(i2)] + v1 = v1[0:len(i2)] + if diff < 0: # i2 is longer + i2 = i2[0:len(i1)] + v2 = v2[0:len(i1)] + + line1data = np.append(line1data, i1) + line2data = np.append(line2data, i2) + line3data = np.append(line3data, v1) + line4data = np.append(line4data, v2) + + + if fitOn: + if not cases[0] == '': + if plotPoints: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]], + f['LowPassSegmentation/' + cases[0] + '/AllEvents/' + '{:09d}'.format(cases[2][i])].value, + np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]]]) + else: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]], + np.ones(endp_i1 - startp_i1 - 2 * baseline) * f[ + 'LowPassSegmentation/' + cases[0] + '/FitLevel'].value[cases[2][i]], + np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]]]) + if diff > 0: # i1 is longer + fit1 = fit1[0:len(i2)] + line1fdata = np.append(line1fdata, fit1) + if not cases[1] == '': + if plotPoints: + fit2 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + '/LocalBaseline'].value[cases[3][i]], + f['LowPassSegmentation/' + cases[1] + '/AllEvents/' + '{:09d}'.format( + cases[3][i])].value, + np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + '/LocalBaseline'].value[cases[3][i]]]) + else: + fit2 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + 
'/LocalBaseline'].value[cases[3][i]], + np.ones(endp_i2 - startp_i2 - 2 * baseline) * f['LowPassSegmentation/' + cases[1] + '/FitLevel'].value[cases[3][i]], + np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + '/LocalBaseline'].value[cases[3][i]]]) + if diff < 0: # i2 is longer + fit2 = fit2[0:len(i1)] + line2fdata = np.append(line2fdata, fit2) + + + fig = plt.figure() + ax1 = fig.add_subplot(311) + ax2 = fig.add_subplot(312, sharex=ax1) + ax3 = fig.add_subplot(313, sharex=ax1) + ax4 = ax3.twinx() + ax4.set_title('Voltage Trace') + ax1.set_ylabel('Current [nA]') + ax2.set_ylabel('Current [nA]') + ax3.set_ylabel('Ionic Voltage [V]') + ax3.set_xlabel('Time [s]') + ax1.set_xlabel('Time [s]') + ax2.set_xlabel('Time [s]') + ax1.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax2.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax3.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax4.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax3.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax1.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax2.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax4.set_ylabel('Tr. Voltage [V]') + + t = np.arange(len(line1data))/out['samplerate'] + t2 = np.arange(len(line2data))/out['samplerate'] + + ax1.plot(t, line1data) + ax2.plot(t2, line2data) + ax3.plot(t, line3data, 'c') + ax4.plot(t2, line4data, 'y') + if fitOn: + ax1.plot(t, line1fdata, 'r') + #ax2.plot(t2, line2fdata, 'r') + ax4.tick_params(axis='y', colors='y') + ax4.yaxis.label.set_color('y') + ax3.tick_params(axis='y', colors='c') + ax3.yaxis.label.set_color('c') + ax1.set_title('Event {} of {}\n Ionic Current Trace "{}"'.format(i, NumberOfEvents, cases[0])) + ax1.relim() + ax2.relim() + ax3.relim() + ax4.relim() + ax1.autoscale_view(True, True, True) + ax2.autoscale_view(True, True, True) + # voltage plots + ax3.autoscale_view(True, True, False) + ax4.autoscale_view(True, True, False) + ax3.set_ylim(np.min(line3data) - np.mean(line3data) * 5 / 10, np.max(line3data) + np.mean(line3data) * 1 / 10) + ax4.set_ylim([np.min(line4data) - np.mean(line4data) * 2 / 10, np.max(line4data) + np.mean(line4data) * 5 / 10]) + fig.tight_layout() + fig.canvas.draw() + plt.show() + + diff --git a/FolderToExcel.py b/FolderToExcel.py new file mode 100644 index 0000000..f897976 --- /dev/null +++ b/FolderToExcel.py @@ -0,0 +1,36 @@ +import pandas as pd +import datetime +import os +from tkinter.filedialog import askopenfilenames +import numpy as np +title = '20180711_RW18_Temp' + +filenames = list(askopenfilenames()) # show an "Open" dialog box and return the path to the selected file + +timestrings=[] +for fi in filenames: + timestrings.append(datetime.datetime.fromtimestamp(os.path.getmtime(fi)).isoformat()) + +# Create a Pandas dataframe from some data. +df = pd.DataFrame({'Filename': filenames, + 'Cis Conc [M]': np.ones(len(filenames)), + 'Trans Conc [M]': np.ones(len(filenames)), + 'Laser Power [mW]': np.zeros(len(filenames)), + 'Lens [mm]': 0*np.ones(len(filenames)), + 'IV': np.ones(len(filenames)), + 'UseForFigure1': np.ones(len(filenames)), + 'Time': timestrings, + 'UseForCondEvolution': np.ones(len(filenames)), + 'Wavelength': 640*np.ones(len(filenames)), + 'SurfaceChargePlot': np.ones(len(filenames)), + 'UseForDecay': np.ones(len(filenames)), + 'pH': 7.4*np.ones(len(filenames))}) + +# Create a Pandas Excel writer using XlsxWriter as the engine. 
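+# (Assumes the xlsxwriter package is installed; openpyxl would be an alternative engine.)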
+writer = pd.ExcelWriter(str(os.path.split(filenames[0])[0])+os.sep+title+'.xlsx', engine='xlsxwriter')
+
+# Convert the dataframe to an XlsxWriter Excel object.
+df.to_excel(writer, sheet_name='Sheet1')
+
+# Close the Pandas Excel writer and output the Excel file.
+writer.save()
\ No newline at end of file
diff --git a/Functions.py b/Functions.py
new file mode 100644
index 0000000..c360274
--- /dev/null
+++ b/Functions.py
@@ -0,0 +1,1439 @@
+import matplotlib
+#matplotlib.use('Qt5Agg')
+import numpy as np
+import math
+import scipy
+import scipy.signal as sig
+import os
+import pickle as pkl
+from scipy import constants as cst
+from scipy import io
+from scipy import signal
+from PyQt5 import QtGui, QtWidgets
+from numpy import linalg as lin
+import pyqtgraph as pg
+import matplotlib.pyplot as plt
+from matplotlib.backends.backend_pdf import PdfPages
+import pandas as pd
+import h5py
+from timeit import default_timer as timer
+import platform
+from scipy.optimize import curve_fit
+from sys import platform  # note: shadows the platform module imported above
+import pyabf
+from matplotlib.ticker import EngFormatter
+
+def GetKClConductivity(Conc, Temp):
+    p = pkl.load(open('KCl_ConductivityValues.p', 'rb'))
+    return np.polyval(p[str(Conc)], Temp)
+
+def GetTempFromKClConductivity(Conc, Cond):
+    p = pkl.load(open('KCl_ConductivityValues.p', 'rb'))
+    return (Cond-p[str(Conc)][1])/p[str(Conc)][0]
+
+def RecursiveLowPassFast(signal, coeff, samplerate):
+    # Pad with the baseline mean so the recursive filter settles before the data starts.
+    padlen = np.uint64(samplerate)
+    prepadded = np.ones(padlen)*np.mean(signal[0:1000])
+    signaltofilter = np.concatenate((prepadded, signal))
+
+    mltemp = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], signaltofilter)
+    vltemp = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], np.square(signaltofilter - mltemp))
+
+    ml = np.delete(mltemp, np.arange(padlen))
+    vl = np.delete(vltemp, np.arange(padlen))
+
+    sl = ml - coeff['S'] * np.sqrt(vl)
+    Ni = len(signal)
+    points = np.array(np.where(signal <= sl)[0])
+    to_pop = np.array([])
+    for i in range(1, len(points)):
+        if points[i] - points[i - 1] == 1:
+            to_pop = np.append(to_pop, i)
+    points = np.unique(np.delete(points, to_pop))
+    RoughEventLocations = []
+    NumberOfEvents = 0
+
+    for i in points:
+        if NumberOfEvents != 0:
+            if i >= RoughEventLocations[NumberOfEvents-1][0] and i <= RoughEventLocations[NumberOfEvents-1][1]:
+                continue
+        NumberOfEvents += 1
+        start = i
+        El = ml[i] + coeff['E'] * np.sqrt(vl[i])
+        Mm = ml[i]
+        Vv = vl[i]
+        duration = 0
+        endp = start
+        if (endp + 1) < len(signal):
+            while signal[endp + 1] < El and endp < (Ni - 2):# and duration < coeff['eventlengthLimit']*samplerate:
+                duration += 1
+                endp += 1
+        if duration >= coeff['eventlengthLimit'] * samplerate or endp > (Ni - 10):
+            NumberOfEvents -= 1
+            continue
+        else:
+            k = start
+            while signal[k] < Mm and k > 1:
+                k -= 1
+            start = k - 1
+            k2 = i + 1
+            #while signal[k2] > Mm:
+            #    k2 -= 1
+            #endp = k2
+            if start < 0:
+                start = 0
+            RoughEventLocations.append((start, endp, ml[start], vl[start]))
+
+    return np.array(RoughEventLocations)#, ml, vl, sl
+
+def RecursiveLowPassFastUp(signal, coeff, samplerate):
+    ml = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], signal)
+    vl = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], np.square(signal - ml))
+    sl = ml + coeff['S'] * np.sqrt(vl)
+    Ni = len(signal)
+    points = np.array(np.where(signal >= sl)[0])
+    to_pop = np.array([])
+    for i in range(1, len(points)):
+        if points[i] - points[i - 1] == 1:
+            to_pop = np.append(to_pop, i)
+    points = np.delete(points, to_pop)
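+    # A detection at index 0 has no preceding baseline, so it is dropped below.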
+    points = np.delete(points, np.array(np.where(points == 0)[0]))
+
+    RoughEventLocations = []
+    NumberOfEvents = 0
+    for i in points:
+        if NumberOfEvents != 0:
+            if i >= RoughEventLocations[NumberOfEvents-1][0] and i <= RoughEventLocations[NumberOfEvents-1][1]:
+                continue
+        NumberOfEvents += 1
+        start = i
+        El = ml[i] + coeff['E'] * np.sqrt(vl[i])
+        Mm = ml[i]
+        duration = 0
+        while signal[i + 1] > El and i < (Ni - 2) and duration < coeff['eventlengthLimit']*samplerate:
+            duration += 1
+            i += 1
+        if duration >= coeff['eventlengthLimit']*samplerate or i > (Ni - 10):
+            NumberOfEvents -= 1
+        else:
+            k = start
+            while signal[k] > Mm and k > 2:
+                k -= 1
+            start = k - 1
+            k2 = i + 1
+            while signal[k2] > Mm:
+                k2 -= 1
+            endp = k2
+            RoughEventLocations.append((start, endp, ml[start], vl[start]))
+
+    return np.array(RoughEventLocations)
+
+def ImportABF(datafilename):
+    abf = pyabf.ABF(datafilename)
+    abf.info()  # shows what is available
+    output = {'type': 'Clampfit', 'graphene': 0, 'samplerate': abf.pointsPerSec, 'i1': -20000./65536 * abf.dataY, 'v1': abf.dataC, 'filename': datafilename}
+    return output
+
+def ImportAxopatchData(datafilename):
+    x = np.fromfile(datafilename, np.dtype('>f4'))
+    f = open(datafilename, 'rb')
+    graphene = 0
+    for i in range(0, 10):
+        a = str(f.readline())
+        #print(a)
+        if 'Acquisition' in a or 'Sample Rate' in a:
+            samplerate = int(''.join(i for i in a if i.isdigit()))/1000
+        if 'FEMTO preamp Bandwidth' in a:
+            femtoLP = int(''.join(i for i in a if i.isdigit()))
+        if 'I_Graphene' in a:
+            graphene = 1
+            print('This File Has a Graphene Channel!')
+    end = len(x)
+    if graphene:
+        #pore current
+        i1 = x[250:end-3:4]
+        #graphene current
+        i2 = x[251:end-2:4]
+        #pore voltage
+        v1 = x[252:end-1:4]
+        #graphene voltage
+        v2 = x[253:end:4]
+        print('The femto was set to: {} Hz, if this value was correctly entered in the LabView!'.format(str(femtoLP)))
+        output = {'FemtoLowPass': femtoLP, 'type': 'Axopatch', 'graphene': 1, 'samplerate': samplerate, 'i1': i1, 'v1': v1, 'i2': i2, 'v2': v2, 'filename': datafilename}
+    else:
+        i1 = np.array(x[250:end-1:2])
+        v1 = np.array(x[251:end:2])
+        output = {'type': 'Axopatch', 'graphene': 0, 'samplerate': samplerate, 'i1': i1, 'v1': v1, 'filename': datafilename}
+    return output
+
+def ImportChimeraRaw(datafilename):
+    matfile = io.loadmat(str(os.path.splitext(datafilename)[0]))
+    #buffersize=matfile['DisplayBuffer']
+    data = np.fromfile(datafilename, np.dtype(' 0, 'neg': Item['Voltage'] < 0}
+        a = {}
+        b = {}
+        sigma_a = {}
+        sigma_b = {}
+        b_save = {}
+        x_values = {}
+
+        for part in parts:
+            (a[part], b[part], sigma_a[part], sigma_b[part], b_save[part]) = YorkFit(
+                Item['Voltage'][parts[part]].flatten(), Item['Mean'][parts[part]].flatten(),
+                1e-12 * np.ones(len(Item['Voltage'][parts[part]].flatten())),
+                Item['STD'][parts[part]].flatten())
+        factor = b['neg'] / b['pos']
+        Item['Rectification'] = factor
+        ItemExp['Rectification'] = factor
+
+        ###EXPONENTIAL METHOD###
+        if approach != 'mean':
+            l = 1000
+            ItemExp['StartPoint'] = np.zeros(l, dtype=np.uint64)
+            ItemExp['EndPoint'] = np.zeros(l, dtype=np.uint64)
+            baselinestd = np.std(output[current][0:np.int64(1 * output['samplerate'])])
+            movingstd = pd.Series(output[current]).rolling(10).std().values  # pd.rolling_std was removed from pandas
+
+            # Extract The Desired Parts
+            ind = (np.abs(movingstd - baselinestd) < 1e-9) & (np.abs(output[current]) < np.max(output[current]) / 2)
+
+            # How Many Parts?
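+            # Scan the quiet-region mask: a rising edge opens a candidate segment, and
+            # the segment is kept only if it stays quiet for more than one second of samples.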
+ restart = True + counter = 0 + for i, value in enumerate(ind): + if value and restart: + ItemExp['StartPoint'][counter] = i + print('Start: {}\t'.format(i)) + restart = False + elif not value and not restart: + if ((i - 1) - ItemExp['StartPoint'][counter]) > 1 * output['samplerate']: + ItemExp['EndPoint'][counter] = i - 1 + print('End: {}'.format(i - 1)) + restart = True + counter += 1 + else: + restart = True + + if not restart: + counter += 1 + ItemExp['EndPoint'][counter] = len(ind) + + ItemExp['StartPoint'] = ItemExp['StartPoint'][np.where(ItemExp['StartPoint'])] + ItemExp['EndPoint'] = ItemExp['EndPoint'][np.where(ItemExp['EndPoint'])] + NumberOfPieces = len(ItemExp['StartPoint']) + ItemExp['Voltage'] = np.zeros(NumberOfPieces) + ItemExp['ExpValues'] = np.zeros((3, NumberOfPieces)) + ItemExp['STD'] = np.zeros(NumberOfPieces) + ItemExp['Mean'] = np.zeros(NumberOfPieces) + + # Make It Piecewise + for i in np.arange(0, NumberOfPieces): + trace = output[current][ItemExp['StartPoint'][i]:ItemExp['EndPoint'][i]] + popt, pcov = MakeExponentialFit(np.arange(len(trace)) / output['samplerate'], trace) + + ItemExp['Voltage'][i] = output[sweepedChannel][ItemExp['StartPoint'][i]] + if popt[0]: + ItemExp['ExpValues'][:, i] = popt + ItemExp['Mean'][i] = popt[2] + try: + ItemExp['STD'][i] = np.sqrt(np.diag(pcov))[2] + except RuntimeWarning: + ItemExp['STD'][i] = np.sqrt(np.std(trace)) + print('ExpFit: STD of voltage {} failed calculating...'.format(ItemExp['Voltage'][i])) + else: + print('Exponential Fit on for ' + current + ' failed at V=' + str(ItemExp['Voltage'][0])) + ItemExp['Mean'][i] = np.mean(trace) + ItemExp['STD'][i] = np.std(trace) + + ## FIT THE EXP CALCULATED VALUES ### + (a, b, sigma_a, sigma_b, b_save) = YorkFit(ItemExp['Voltage'], ItemExp['Mean'], + 1e-12 * np.ones(len(ItemExp['Voltage'])), ItemExp['STD']) + x_fit = np.linspace(min(Item['Voltage']), max(Item['Voltage']), 1000) + y_fit = scipy.polyval([b, a], x_fit) + ItemExp['YorkFitValues'] = {'x_fit': x_fit, 'y_fit': y_fit, 'Yintercept': a, 'Slope': b, + 'Sigma_Yintercept': sigma_a, + 'Sigma_Slope': sigma_b, 'Parameter': b_save} + All[current + '_Exp'] = ItemExp + + ##Remove NaNs + nans = np.isnan(Item['Mean'][:]) + nans = np.logical_not(nans) + Item['Voltage'] = Item['Voltage'][nans] + Item['StartPoint'] = Item['StartPoint'][nans] + Item['EndPoint'] = Item['EndPoint'][nans] + Item['Mean'] = Item['Mean'][nans] + Item['STD'] = Item['STD'][nans] + + ## Restrict to set voltage Range + if UseVoltageRange is not 0: + print('Voltages Cut') + ind = np.argwhere((Item['Voltage'] >= UseVoltageRange[0]) & (Item['Voltage'] <= UseVoltageRange[1])) + Item['Voltage'] = Item['Voltage'][ind].flatten() + Item['StartPoint'] = Item['StartPoint'][ind].flatten() + Item['EndPoint'] = Item['EndPoint'][ind].flatten() + Item['Mean'] = Item['Mean'][ind].flatten() + Item['STD'] = Item['STD'][ind].flatten() + + ## FIT THE MEAN CALCULATED VALUES ### + (a, b, sigma_a, sigma_b, b_save) = YorkFit(Item['Voltage'].flatten(), Item['Mean'].flatten(), 1e-9 * np.ones(len(Item['Voltage'])), Item['STD'].flatten()) + x_fit = np.linspace(min(Item['Voltage']), max(Item['Voltage']), 1000) + y_fit = scipy.polyval([b, a], x_fit) + Item['YorkFitValues'] = {'x_fit': x_fit, 'y_fit': y_fit, 'Yintercept': a, 'Slope': b, 'Sigma_Yintercept': sigma_a, + 'Sigma_Slope': sigma_b, 'Parameter': b_save} + + All[current] = Item + All['Currents'] = currents + + return All + +def ExpFunc(x, a, b, c): + return a * np.exp(-b * x) + c + +def YorkFit(X, Y, sigma_X, sigma_Y, r=0): + N_itermax=10 
+    #maximum number of iterations
+    tol = 1e-15  #relative tolerance to stop at
+    N = len(X)
+    temp = np.matrix([X, np.ones(N)])
+    #make initial guess at b using linear least squares
+
+    tmp = np.matrix(Y)*lin.pinv(temp)
+    b_lse = np.array(tmp)[0][0]
+    #a_lse=tmp(2);
+    b = b_lse  #initial guess
+    omega_X = np.true_divide(1, np.power(sigma_X, 2))
+    omega_Y = np.true_divide(1, np.power(sigma_Y, 2))
+    alpha = np.sqrt(omega_X*omega_Y)
+    b_save = np.zeros(N_itermax+1)  #vector to save b iterations in
+    b_save[0] = b
+
+    for i in np.arange(N_itermax):
+        W = omega_X*omega_Y/(omega_X+b*b*omega_Y-2*b*r*alpha)
+
+        X_bar = np.sum(W*X)/np.sum(W)
+        Y_bar = np.sum(W*Y)/np.sum(W)
+
+        U = X-X_bar
+        V = Y-Y_bar
+
+        beta = W*(U/omega_Y+b*V/omega_X-(b*U+V)*r/alpha)
+
+        b = sum(W*beta*V)/sum(W*beta*U)
+        b_save[i+1] = b
+        if np.abs((b_save[i+1]-b_save[i])/b_save[i+1]) < tol:
+            break
+
+    a = Y_bar-b*X_bar
+    x = X_bar+beta
+    y = Y_bar+b*beta
+    x_bar = sum(W*x)/sum(W)
+    y_bar = sum(W*y)/sum(W)
+    u = x-x_bar
+    #v = y-y_bar
+    sigma_b = np.sqrt(1/sum(W*u*u))
+    sigma_a = np.sqrt(1./sum(W)+x_bar*x_bar*sigma_b*sigma_b)
+    return (a, b, sigma_a, sigma_b, b_save)
+
+def DoubleExpFunc(x, Y0, percentFast, plateau, Kfast, Kslow):
+    SpanFast = (Y0-plateau) * percentFast * 0.01
+    SpanSlow = (Y0-plateau) * (100-percentFast) * 0.01
+    return plateau + SpanFast*np.exp(-Kfast*x) + SpanSlow*np.exp(-Kslow*x)
+
+def MakeExponentialFit(xdata, ydata):
+    try:
+        popt, pcov = curve_fit(ExpFunc, xdata, ydata, method='lm', xtol=1e-20, ftol=1e-12, gtol=1e-20)
+        #popt, pcov = curve_fit(DoubleExpFunc, xdata, ydata, p0=(ydata[0], 50, ydata[-1], 1 / 15, 1 / 60))
+        return (popt, pcov)
+    except RuntimeError:
+        popt = (0, 0, 0)
+        pcov = 0
+        return (popt, pcov)
+
+def CutDataIntoVoltageSegments(output):
+    sweepedChannel = ''
+    if output['type'] == 'ChimeraNotRaw' or (output['type'] == 'Axopatch' and not output['graphene']):
+        ChangePoints = np.where(np.diff(output['v1']))[0]
+        sweepedChannel = 'v1'
+        if len(ChangePoints) == 0:
+            print('No voltage sweeps in this file:\n{}'.format(output['filename']))
+            return (0, 0)
+    elif (output['type'] == 'Axopatch' and output['graphene']):
+        ChangePoints = np.where(np.diff(output['v1']))[0]
+        sweepedChannel = 'v1'
+        if len(ChangePoints) == 0:
+            ChangePoints = np.where(np.diff(output['v2']))[0]
+            if len(ChangePoints) == 0:
+                print('No voltage sweeps in this file')
+                return (0, 0)
+            else:
+                sweepedChannel = 'v2'
+    print('Cutting into Segments...\n{} change points detected in channel {}...'.format(len(ChangePoints), sweepedChannel))
+    return (ChangePoints, sweepedChannel)
+
+def CalculatePoreSize(G, L, s):
+    return (G+np.sqrt(G*(G+16*L*s/np.pi)))/(2*s)
+
+def CalculateConductance(L, s, d):
+    return s*1/(4*L/(np.pi*d**2)+1/d)
+
+def GetSurfaceChargeFromLeeEquation(s, c, D, G, L, A, B, version=1):
+    lDu = 1/(8*np.pi*B*D*s)*(
+        version * np.sqrt((4*np.pi*A*D**2*s+np.pi*B*D**2*s - 4*B*G*L - 8*np.pi*D*G)**2
+                          - 16*np.pi*B*D*s*(np.pi*A*D**3*s - 4*A*D*G*L - 2*np.pi*D**2*G))
+        - 4*np.pi*A*D**2*s - np.pi*B*D**2*s + 4*B*G*L + 8*np.pi*D*G
+    )
+    e = cst.elementary_charge
+    Na = cst.Avogadro
+    return lDu*(2 * c * Na * 1e3 * e)
+
+def ConductivityFromConductanceModel(L, d, G):
+    return G * (4 * L / (np.pi * d ** 2) + 1 / d)
+
+def PlotIV(output, AllData, current='i1', unit=1e9, axis='', WithFit=1, PoreSize=[0, 0], title='', labeltxt='', Polynomial=0, color='b', RectificationFactor=0, useEXP=0):
+    if useEXP:
+        current = current + '_Exp'
+
+    if labeltxt == '':
+        labeltxt = str(os.path.split(output['filename'])[1])[:-4]
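+    # The rectification factor below is the ratio of negative- to positive-branch
+    # slopes, each obtained from its own York (errors-in-both-variables) fit.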
+
+    ind = np.argsort(AllData[current]['Voltage'])
+
+    #Calculate Rectification Factor
+    if RectificationFactor:
+        parts = {'pos': AllData[current]['Voltage'] > 0, 'neg': AllData[current]['Voltage'] < 0}
+        a = {}; b = {}; sigma_a = {}; sigma_b = {}; b_save = {}; x_values = {}
+
+        for part in parts:
+            (a[part], b[part], sigma_a[part], sigma_b[part], b_save[part]) = YorkFit(AllData[current]['Voltage'][parts[part]], AllData[current]['Mean'][parts[part]], 1e-12 * np.ones(len(AllData[current]['Voltage'][parts[part]])), AllData[current]['STD'][parts[part]])
+            x_values[part] = np.linspace(min(AllData[current]['Voltage'][parts[part]]), max(AllData[current]['Voltage'][parts[part]]), 1000)
+            axis.plot(x_values[part], np.polyval([b[part], a[part]], x_values[part])*unit, color=color, ls='--')
+        factor = b['neg'] / b['pos']
+        labeltxt = labeltxt + ', R:{:4.2f}'.format(factor)
+
+    #Polynomial is guide to the eye
+    if not Polynomial:
+        axis.errorbar(AllData[current]['Voltage'], AllData[current]['Mean']*unit, yerr=AllData[current]['STD']*unit, fmt='o', label=labeltxt, color=color)
+    else:
+        p = np.polyfit(AllData[current]['Voltage'][ind], AllData[current]['Mean'][ind]*unit, Polynomial)
+        axis.errorbar(AllData[current]['Voltage'][ind], AllData[current]['Mean'][ind]*unit, yerr=AllData[current]['STD'][ind]*unit, fmt='o', label=labeltxt, color=color)
+        axis.plot(AllData[current]['Voltage'][ind], np.polyval(p, AllData[current]['Voltage'][ind]), color=color)
+
+    axis.set_ylabel('Current ' + current + ' [nA]')
+    axis.set_xlabel('Voltage ' + AllData[current]['SweepedChannel'] + ' [V]')
+
+    if WithFit:
+        axis.set_title(title + '\nG={}S, R={}Ohm'.format(pg.siFormat(AllData[current]['YorkFitValues']['Slope']), pg.siFormat(1/(AllData[current]['YorkFitValues']['Slope']))))
+        axis.plot(AllData[current]['YorkFitValues']['x_fit'], AllData[current]['YorkFitValues']['y_fit']*unit, 'r--', label='Linear Fit of '+labeltxt)
+    else:
+        axis.set_title(title + ' IV Plot')
+
+    if PoreSize[0]:
+        textstr = 'Pore Size\nConductivity: {}S/m\nLength: {}m\nDiameter: {}m'.format(pg.siFormat(PoreSize[0]), pg.siFormat(PoreSize[1]), pg.siFormat(CalculatePoreSize(AllData[current]['YorkFitValues']['Slope'], PoreSize[1], PoreSize[0])))
+        axis.text(0.05, 0.95, textstr, transform=axis.transAxes, fontsize=12,
+                  verticalalignment='top', bbox=dict(boxstyle='round', facecolor='wheat', alpha=0.5))
+    return axis
+
+def RefinedEventDetection(out, AnalysisResults, signals, limit):
+    for sig in signals:
+        #Choose correct reference
+        if sig == 'i1_Up':
+            sig1 = 'i1'
+        elif sig == 'i2_Up':
+            sig1 = 'i2'
+        else:
+            sig1 = sig
+        if sig == 'i1' or sig == 'i1_Up':
+            volt = 'v1'
+        elif sig == 'i2' or sig == 'i2_Up':
+            volt = 'v2'
+
+        if len(AnalysisResults[sig]['RoughEventLocations']) > 1:
+            startpoints = np.uint64(AnalysisResults[sig]['RoughEventLocations'][:, 0])
+            endpoints = np.uint64(AnalysisResults[sig]['RoughEventLocations'][:, 1])
+            localBaseline = AnalysisResults[sig]['RoughEventLocations'][:, 2]
+            localVariance = AnalysisResults[sig]['RoughEventLocations'][:, 3]
+
+            CusumBaseline = 500
+            numberofevents = len(startpoints)
+            AnalysisResults[sig]['StartPoints'] = startpoints
+            AnalysisResults[sig]['EndPoints'] = endpoints
+            AnalysisResults[sig]['LocalBaseline'] = localBaseline
+            AnalysisResults[sig]['LocalVariance'] = localVariance
+            AnalysisResults[sig]['NumberOfEvents'] = len(startpoints)
+
+            deli = np.zeros(numberofevents)
+            dwell = np.zeros(numberofevents)
+            fitvalue = np.zeros(numberofevents)
+            AllEvents = []
+
+            #####FIX THE UP AND DOWN
STORY!! ALL THE PROBLEMS ARE IN HERE... + for i in range(numberofevents): + length = endpoints[i] - startpoints[i] + if out[sig1][startpoints[i]] < 0 and (sig == 'i1_Up' or sig == 'i2_Up'): + if length <= limit and length > 3: + # Impulsion Fit to minimal value + fitvalue[i] = np.max(out[sig1][startpoints[i]+np.uint(1):endpoints[i]-np.uint(1)]) + elif length > limit: + fitvalue[i] = np.mean(out[sig1][startpoints[i]+np.uint(5):endpoints[i]-np.uint(5)]) + else: + fitvalue[i] = np.max(out[sig1][startpoints[i]:endpoints[i]]) + deli[i] = -(localBaseline[i] - fitvalue[i]) + + elif out[sig1][startpoints[i]] < 0 and (sig == 'i1' or sig == 'i2'): + if length <= limit and length > 3: + # Impulsion Fit to minimal value + fitvalue[i] = np.min(out[sig1][startpoints[i]+np.uint(1):endpoints[i]-np.uint(1)]) + elif length > limit: + fitvalue[i] = np.mean(out[sig1][startpoints[i]+np.uint(5):endpoints[i]-np.uint(5)]) + else: + fitvalue[i] = np.min(out[sig1][startpoints[i]:endpoints[i]]) + deli[i] = -(localBaseline[i] - fitvalue[i]) + + elif out[sig1][startpoints[i]] > 0 and (sig == 'i1_Up' or sig == 'i2_Up'): + if length <= limit and length > 3: + # Impulsion Fit to minimal value + fitvalue[i] = np.max(out[sig1][startpoints[i]+np.uint(1):endpoints[i]-np.uint(1)]) + elif length > limit: + fitvalue[i] = np.mean(out[sig1][startpoints[i]+np.uint(5):endpoints[i]-np.uint(5)]) + else: + fitvalue[i] = np.max(out[sig1][startpoints[i]:endpoints[i]]) + deli[i] = (localBaseline[i] - fitvalue[i]) + + elif out[sig1][startpoints[i]] > 0 and (sig == 'i1' or sig == 'i2'): + if length <= limit and length > 3: + # Impulsion Fit to minimal value + fitvalue[i] = np.min(out[sig1][startpoints[i]+np.uint(1):endpoints[i]-np.uint(1)]) + elif length > limit: + fitvalue[i] = np.mean(out[sig1][startpoints[i]+np.uint(5):endpoints[i]-np.uint(5)]) + else: + fitvalue[i] = np.min(out[sig1][startpoints[i]:endpoints[i]]) + deli[i] = (localBaseline[i] - fitvalue[i]) + else: + print('Strange event that has to come from a neighboring universe...Computer will self-destruct in 3s!') + dwell[i] = (endpoints[i] - startpoints[i]) / out['samplerate'] + AllEvents.append(out[sig1][startpoints[i]:endpoints[i]]) + frac = deli / localBaseline + dt = np.array(0) + dt = np.append(dt, np.diff(startpoints) / out['samplerate']) + numberofevents = len(dt) + #AnalysisResults[sig]['CusumFits'] = AllFits + AnalysisResults[sig]['FractionalCurrentDrop'] = frac + AnalysisResults[sig]['DeltaI'] = deli + AnalysisResults[sig]['DwellTime'] = dwell + AnalysisResults[sig]['Frequency'] = dt + AnalysisResults[sig]['LocalVoltage'] = out[volt][startpoints] + AnalysisResults[sig]['AllEvents'] = AllEvents + AnalysisResults[sig]['FitLevel'] = fitvalue + else: + AnalysisResults[sig]['NumberOfEvents'] = 0 + + return AnalysisResults + +def CorrelateTheTwoChannels(AnalysisResults, DelayLimit, Ch1 = 'i1', Ch2 = 'i2'): + if len(AnalysisResults[Ch1]['RoughEventLocations']) is not 0: + i1StartP = np.int64(AnalysisResults[Ch1]['StartPoints'][:]) + else: + i1StartP = [] + if len(AnalysisResults[Ch2]['RoughEventLocations']) is not 0: + i2StartP = np.int64(AnalysisResults[Ch2]['StartPoints'][:]) + else: + i2StartP = [] + + # Common Events, # Take Longer + CommonEventsi1Index = np.array([], dtype=np.uint64) + CommonEventsi2Index = np.array([], dtype=np.uint64) + + for k in i1StartP: + val = i2StartP[(i2StartP > k - DelayLimit) & (i2StartP < k + DelayLimit)] + if len(val) == 1: + CommonEventsi2Index = np.append(CommonEventsi2Index, np.uint64(np.where(i2StartP == val)[0][0])) + 
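+            # remember the matching i1 event index alongside its i2 partner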
CommonEventsi1Index = np.append(CommonEventsi1Index, np.uint64(np.where(i1StartP == k)[0][0])) + if len(val) > 1: + diff = np.absolute(val-k) + CommonEventsi2Index = np.append(CommonEventsi2Index, np.uint64(np.where(i2StartP == val[np.argmin(diff)]))[0][0]) + CommonEventsi1Index = np.append(CommonEventsi1Index, np.uint64(np.where(i1StartP == k)[0][0])) + # Only i1 + Onlyi1Indexes = np.delete(range(len(i1StartP)), CommonEventsi1Index) + #Onlyi1Indexes=[] + # Only i2 + Onlyi2Indexes = np.delete(range(len(i2StartP)), CommonEventsi2Index) + #Onlyi2Indexes=[] + + CommonIndexes={} + CommonIndexes[Ch1]=CommonEventsi1Index + CommonIndexes[Ch2]=CommonEventsi2Index + OnlyIndexes={} + OnlyIndexes[Ch1] = Onlyi1Indexes + OnlyIndexes[Ch2] = Onlyi2Indexes + return (CommonIndexes, OnlyIndexes) + +def PlotEvent(fig1, t1, i1, t2 = [], i2 = [], fit1 = np.array([]), fit2 = np.array([]), channel = 'i1'): + if len(t2)==0: + ax1 = fig1.add_subplot(111) + ax1.plot(t1, i1*1e9, 'b') + if len(fit1) is not 0: + ax1.plot(t1, fit1*1e9, 'y') + ax1.set_ylabel(channel + ' Current [nA]') + ax1.set_xlabel(channel + ' Time [s]') + ax1.ticklabel_format(useOffset=False) + ax1.ticklabel_format(useOffset=False) + return ax1 + else: + ax1 = fig1.add_subplot(211) + ax2 = fig1.add_subplot(212, sharex=ax1) + ax1.plot(t1, i1*1e9, 'b') + if len(fit1) is not 0: + ax1.plot(t1, fit1*1e9, 'y') + ax2.plot(t2, i2*1e9, 'r') + if len(fit2) is not 0: + ax2.plot(t2, fit2*1e9, 'y') + ax1.set_ylabel('Ionic Current [nA]') + #ax1.set_xticklabels([]) + ax2.set_ylabel('Transverse Current [nA]') + ax2.set_xlabel('Time [s]') + ax2.ticklabel_format(useOffset=False) + ax2.ticklabel_format(useOffset=False) + ax1.ticklabel_format(useOffset=False) + ax1.ticklabel_format(useOffset=False) + return ax1, ax2 + +def SaveAllPlots(CommonIndexes, OnlyIndexes, AnalysisResults, directory, out, buffer, withFit = 1): + if len(CommonIndexes['i1']) is not 0: + # Plot All Common Events + pp = PdfPages(directory + '_SavedEventsCommon.pdf') + ind1 = np.uint64(CommonIndexes['i1']) + ind2 = np.uint64(CommonIndexes['i2']) + + t = np.arange(0, len(out['i1'])) + t = t / out['samplerate'] * 1e3 + count=1 + for eventnumber in range(len(ind1)): + parttoplot = np.arange(AnalysisResults['i1']['StartPoints'][ind1[eventnumber]] - buffer, + AnalysisResults['i1']['EndPoints'][ind1[eventnumber]] + buffer, 1, dtype=np.uint64) + parttoplot2 = np.arange(AnalysisResults['i2']['StartPoints'][ind2[eventnumber]] - buffer, + AnalysisResults['i2']['EndPoints'][ind2[eventnumber]] + buffer, 1, dtype=np.uint64) + + fit1 = np.concatenate([np.ones(buffer) * AnalysisResults['i1']['LocalBaseline'][ind1[eventnumber]], + np.ones(AnalysisResults['i1']['EndPoints'][ind1[eventnumber]] - AnalysisResults['i1']['StartPoints'][ + ind1[eventnumber]]) * ( + AnalysisResults['i1']['LocalBaseline'][ind1[eventnumber]] - AnalysisResults['i1']['DeltaI'][ind1[eventnumber]]), + np.ones(buffer) * AnalysisResults['i1']['LocalBaseline'][ind1[eventnumber]]]) + + fit2 = np.concatenate([np.ones(buffer) * AnalysisResults['i2']['LocalBaseline'][ind2[eventnumber]], + np.ones(AnalysisResults['i2']['EndPoints'][ind2[eventnumber]] - AnalysisResults['i2']['StartPoints'][ + ind2[eventnumber]]) * ( + AnalysisResults['i2']['LocalBaseline'][ind2[eventnumber]] - AnalysisResults['i2']['DeltaI'][ind2[eventnumber]]), + np.ones(buffer) * AnalysisResults['i2']['LocalBaseline'][ind2[eventnumber]]]) + if withFit: + fig = PlotEvent(t[parttoplot], out['i1'][parttoplot], t[parttoplot2], out['i2'][parttoplot2], + fit1=fit1, fit2=fit2) + else: + fig = 
PlotEvent(t[parttoplot], out['i1'][parttoplot], t[parttoplot2], out['i2'][parttoplot2]) + + if not np.mod(eventnumber+1,200): + pp.close() + pp = PdfPages(directory + '_SavedEventsCommon_' + str(count) + '.pdf') + count+=1 + pp.savefig(fig) + print('{} out of {} saved!'.format(str(eventnumber), str(len(ind1)))) + print('Length i1: {}, Fit i1: {}'.format(len(out['i1'][parttoplot]), len(fit1))) + print('Length i2: {}, Fit i2: {}'.format(len(out['i2'][parttoplot2]), len(fit2))) + fig.clear() + plt.close(fig) + pp.close() + + if len(OnlyIndexes['i1']) is not 0: + # Plot All i1 + pp = PdfPages(directory + '_SavedEventsOnlyi1.pdf') + ind1 = np.uint64(OnlyIndexes['i1']) + + t = np.arange(0, len(out['i1'])) + t = t / out['samplerate'] * 1e3 + count=1 + for eventnumber in range(len(ind1)): + parttoplot = np.arange(AnalysisResults['i1']['StartPoints'][ind1[eventnumber]] - buffer, + AnalysisResults['i1']['EndPoints'][ind1[eventnumber]] + buffer, 1, dtype=np.uint64) + + fit1 = np.concatenate([np.ones(buffer) * AnalysisResults['i1']['LocalBaseline'][ind1[eventnumber]], + np.ones(AnalysisResults['i1']['EndPoints'][ind1[eventnumber]] - AnalysisResults['i1']['StartPoints'][ + ind1[eventnumber]]) * ( + AnalysisResults['i1']['LocalBaseline'][ind1[eventnumber]] - AnalysisResults['i1']['DeltaI'][ind1[eventnumber]]), + np.ones(buffer) * AnalysisResults['i1']['LocalBaseline'][ind1[eventnumber]]]) + + fig = PlotEvent(t[parttoplot], out['i1'][parttoplot], t[parttoplot], out['i2'][parttoplot], fit1=fit1) + if not np.mod(eventnumber+1,200): + pp.close() + pp = PdfPages(directory + '_SavedEventsCommon_' + str(count) + '.pdf') + count+=1 + pp.savefig(fig) + print('{} out of {} saved!'.format(str(eventnumber), str(len(ind1)))) + fig.clear() + plt.close(fig) + pp.close() + + if len(OnlyIndexes['i2']) is not 0: + # Plot All i2 + pp = PdfPages(directory + '_SavedEventsOnlyi2.pdf') + ind1 = np.uint64(OnlyIndexes['i2']) + + t = np.arange(0, len(out['i2'])) + t = t / out['samplerate'] * 1e3 + count=1 + for eventnumber in range(len(ind1)): + parttoplot = np.arange(AnalysisResults['i2']['StartPoints'][ind1[eventnumber]] - buffer, + AnalysisResults['i2']['EndPoints'][ind1[eventnumber]] + buffer, 1, dtype=np.uint64) + + fit1 = np.concatenate([np.ones(buffer) * AnalysisResults['i2']['LocalBaseline'][ind1[eventnumber]], + np.ones(AnalysisResults['i2']['EndPoints'][ind1[eventnumber]] - AnalysisResults['i2']['StartPoints'][ + ind1[eventnumber]]) * ( + AnalysisResults['i2']['LocalBaseline'][ind1[eventnumber]] - AnalysisResults['i2']['DeltaI'][ind1[eventnumber]]), + np.ones(buffer) * AnalysisResults['i2']['LocalBaseline'][ind1[eventnumber]]]) + + fig = PlotEvent(t[parttoplot], out['i1'][parttoplot], t[parttoplot], out['i2'][parttoplot], fit2=fit1) + if not np.mod(eventnumber+1,200): + pp.close() + pp = PdfPages(directory + '_SavedEventsCommon_' + str(count) + '.pdf') + count+=1 + pp.savefig(fig) + print('{} out of {} saved!'.format(str(eventnumber), str(len(ind1)))) + fig.clear() + plt.close(fig) + pp.close() + + # Derivative + if len(CommonIndexes['i1']) is not 0: + # Plot All i1 + pp = PdfPages(directory + '_i1vsderivi2.pdf') + ind1 = np.uint64(CommonIndexes['i1']) + ind2 = np.uint64(CommonIndexes['i2']) + + t = np.arange(0, len(out['i1'])) + t = t / out['samplerate'] * 1e3 + count=1 + for eventnumber in range(len(ind1)): + parttoplot = np.arange(AnalysisResults['i1']['StartPoints'][ind1[eventnumber]] - buffer, + AnalysisResults['i1']['EndPoints'][ind1[eventnumber]] + buffer, 1, dtype=np.uint64) + parttoplot2 = 
np.arange(AnalysisResults['i2']['StartPoints'][ind2[eventnumber]] - buffer, + AnalysisResults['i2']['EndPoints'][ind2[eventnumber]] + buffer, 1, dtype=np.uint64) + + fig = PlotEvent(t[parttoplot], out['i1'][parttoplot], t[parttoplot2][:-1], + np.diff(out['i2'][parttoplot2])) + + if not np.mod(eventnumber+1,200): + pp.close() + pp = PdfPages(directory + '_SavedEventsCommon_' + str(count) + '.pdf') + count+=1 + pp.savefig(fig) + print('{} out of {} saved!'.format(str(eventnumber), str(len(ind1)))) + fig.clear() + plt.close(fig) + pp.close() + +def PlotRecursiveLPResults(RoughEventLocations, inp, directory, buffer, channel='i2'): + pp = PdfPages(directory + '_' + channel + '_DetectedEventsFromLPFilter.pdf') + a=1 + for i in RoughEventLocations['RoughEventLocations']: + startp = np.uint64(i[0]-buffer*inp['samplerate']) + endp = np.uint64(i[1]+buffer*inp['samplerate']) + t = np.arange(startp, endp) + t = t / inp['samplerate'] * 1e3 + fig = PlotEvent(t, inp[channel][startp:endp], channel=channel) + pp.savefig(fig) + print('{} out of {} saved!'.format(str(a), str(len(RoughEventLocations['RoughEventLocations'])))) + a+=1 + fig.clear() + plt.close(fig) + pp.close() + +def SaveAllAxopatchEvents(AnalysisResults, directory, out, buffer, withFit = 1): + # Plot All Common Events + pp = PdfPages(directory + '_SavedEventsAxopatch.pdf') + t = np.arange(0, len(out['i1'])) + t = t / out['samplerate'] * 1e3 + + for eventnumber in range(AnalysisResults['i1']['NumberOfEvents']): + parttoplot = np.arange(AnalysisResults['i1']['StartPoints'][eventnumber] - buffer, + AnalysisResults['i1']['EndPoints'][eventnumber] + buffer, 1, dtype=np.uint64) + + fit1 = np.concatenate([np.ones(buffer) * AnalysisResults['i1']['LocalBaseline'][eventnumber], + np.ones(AnalysisResults['i1']['EndPoints'][eventnumber] - + AnalysisResults['i1']['StartPoints'][ + eventnumber]) * ( + AnalysisResults['i1']['LocalBaseline'][eventnumber] - + AnalysisResults['i1']['DeltaI'][eventnumber]), + np.ones(buffer) * AnalysisResults['i1']['LocalBaseline'][eventnumber]]) + + if withFit: + fig = PlotEvent(t[parttoplot], out['i1'][parttoplot], fit1=fit1) + else: + fig = PlotEvent(t[parttoplot], out['i1'][parttoplot]) + try: + pp.savefig(fig) + except: + print('Problem at {} !'.format(str(eventnumber))) + + print('{} out of {} saved!'.format(str(eventnumber), str(AnalysisResults['i1']['NumberOfEvents']))) + #print('Length i1: {}, Fit i1: {}'.format(len(out['i1'][parttoplot]), len(fit1))) + fig.clear() + plt.close(fig) + pp.close() + +def MakeIV(filenames, directory, conductance=10, title=''): + if len(conductance) is 1: + conductance=np.ones(len(filenames))*conductance + for idx,filename in enumerate(filenames): + print(filename) + + # Make Dir to save images + output = OpenFile(filename) + if not os.path.exists(directory): + os.makedirs(directory) + + AllData = MakeIVData(output, delay=2) + if AllData == 0: + print('!!!! 
No Sweep in: ' + filename) + continue + + # Plot Considered Part + # figExtracteParts = plt.figure(1) + # ax1 = figExtracteParts.add_subplot(211) + # ax2 = figExtracteParts.add_subplot(212, sharex=ax1) + # (ax1, ax2) = uf.PlotExtractedPart(output, AllData, current = 'i1', unit=1e9, axis = ax1, axis2=ax2) + # plt.show() + # figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.eps') + # figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.png', dpi=150) + + + # Plot IV + if output['graphene']: + figIV2 = plt.figure(3) + ax2IV = figIV2.add_subplot(111) + ax2IV = PlotIV(output, AllData, current='i2', unit=1e9, axis=ax2IV, WithFit=1, title=title[idx]) + figIV2.tight_layout() + figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.png', dpi=300) + figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.eps') + figIV2.clear() + + figIV = plt.figure(2) + ax1IV = figIV.add_subplot(111) + ax1IV = PlotIV(output, AllData, current='i1', unit=1e9, axis=ax1IV, WithFit=1, PoreSize=[conductance[idx], 20e-9], title=title[idx]) + figIV.tight_layout() + + # Save Figures + figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'IV_i1.png', dpi=300) + figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'IV_i1.eps') + figIV.clear() + +def TwoChannelAnalysis(filenames, outdir, UpwardsOn=0): + + for filename in filenames: + print(filename) + inp = OpenFile(filename) + file = os.sep + str(os.path.split(filename)[1][:-4]) + + if not os.path.exists(outdir): + os.makedirs(outdir) + + # Low Pass Event Detection + AnalysisResults = {} + + if inp['graphene']: + chan = ['i1', 'i2'] # For looping over the channels + else: + chan = ['i1'] + + for sig in chan: + AnalysisResults[sig] = {} + AnalysisResults[sig]['RoughEventLocations'] = RecursiveLowPassFast(inp[sig], pm.coefficients[sig], + inp['samplerate']) + if UpwardsOn: # Upwards detection can be turned on or off + AnalysisResults[sig + '_Up'] = {} + AnalysisResults[sig + '_Up']['RoughEventLocations'] = RecursiveLowPassFastUp(inp[sig], + pm.coefficients[sig], + inp['samplerate']) + + # f.PlotRecursiveLPResults(AnalysisResults['i2'], inp, directory, 1e-3, channel='i2') + # f.PlotRecursiveLPResults(AnalysisResults['i2_Up'], inp, directory+'UP_', 1e-3, channel='i2') + + # Refine the Rough Event Detection done by the LP filter and Add event infos + AnalysisResults = RefinedEventDetection(inp, AnalysisResults, signals=chan, + limit=pm.MinimalFittingLimit * inp['samplerate']) + + # Correlate the two channels + if inp['graphene']: + (CommonIndexes, OnlyIndexes) = f.CorrelateTheTwoChannels(AnalysisResults, 10e-3 * inp['samplerate']) + print('\n\nAnalysis Done...\nThere are {} common events\n{} Events on i1 only\n{} Events on i2 only'.format( + len(CommonIndexes['i1']), len(OnlyIndexes['i1']), len(OnlyIndexes['i2']))) + # Plot The Events + f.SaveAllPlots(CommonIndexes, OnlyIndexes, AnalysisResults, directory, inp, pm.PlotBuffer) + else: + # Plot The Events + f.SaveAllAxopatchEvents(AnalysisResults, directory, inp, pm.PlotBuffer) + +def SaveToHDF5(inp_file, AnalysisResults, coefficients, outdir): + file = str(os.path.split(inp_file['filename'])[1][:-4]) + f = h5py.File(outdir + file + '_OriginalDB.hdf5', "w") + general = f.create_group("General") + general.create_dataset('FileName', data=inp_file['filename']) + general.create_dataset('Samplerate', data=inp_file['samplerate']) + 
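+    # remaining acquisition metadata: machine type, transverse-channel flag, timestamps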
general.create_dataset('Machine', data=inp_file['type']) + general.create_dataset('TransverseRecorded', data=inp_file['graphene']) + general.create_dataset('TimeFileWritten', data=inp_file['TimeFileWritten']) + general.create_dataset('TimeFileLastModified', data=inp_file['TimeFileLastModified']) + general.create_dataset('ExperimentDuration', data=inp_file['ExperimentDuration']) + + segmentation_LP = f.create_group("LowPassSegmentation") + for k,l in AnalysisResults.items(): + set1 = segmentation_LP.create_group(k) + lpset1 = set1.create_group('LowPassSettings') + for o, p in coefficients[k].items(): + lpset1.create_dataset(o, data=p) + for m, l in AnalysisResults[k].items(): + if m is 'AllEvents': + eventgroup = set1.create_group(m) + for i, val in enumerate(l): + eventgroup.create_dataset('{:09d}'.format(i), data=val) + elif m is 'Cusum': + eventgroup = set1.create_group(m) + for i1, val1 in enumerate(AnalysisResults[k]['Cusum']): + cusevent = eventgroup.create_group('{:09d}'.format(i1)) + cusevent.create_dataset('NumberLevels', data=np.uint64(len(AnalysisResults[k]['Cusum'][i1]['levels']))) + if len(AnalysisResults[k]['Cusum'][i1]['levels']): + cusevent.create_dataset('up', data=AnalysisResults[k]['Cusum'][i1]['up']) + cusevent.create_dataset('down', data=AnalysisResults[k]['Cusum'][i1]['down']) + cusevent.create_dataset('both', data=AnalysisResults[k]['Cusum'][i1]['both']) + cusevent.create_dataset('fit', data=AnalysisResults[k]['Cusum'][i1]['fit']) + # 0: level number, 1: current, 2: length, 3: std + cusevent.create_dataset('levels_current', data=AnalysisResults[k]['Cusum'][i1]['levels'][1]) + cusevent.create_dataset('levels_length', data=AnalysisResults[k]['Cusum'][i1]['levels'][2]) + cusevent.create_dataset('levels_std', data=AnalysisResults[k]['Cusum'][i1]['levels'][3]) + else: + set1.create_dataset(m, data=l) + +def PlotExtractedPart(output, AllData, current = 'i1', unit=1e9, axis = '', axis2 = ''): + time = np.arange(0, len(output[current])) / output['samplerate'] + axis.plot(time, output[current] * unit, 'b', label=str(os.path.split(output['filename'])[1])[:-4]) + axis.set_ylabel('Current ' + current + ' [nA]') + axis.set_title('Time Trace') + for i in range(0, len(AllData[current]['StartPoint'])): + axis.plot(time[AllData[current]['StartPoint'][i]:AllData[current]['EndPoint'][i]], + output[current][AllData[current]['StartPoint'][i]:AllData[current]['EndPoint'][i]] * unit, 'r') + axis2.plot(time, output[AllData[current]['SweepedChannel']], 'b', label=str(os.path.split(output['filename'])[1])[:-4]) + axis2.set_ylabel('Voltage ' + AllData[current]['SweepedChannel'] + ' [V]') + axis2.set_xlabel('Time') + return (axis, axis2) + +def xcorr(x, y, k, normalize=True): + n = x.shape[0] + + # initialize the output array + out = np.empty((2 * k) + 1, dtype=np.double) + lags = np.arange(-k, k + 1) + + # pre-compute E(x), E(y) + mu_x = x.mean() + mu_y = y.mean() + + # loop over lags + for ii, lag in enumerate(lags): + + # use slice indexing to get 'shifted' views of the two input signals + if lag < 0: + xi = x[:lag] + yi = y[-lag:] + elif lag > 0: + xi = x[:-lag] + yi = y[lag:] + else: + xi = x + yi = y + + # x - mu_x; y - mu_y + xdiff = xi - mu_x + ydiff = yi - mu_y + + # E[(x - mu_x) * (y - mu_y)] + out[ii] = xdiff.dot(ydiff) / n + + # NB: xdiff.dot(ydiff) == (xdiff * ydiff).sum() + + if normalize: + # E[(x - mu_x) * (y - mu_y)] / (sigma_x * sigma_y) + out /= np.std(x) * np.std(y) + + return lags, out + +def cusum_detection(data, threshhold, minlength, maxstates): + print(threshhold) + s = 
timer() + logp = 0 # instantaneous log-likelihood for positive jumps + logn = 0 # instantaneous log-likelihood for negative jumps + cpos = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for positive jumps + cneg = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for negative jumps + gpos = np.zeros(2, dtype='float64') # decision function for positive jumps + gneg = np.zeros(2, dtype='float64') # decision function for negative jumps + edges = np.array([0], + dtype='int64') # initialize an array with the position of the first subevent - the start of the event + real_start = np.array([], + dtype='int64') # initialize an array with the position of the first subevent - the start of the event + real_end = np.array([], + dtype='int64') # initialize an array with the position of the first subevent - the start of the event + real_Depth = np.array([], + dtype='int64') # initialize an array with the position of the first subevent - the start of the event + + anchor = 0 # the last detected change + length = len(data) + data_std = np.std(data) + h = threshhold / data_std + k = 0 + nStates = np.uint64(0) + varM = data[0] + varS = 0 + mean = data[0] + print('length data =' + str(length)) + v = np.zeros(length, dtype='float64') + while k < length - 100: + k += 1 + if nStates == 0: + variance = np.var(data[anchor:k]) # initial params for pattern region + mean = np.mean(data[anchor:k]) + if variance == 0: break + logp = threshhold / variance * (data[ + k] - mean - threshhold / 2.) # instantaneous log-likelihood for current sample assuming local baseline has jumped in the positive direction + logn = -threshhold / variance * (data[ + k] - mean + threshhold / 2.) # instantaneous log-likelihood for current sample assuming local baseline has jumped in the negative direction + cpos[k] = cpos[k - 1] + logp # accumulate positive log-likelihoods + cneg[k] = cneg[k - 1] + logn # accumulate negative log-likelihoods + gpos[1] = max(gpos[0] + logp, 0) # accumulate or reset positive decision function + gneg[1] = max(gneg[0] + logn, 0) # accumulate or reset negative decision function + if (gpos[1] > h or gneg[1] > h): + + if (gpos[1] > h): # significant positive jump detected + jump = np.uint64(anchor + np.argmin(cpos[anchor:k + 1])) # find the location of the start of the jump + if jump - edges[np.uint64(nStates)] > minlength and np.abs(data[np.uint64(jump + minlength)] - data[jump]) > threshhold / 4: + edges = np.append(edges, jump) + nStates += 1 + # print('EVENT!!!!! at ='+str(self.t[jump])) + anchor = k # no data meaning at bad points! + # away from bad point more! + cpos[0:len(cpos)] = 0 # reset all decision arrays + cneg[0:len(cneg)] = 0 + gpos[0:len(gpos)] = 0 + gneg[0:len(gneg)] = 0 + if (gneg[1] > h): # significant negative jump detected + jump = anchor + np.argmin(cneg[anchor:k + 1]) + if jump - edges[np.uint64(nStates)] > minlength and np.abs(data[np.uint64(jump + minlength)] - data[jump]) > threshhold / 4: + edges = np.append(edges, jump) + nStates += 1 + # print('EVENT!!!!! at ='+str(self.t[jump] )) + anchor = k # no data meaning at bad points! + # away from bad point more! 
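+                # Two-sided Page CUSUM test: once either decision function
+                # crosses h, the change point is placed at the minimum of the
+                # corresponding cumulative log-likelihood since the last
+                # anchor, and all accumulators are cleared below so detection
+                # restarts from the new anchor.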
+ cpos[0:len(cpos)] = 0 # reset all decision arrays + cneg[0:len(cneg)] = 0 + gpos[0:len(gpos)] = 0 + gneg[0:len(gneg)] = 0 + + gpos[0] = gpos[1] + gneg[0] = gneg[1] + if maxstates > 0: + if nStates > maxstates: + print('too sensitive') + nStates = 0 + k = 0 + threshhold = threshhold * 1.1 + h = h * 1.1 + logp = 0 # instantaneous log-likelihood for positive jumps + logn = 0 # instantaneous log-likelihood for negative jumps + cpos = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for positive jumps + cneg = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for negative jumps + gpos = np.zeros(2, dtype='float64') # decision function for positive jumps + gneg = np.zeros(2, dtype='float64') # decision function for negative jumps + edges = np.array([0], + dtype='int64') # initialize an array with the position of the first subevent - the start of the event + anchor = 0 # the last detected change + length = len(data) + mean = data[0] + nStates = 0 + mean = data[0] + edges = np.append(edges, len(data) - 1) # mark the end of the event as an edge + nStates += 1 + + cusum = dict() + print('Events = ' + str([edges])) + for i in range(len(edges) - 1): + real_start = np.append(real_start, edges[i]) + real_end = np.append(real_end, edges[i + 1]) + real_Depth = np.append(real_Depth, np.mean(data[np.uint64(edges[i]):np.uint64(edges[i + 1])])) + + cusum['Real_Start'] = real_start + cusum['Real_End'] = real_end + cusum['Real_Depth'] = real_Depth + print('Real Start =' + str(cusum['Real_Start'])) + print('Real End =' + str(cusum['Real_End'])) + cusum['CurrentLevels'] = [np.average(data[np.uint64(edges[i]) + minlength:np.uint64(edges[i + 1])]) for i in + range(np.uint64(nStates))] # detect current levels during detected sub-event + print('Edges[-1] = ' + str(edges[-1])) + cusum['EventDelay'] = edges # locations of sub-events in the data + cusum['Threshold'] = threshhold # record the threshold used + print('Event = ' + str(cusum['EventDelay'])) + cusum['jumps'] = np.diff(cusum['CurrentLevels']) + + e = timer() + print('cusum took = ' + str(e - s) + 's') + return cusum + +def detect_cusum(data, var, threshhold, minlength, maxstates): + logp = 0 # instantaneous log-likelihood for positive jumps + logn = 0 # instantaneous log-likelihood for negative jumps + cpos = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for positive jumps + cneg = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for negative jumps + gpos = np.zeros(len(data), dtype='float64') # decision function for positive jumps + gneg = np.zeros(len(data), dtype='float64') # decision function for negative jumps + edges = np.array([0], + dtype='int64') # initialize an array with the position of the first subevent - the start of the event + anchor = 0 # the last detected change + length = len(data) + mean = data[0] + h = threshhold / var + k = self.safety_reg - self.control1 + nStates = 0 + varM = data[0] + varS = 0 + mean = data[0] + print('length data =' + str(length)) + v = np.zeros(length, dtype='float64') + while k < length - 100: + k += 1 + if nStates == 0: variance = np.var(data[anchor:k]) + mean = np.mean(data[anchor:k]) + logp = threshhold / variance * (data[ + k] - mean - threshhold / 2.) # instantaneous log-likelihood for current sample assuming local baseline has jumped in the positive direction + logn = -threshhold / variance * (data[ + k] - mean + threshhold / 2.) 
# instantaneous log-likelihood for current sample assuming local baseline has jumped in the negative direction + cpos[k] = cpos[k - 1] + logp # accumulate positive log-likelihoods + cneg[k] = cneg[k - 1] + logn # accumulate negative log-likelihoods + gpos[k] = max(gpos[k - 1] + logp, 0) # accumulate or reset positive decision function + gneg[k] = max(gneg[k - 1] + logn, 0) # accumulate or reset negative decision function + if (gpos[k] > h or gneg[k] > h): + + if (gpos[k] > h): # significant positive jump detected + jump = anchor + np.argmin(cpos[anchor:k + 1]) # find the location of the start of the jump + if jump - edges[nStates] > minlength and np.abs(data[jump + minlength] - data[jump]) > threshhold / 4: + edges = np.append(edges, jump) + nStates += 1 + print('EVENT!!!!! at =' + str((self.control1 + jump) / self.out['samplerate'])) + anchor = k # no data meaning at bad points! + # away from bad point more! + cpos[0:len(cpos)] = 0 # reset all decision arrays + cneg[0:len(cneg)] = 0 + gpos[0:len(gpos)] = 0 + gneg[0:len(gneg)] = 0 + if (gneg[k] > h): # significant negative jump detected + jump = anchor + np.argmin(cneg[anchor:k + 1]) + if jump - edges[nStates] > minlength and np.abs(data[jump + minlength] - data[jump]) > threshhold / 4: + edges = np.append(edges, jump) + nStates += 1 + print('EVENT!!!!! at =' + str((self.control1 + jump) / self.out['samplerate'])) + anchor = k # no data meaning at bad points! + # away from bad point more! + cpos[0:len(cpos)] = 0 # reset all decision arrays + cneg[0:len(cneg)] = 0 + gpos[0:len(gpos)] = 0 + gneg[0:len(gneg)] = 0 + + if maxstates > 0: + if nStates > 10: + print('too sensitive') + nStates = 0 + k = 0 + threshhold = threshhold * 1.1 + h = h * 1.1 + logp = 0 # instantaneous log-likelihood for positive jumps + logn = 0 # instantaneous log-likelihood for negative jumps + cpos = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for positive jumps + cneg = np.zeros(len(data), dtype='float64') # cumulative log-likelihood function for negative jumps + gpos = np.zeros(len(data), dtype='float64') # decision function for positive jumps + gneg = np.zeros(len(data), dtype='float64') # decision function for negative jumps + edges = np.array([0], + dtype='int64') # initialize an array with the position of the first subevent - the start of the event + anchor = 0 # the last detected change + length = len(data) + mean = data[0] + nStates = 0 + mean = data[0] + edges = np.append(edges, len(data)) # mark the end of the event as an edge + nStates += 1 + + cusum = dict() + cusum['CurrentLevels'] = [np.average(data[edges[i] + minlength:edges[i + 1]]) for i in + range(nStates)] # detect current levels during detected sub-events + cusum['EventDelay'] = edges # locations of sub-events in the data + cusum['Threshold'] = threshhold # record the threshold used + cusum['jumps'] = np.diff(cusum['CurrentLevels']) + # self.__recordevent(cusum) + + return cusum + +def CUSUM(input, delta, h, startp): + Nd = 0 + kd = [] + krmv = [] + krmvdown = [] + both = [] + k0 = 0 + k = 0 + l = len(input) + m = np.zeros(l) + m[k0] = input[k0] + v = np.zeros(l) + sp = np.zeros(l) + Sp = np.zeros(l) + gp = np.zeros(l) + sn = np.zeros(l) + Sn = np.zeros(l) + gn = np.zeros(l) + mc = [] + while k < l-1: + k += 1 + m[k] = np.mean(input[k0:k+1]) + v[k] = np.var(input[k0:k+1]) + + sp[k] = delta / v[k] * (input[k] - m[k] - delta / 2) + sn[k] = -delta / v[k] * (input[k] - m[k] + delta / 2) + + Sp[k] = Sp[k - 1] + sp[k] + Sn[k] = Sn[k - 1] + sn[k] + + gp[k] = np.max([gp[k - 1] + 
sp[k], 0])
+        gn[k] = np.max([gn[k - 1] + sn[k], 0])
+
+        if gp[k] > h or gn[k] > h:
+            if gp[k] > h:
+                kmin = np.argmin(Sp[k0:k])
+                krmv.append(kmin + k0 - 1)
+                both.append(kmin + k0 - 1)
+            else:
+                kd.append(k)
+                kmin = np.argmin(Sn[k0:k])
+                krmvdown.append(kmin + k0 - 1)
+                both.append(kmin + k0 - 1)
+            k0 = k
+            m[k0] = input[k0]
+            v[k0] = 0
+            sp[k0] = 0
+            Sp[k0] = 0
+            gp[k0] = 0
+            sn[k0] = 0
+            Sn[k0] = 0
+            gn[k0] = 0
+            Nd = Nd + 1
+    '''
+    if Nd == 0:
+        mc = np.mean(input) * np.ones(k)
+    elif Nd == 1:
+        mc = [m[krmv[0]] * np.ones(krmv[0]), m[k] * np.ones(k - krmv[0])]
+    else:
+        mc.append(m[krmv[0]] * np.ones(krmv[0]))
+        for ii in range(1, Nd-1):
+            mc.append(m[krmv[ii]] * np.ones(krmv[ii] - krmv[ii - 1]))
+        mc.append(m[k] * np.ones(k - krmv[Nd-1]))
+    '''
+    ## Calculate Inner Levels
+    if len(both) >= 2:
+        levels = [np.zeros(len(both)-1), np.zeros(len(both)-1),
+                  np.zeros(len(both)-1), np.zeros(len(both)-1)]
+        # 0: level number, 1: current, 2: length, 3: std
+        fit = np.array([])
+        for ind, g in enumerate(both[:-1]):
+            levels[0][ind] = ind
+            levels[1][ind] = np.mean(input[g:both[ind + 1]])
+            levels[2][ind] = both[ind + 1] - g
+            levels[3][ind] = np.std(input[g:both[ind + 1]])
+            # np.append returns a new array, so the result has to be assigned back
+            fit = np.append(fit, levels[1][ind] * np.ones(np.uint64(levels[2][ind])))
+    else:
+        levels = []
+        fit = []
+    # the index lists are converted to arrays so that adding the offset startp
+    # shifts every change point to its absolute position in the trace
+    out = {'up': np.array(krmv) + startp, 'down': np.array(krmvdown) + startp, 'both': np.array(both) + startp, 'levels': levels, 'fit': fit}
+    return out
+
+def CondPoreSizeTick(x, pos):
+    formCond = EngFormatter(unit='S')
+    formPoreSize = EngFormatter(unit='m')
+    outstring = '{}, {}'.format(formCond.format_eng(x), formPoreSize.format_eng(CalculatePoreSize(x, 1e-9, 10)))
+    return outstring
+
+def EredoxBefore14062018():
+    Eredox = np.array([31.7, 82.9, 135, 185], dtype=float)
+    Eredox = Eredox * 1e-3
+    return (Eredox-Eredox[0])
+
+def DebyeHuckelParam(c, T = 20):
+    ## Valid Only for same valency and c1 = c2
+    epsilon_r = 87.740-0.40008*T+9.398e-4*T**2-1.410e-6*T**3
+    print('Dielectric of Water is {}'.format(epsilon_r))
+    n = c*1e3*cst.Avogadro
+    formCond = EngFormatter(unit='m')
+    k = np.sqrt((cst.e**2*2*n)/(cst.k*(T+273.15)*cst.epsilon_0*epsilon_r))
+    return [k, 1/k]#, '{}'.format(formCond.format_eng(1/k))]
+
+def ElectrostaticPotential(T, sigma, Tau=5e18, pK=7.9, pH=11):
+    return cst.k * T / cst.e * (np.log(-sigma / (cst.e * Tau + sigma)) + (pK - pH) * np.log(10))
+
+def GetTempRedoxPotential(Cdiff=1e-3/1e-1, T=293.15):
+    return cst.R * T / cst.physical_constants['Faraday constant'][0] * np.log(Cdiff)
+
+##Result: Negative ion divided by positive ion
+#Goldman–Hodgkin–Katz voltage equation
+def GetIonSelectivityWithoutPotential(c_trans, c_cis, Erev, T):
+    n_trans = c_trans#*1e3#*cst.Avogadro
+    n_cis = c_cis#*1e3#*cst.Avogadro
+    A = Erev * cst.physical_constants['Faraday constant'][0] / (cst.R*T)
+    return -(n_trans-n_cis*np.exp(-A))*(1-np.exp(A))/((n_trans-n_cis*np.exp(A))*(1-np.exp(-A)))
+
+def GetIonSelectivityWithPotential(c_trans, c_cis, Erev, T, phi):
+    sel1 = GetIonSelectivityWithoutPotential(c_trans, c_cis, Erev, T)
+    return sel1*np.exp((-2*phi*cst.physical_constants['Faraday constant'][0])/(cst.R * T))
+
+def Selectivity(ConcGradient, V):
+    return V * cst.physical_constants['Faraday constant'][0]/((cst.R*293.15) * np.log(ConcGradient))
\ No newline at end of file
diff --git a/KCLConductivities.xlsx b/KCLConductivities.xlsx
new file mode 100644
index 0000000..261960f
Binary files /dev/null and b/KCLConductivities.xlsx differ
diff --git a/KCl_ConductivityValues.p b/KCl_ConductivityValues.p
new file mode 100644
index 0000000..e4e1212
Binary files /dev/null
and b/KCl_ConductivityValues.p differ diff --git a/LockInPlotting.py b/LockInPlotting.py new file mode 100644 index 0000000..fee733a --- /dev/null +++ b/LockInPlotting.py @@ -0,0 +1,107 @@ +import UsefulFunctions as uf +import pyqtgraph as pg +import matplotlib.pyplot as plt +from timeit import default_timer as timer +import scipy +import numpy as np +import os +import matplotlib +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.font_manager import FontProperties +from tkinter import Tk +from matplotlib.ticker import EngFormatter +from tkinter.filedialog import askopenfilenames +matplotlib.rcParams['agg.path.chunksize'] = 100000 + +fontP = FontProperties() +fontP.set_size('small') +props = dict(boxstyle='round', facecolor='wheat', alpha=0.5) +Tk().withdraw() +os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "python" to true' ''') + +filebyfile = 1 +expname = 'All' +#filenames = askopenfilenames() # show an "Open" dialog box and return the path to the selected file +filenames={'/Volumes/lben/lben-commun/2018 User Data/Michael/LockIn/modelcell_05-02-2018_14_40.txt', '/Volumes/lben/lben-commun/2018 User Data/Michael/LockIn/2modelcells, resistor_05-02-2018_14_55.txt'} +range = [10, 10e3] +voltage = 100e-3 + +for file in filenames: + filehandle = open(file, 'r') + (f, V, theta, I, R, C) = np.loadtxt(filehandle, unpack=True) + V = V*1e-3 + I = I*1e-9 + C = C*1e-9 + theta = -theta + print(I) + if range[1]: + ind = np.argwhere((f >= range[0]) & (f <= range[1])).flatten() + print(ind) + f = f[ind] + V = V[ind] + theta = theta[ind] + I = I[ind] + R = R[ind] + C = C[ind] + print(I) + + filehandle.close() + figIV = plt.figure(1) + ImpR = voltage/I*np.cos(np.deg2rad(theta)) + ImpIm = voltage/I*np.sin(np.deg2rad(theta)) + + axR = figIV.add_subplot(222) + axR.set_title('Resistance') + axR.set_xlabel('Frequency') + axR.set_ylabel('Resistance') + axR.set_xscale('log') + axR.set_yscale('log') + axR.yaxis.set_major_formatter(EngFormatter(unit='$\Omega$', places=0)) + axR.xaxis.set_major_formatter(EngFormatter(unit='Hz', places=0)) + axR.plot(f, R) + + axC = figIV.add_subplot(224) + axC.set_title('Capacitance') + axC.set_xlabel('Frequency') + axC.set_ylabel('Capacitance') + axC.set_xscale('log') + axC.set_yscale('linear') + axC.yaxis.set_major_formatter(EngFormatter(unit='C', places=0)) + axC.xaxis.set_major_formatter(EngFormatter(unit='Hz', places=0)) + axC.plot(f, C) + + axI = figIV.add_subplot(221) + axI.set_title('Current') + axI.set_xlabel('Frequency') + axI.set_ylabel('Current') + axI.set_xscale('log') + axI.set_yscale('log') + axI.yaxis.set_major_formatter(EngFormatter(unit='A', places=0)) + axI.xaxis.set_major_formatter(EngFormatter(unit='Hz', places=0)) + axI.plot(f, I) + + axT = figIV.add_subplot(223) + axT.set_title('Phase') + axT.set_xlabel('Frequency') + axT.set_ylabel('Phase') + axT.set_xscale('log') + axT.set_yscale('linear') + axT.yaxis.set_major_formatter(EngFormatter(unit='°', places=0)) + axT.xaxis.set_major_formatter(EngFormatter(unit='Hz', places=0)) + axT.plot(f, theta) + + figImp = plt.figure(2) + axNyq = figImp.add_subplot(111) + axNyq.set_title('Nyquist Plot') + axNyq.set_xlabel('Real Part [Re(Z)]') + axNyq.set_ylabel('Imaginary Part [Im(Z)]') + axNyq.plot(ImpR, ImpIm, ls = 'None', marker = 'o') + + figIV.savefig(file[:-4] + '10kHz.png', dpi=300) + figImp.savefig(file[:-4] + '_Nyquist10kHz.png', dpi=300) + # plt.show() + figIV.clear() + figImp.clear() + + ## Save Img, Real and Freq to txt file + np.savetxt(file[:-4] + 
'_Data10kHz.txt', np.transpose((ImpR, ImpIm, f)), fmt='%.14E', delimiter=' ', newline='\n', header=str(len(ImpIm)), comments='') diff --git a/MainPlotGui.py b/MainPlotGui.py new file mode 100644 index 0000000..1aa038a --- /dev/null +++ b/MainPlotGui.py @@ -0,0 +1,85 @@ +from PyQt5.QtWidgets import QMainWindow, QApplication +import pyqtgraph.exporters +from PyQt5 import QtGui +from plotgui import Ui_Plotting +import pyqtgraph as pg +import Functions as f +import sys +import numpy as np +import os +import glob + + +# Draw UI +app = QApplication(sys.argv) +window = QMainWindow() +ui = Ui_Plotting() +ui.setupUi(window) + +folder = '' +currentFile = '' + +def ChangedFolder(): + global folder + folder = QtGui.QFileDialog.getExistingDirectory(window, 'Select in which Folder to save your data', os.getcwd()) + file_list = glob.glob(folder + os.sep + '*.dat') + file_list.sort(key=os.path.getmtime) + for i in file_list: + ui.listWidget.addItem(os.path.split(i)[1]) + +def ItemChanged(it): + print(it.text()) + global currentFile + currentFile = it.text() + # Update Plots + UpdatePlots(currentFile) + ui.label.setText(currentFile) + +def UpdatePlots(currentFile): + inp = f.OpenFile(folder + os.sep + currentFile) + ui.graphicsView.plotItem.clear() + ui.graphicsView_2.plotItem.clear() + ui.graphicsView_3.plotItem.clear() + ui.graphicsView.plot(y=inp['i1'], x=np.arange(0, len(inp['i1']))/ inp['samplerate'], pen = 'k') + ui.graphicsView_3.plot(y=inp['i2'], x=np.arange(0, len(inp['i2']))/inp['samplerate'], pen = 'r') + ui.graphicsView_2.plot(y=inp['v2'], x=np.arange(0, len(inp['i2']))/inp['samplerate'], pen = 'r') + ui.graphicsView_2.plot(y=inp['v1'], x=np.arange(0, len(inp['i1']))/inp['samplerate'], pen = 'k') + +def SaveWindow(): + win = pg.GraphicsWindow() + #win.addItem(ui.graphicsView.plotItem) + win.show() + exporter = pg.exporters.ImageExporter(ui.graphicsView.plotItem) + exporter.parameters()['width'] = 10000 # (note this also affects height parameter) + exporter.export('fileName.png') + exporter2 = pg.exporters.ImageExporter(ui.graphicsView_3.plotItem) + exporter2.parameters()['width'] = 10000 # (note this also affects height parameter) + exporter2.export('fileName_1.png') + + +ui.listWidget.itemDoubleClicked.connect(ItemChanged) +ui.pushButton.pressed.connect(ChangedFolder) +ui.pushSave.pressed.connect(SaveWindow) + +## Plotting Settings +ui.graphicsView.plotItem.setDownsampling(ds=10, auto=True, mode='subsample') +ui.graphicsView.plotItem.setClipToView(True) +pg.setConfigOptions(antialias=True) +pg.setConfigOptions(background=None) +ui.graphicsView.plotItem.setLabel('left', "Current", units='A') +ui.graphicsView.plotItem.setLabel('bottom', "Time", units='s') + +ui.graphicsView_2.plotItem.setDownsampling(ds=10, auto=True, mode='subsample') +ui.graphicsView_2.plotItem.setClipToView(True) +ui.graphicsView_2.plotItem.setXLink(ui.graphicsView.plotItem) +ui.graphicsView_2.plotItem.setLabel('left', "Voltage", units='V') +ui.graphicsView_2.plotItem.setLabel('bottom', "Time", units='s') + +ui.graphicsView_3.plotItem.setDownsampling(ds=100, auto=True, mode='subsample') +ui.graphicsView_3.plotItem.setClipToView(True) +ui.graphicsView_3.plotItem.setXLink(ui.graphicsView.plotItem) +ui.graphicsView_3.plotItem.setLabel('left', "Current", units='A') +ui.graphicsView_3.plotItem.setLabel('bottom', "Time", units='s') + +window.show() +sys.exit(app.exec_()) \ No newline at end of file diff --git a/MakeIndividualIVs.py b/MakeIndividualIVs.py new file mode 100644 index 0000000..827a082 --- /dev/null +++ 
b/MakeIndividualIVs.py @@ -0,0 +1,82 @@ +import numpy as np +import scipy +import scipy.signal as sig +import Functions as uf +import pyqtgraph as pg +import os +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +from tkinter import Tk +from tkinter.filedialog import askopenfilenames +from matplotlib.font_manager import FontProperties +fontP = FontProperties() +fontP.set_size('small') +props = dict(boxstyle='round', facecolor='wheat', alpha=0.5) + +Tk().withdraw() +os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "python" to true' ''') + +#filenames = ['/Volumes/backup/2017/Michael/Axopatch/20170512/10mMKClInFlowCellORingPore1mm.dat'] +#filenames = ['/Volumes/Michael/Axopatch/20180430/R17_ph74_KCl_1M_1M_0mW_473nm_IV_3.dat'] +expname = 'All' +reversePolarity = 0 + +filenames = askopenfilenames() # show an "Open" dialog box and return the path to the selected file + +for filename in filenames: + print(filename) + + #Make Dir to save images + output = uf.OpenFile(filename) + if reversePolarity: + print('Polarity Reversed!!!!') + output['i1']= -output['i1'] + output['v1']= -output['v1'] + + directory = (str(os.path.split(filename)[0]) + os.sep + expname + '_SavedImages') + if not os.path.exists(directory): + os.makedirs(directory) + + AllData = uf.MakeIVData(output, delay = 9)#, UseVoltageRange = [-0.4, 0.4]) + if AllData == 0: + print('!!!! No Sweep in: ' + filename) + continue + + #Plot Considered Part + #figExtracteParts = plt.figure(1) + #ax1 = figExtracteParts.add_subplot(211) + #ax2 = figExtracteParts.add_subplot(212, sharex=ax1) + #(ax1, ax2) = uf.PlotExtractedPart(output, AllData, current = 'i1', unit=1e9, axis = ax1, axis2=ax2) + #plt.show() + #figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.eps') + #figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.png', dpi=150) + + + # Plot IV + if output['graphene']: + figIV2 = plt.figure(3) + figIV2.clear() + ax2IV = figIV2.add_subplot(111) + ax2IV = uf.PlotIV(output, AllData, current='i2', unit=1e9, axis=ax2IV, WithFit=1) + figIV2.tight_layout() + figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.png', dpi=300) + figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.eps') + + figIV = plt.figure(2) + ax1IV = figIV.add_subplot(111) + #ax1IV = uf.PlotIV(output, AllData, current='i1', unit=1e9, axis = ax1IV, WithFit = 1, PoreSize=[10, 0.7e-9], useEXP = 0, color ='b', + # labeltxt='Exponential Fit', Polynomial=0) + ax1IV = uf.PlotIV(output, AllData, current='i1', unit=1e9, axis = ax1IV, WithFit = 1, useEXP = 0, color ='y', + labeltxt='MeanFit', PoreSize=[10, 1e-9], title=str(os.path.split(filename)[1])) + + ax1IV.legend(loc='upper center', bbox_to_anchor=(0.8, 0.2), + ncol=1, fancybox=True, shadow=True, prop=fontP) + + figIV.tight_layout() + #Save Figures + #plt.show() + figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'IV_i1.png', dpi=300) + figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'IV_i1.eps') + + #plt.show() + figIV.clear() \ No newline at end of file diff --git a/MakeListOfFilesToCombine.py b/MakeListOfFilesToCombine.py new file mode 100644 index 0000000..0bf0050 --- /dev/null +++ b/MakeListOfFilesToCombine.py @@ -0,0 +1,20 @@ +import AnalysisParameters as pm +import time +import MiscParameters as pm +import h5py +from openpyxl import Workbook +from tkinter.filedialog 
import askopenfilenames +pm.init(LoadFiles = 0) + +filenames = askopenfilenames(filetypes=[('HDF5 files', '*.hdf5')]) +ExperimentName = pm.OutputFolder + '02B_10mM_1M_1kb_pH10.xlsx' +wb = Workbook() +ws1 = wb.active + +for (i,k) in enumerate(filenames): + ws1['A{}'.format(i+1)] = k + f = h5py.File(k, 'r') + ws1['B{}'.format(i + 1)] = f['General/TransverseRecorded'].value + ws1['C{}'.format(i + 1)] = time.ctime(f['General/TimeFileWritten'].value) + +wb.save(filename = ExperimentName) \ No newline at end of file diff --git a/MakePSDs.py b/MakePSDs.py new file mode 100644 index 0000000..4224208 --- /dev/null +++ b/MakePSDs.py @@ -0,0 +1,97 @@ +#Input Stuff +import AnalysisParameters as pm +import numpy as np +import scipy +import scipy.signal as sig +import Functions as f +import os +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +from tkinter import Tk +from tkinter.filedialog import askopenfilenames +from matplotlib.ticker import EngFormatter +from matplotlib.font_manager import FontProperties +fontP = FontProperties() +props = dict(boxstyle='round', facecolor='wheat', alpha=0.5) +import pyqtgraph as pg + +fontP.set_size('small') +pm.init() +root = Tk() +root.withdraw() +os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "python" to true' ''') +SaveAsPDF=0 +foldername='PSD' + +# Load File, empty string prompts a pop.up window for file selection. Else a file-path can be given +root.update() +filenames = ['/Users/migraf/Desktop/Temp/BigBerta_Noise_2.dat'] +#filenames = askopenfilenames() +root.destroy() + +# Make Dir to save images +directory = (str(os.path.split(filenames[0])[0]) + os.sep + foldername) +if not os.path.exists(directory): + os.makedirs(directory) +if SaveAsPDF: + pp = PdfPages(directory + os.sep + '_ALLPSD.pdf') + +for filename in filenames: + print(filename) + #filename = '/Users/migraf/Desktop/TestTrace/07B_10mMKClBoth_1kBDN_BothChannels12.dat' + inp = f.OpenFile(filename) + folder = str(os.path.split(filename)[0]) + os.sep +'PSD' + file = os.sep + str(os.path.split(filename)[1][:-4]) + if not os.path.exists(folder): + os.makedirs(folder) + + fig1 = plt.figure() + ax = fig1.add_subplot(211) + ax2 = fig1.add_subplot(212, sharex = ax) + fr, Pxx_den = scipy.signal.periodogram(inp['i1'], inp['samplerate']) + #f, Pxx_den = scipy.signal.welch(input, samplerate, nperseg=10*256, scaling='spectrum') + ax.set_ylabel(r'PSD of Ionic [$\frac{pA^2}{Hz}$]') + ax.set_xlabel('Frequency') + ax.set_yscale('log') + ax.set_xscale('log') + #ax.set_ylim(1e4, 1e17) + #ax.set_xlim(100e-3, 100e3) + + ax.xaxis.set_major_formatter(EngFormatter(unit='Hz')) + ax.plot(fr, Pxx_den*1e24, 'b') + ax.grid(1) + ax.autoscale() + + if inp['graphene']: + print(np.mean(inp['v2'])) + fr, Pxx_den = scipy.signal.periodogram(inp['i2'], inp['samplerate']) + ax2.plot(fr, Pxx_den * 1e24, 'r') + ax2.set_ylabel(r'PSD of Transverse [$\frac{pA^2}{Hz}$]') + ax2.set_xlabel('Frequency') + ax2.set_yscale('log') + ax2.set_xscale('log') + ax2.xaxis.set_major_formatter(EngFormatter(unit='Hz')) + ax2.plot(fr, Pxx_den * 1e24, 'r') + ax2.grid(1) + ax2.autoscale() + #ax.legend(['Ion', 'Transverse']) + textstr = 'STD ionic: {}\nSTD trans: {}'.format(pg.siFormat(np.std(inp['i1'])), pg.siFormat(np.std(inp['i2']))) + ax2.set_title('STD trans: {}'.format(pg.siFormat(np.std(inp['i2'])))) + ax.set_title('STD ionic: {}'.format(pg.siFormat(np.std(inp['i1'])))) + else: + textstr = 'STD ionic: {}'.format(pg.siFormat(np.std(inp['i1']))) + ax.set_title(textstr) + #ax2.text(0.75, 
0.1, textstr, transform=ax.transAxes, fontsize=8,
+    #         verticalalignment='bottom', bbox=props)
+    fig1.tight_layout()
+    if SaveAsPDF:
+        pp.savefig(fig1)
+    else:
+        fig1.savefig(directory + os.sep + str(os.path.split(filename)[1][:-4]) + '_PSD.png', dpi=300)
+
+    fig1.clear()
+    ax.clear()
+    plt.close(fig1)
+
+if SaveAsPDF:
+    pp.close()
diff --git a/MergedIVs.py b/MergedIVs.py
new file mode 100644
index 0000000..e102b32
--- /dev/null
+++ b/MergedIVs.py
@@ -0,0 +1,87 @@
+import numpy as np
+import scipy
+import scipy.signal as sig
+import UsefulFunctions as uf
+import pyqtgraph as pg
+import os
+import matplotlib.pyplot as plt
+from matplotlib.backends.backend_pdf import PdfPages
+from tkinter import Tk
+from tkinter.filedialog import askopenfilenames
+from matplotlib.font_manager import FontProperties
+fontP = FontProperties()
+fontP.set_size('small')
+props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
+
+Tk().withdraw()
+os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "python" to true' ''')
+
+#filenames = ['/Volumes/backup/2017/Michael/Axopatch/20170512/10mMKClInFlowCellORingPore1mm.dat']
+filenames = ['/Volumes/Michael/Axopatch_DR/20171025/03A_GroundOnTrans_OvernightAfterChimeraMeasurements_IV.dat',
+             '/Volumes/Michael/Axopatch_DR/20171025/03A_AfterWash_1MKClpH7_GroundToTrans_IV_2.dat',
+             '/Volumes/Michael/Axopatch_DR/20171025/03A_AfterWash_1MKClpH115_GroundToTrans_IV_2.dat',
+             '/Volumes/Michael/Axopatch_DR/20171025/03A_AfterWash_1MKClpH7_GroundToTrans_500mV_IV_6.dat',
+             '/Volumes/Michael/Axopatch_DR/20171025/03A_AfterWash_1MKClpH341_GroundToTrans_500mV_IV_3.dat',
+             '/Volumes/Michael/Axopatch_DR/20171025/03A_AfterWash_1MKClpH7_GroundToTrans_500mV_IV_7.dat']
+
+labels = ['1) After Poly-D-Lysine, pH7',
+          '2) 1M KCl, pH 7',
+          '3) 1M KCl, pH 11',
+          '4) 1M KCl, pH 7',
+          '5) 1M KCl, pH 3',
+          '6) 1M KCl, pH 7']
+expname = 'All'
+
+filenames = askopenfilenames()  # show an "Open" dialog box and return the path to the selected file
+
+figIV = plt.figure(2)
+ax1IV = figIV.add_subplot(111)
+
+for filename in filenames:
+    print(filename)
+
+    #Make Dir to save images
+    output = uf.OpenFile(filename)
+    directory = (str(os.path.split(filename)[0]) + os.sep + expname + '_SavedImages')
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+
+    AllData = uf.MakeIVData(output, delay = 2)
+    if AllData == 0:
+        print('!!!! No Sweep in: ' + filename)
+        continue
+
+    #Plot Considered Part
+    #figExtracteParts = plt.figure(1)
+    #ax1 = figExtracteParts.add_subplot(211)
+    #ax2 = figExtracteParts.add_subplot(212, sharex=ax1)
+    #(ax1, ax2) = uf.PlotExtractedPart(output, AllData, current = 'i1', unit=1e9, axis = ax1, axis2=ax2)
+    #plt.show()
+    #figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.eps')
+    #figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.png', dpi=150)
+
+
+    # Plot IV
+    if output['graphene']:
+        figIV2 = plt.figure(3)
+        ax2IV = figIV2.add_subplot(111)
+        ax2IV = uf.PlotIV(output, AllData, current='i2', unit=1e9, axis=ax2IV, WithFit=1)
+        figIV2.tight_layout()
+        figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.png', dpi=150)
+        figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.eps')
+        figIV2.clear()
+
+    ax1IV = uf.PlotIV(output, AllData, current = 'i1', unit=1e9, axis = ax1IV, WithFit = 0)
+    figIV.tight_layout()
+
+    #Save Figures
+ax1IV.legend()
+figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'Merged_IV_i1.png', dpi=300)
+figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'Merged_IV_i1.eps')
+figIV.clear()
+
+
+# print(AllData['i2']['YorkFitValues']['Yintercept'])
+
+#plt.show()
\ No newline at end of file
diff --git a/PlotAllChimeraFiles.py b/PlotAllChimeraFiles.py
new file mode 100644
index 0000000..7b703d0
--- /dev/null
+++ b/PlotAllChimeraFiles.py
@@ -0,0 +1,37 @@
+import UsefulFunctions as uf
+import pyqtgraph as pg
+import matplotlib.pyplot as plt
+from timeit import default_timer as timer
+import scipy
+import numpy as np
+import os
+import matplotlib
+from matplotlib.backends.backend_pdf import PdfPages
+from matplotlib.font_manager import FontProperties
+from tkinter import Tk
+from tkinter.filedialog import askopenfilenames
+
+fontP = FontProperties()
+fontP.set_size('small')
+props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
+
+Tk().withdraw()
+os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "python" to true' ''')
+
+#filenames = ['/Volumes/backup/2017/Michael/Axopatch/20170512/10mMKClInFlowCellORingPore1mm.dat']
+#filenames = ['/Volumes/backup/2017/Michael/Axopatch/20170824/30B_10mMKClCis100mMTrans_PosElecToCis_IV.dat']
+expname = 'All'
+
+filenames = askopenfilenames(filetypes = (("chimera files", "*.log"), ("all files", "*.*")))
+
+for filename in filenames:
+    print(filename)
+    out=uf.ImportChimeraRaw(filename, 200*1e3)
+    coefficients = {'a': 0.999, 'E': 0, 'S': 5, 'eventlengthLimit': 10e-3 * out['samplerate']}
+    fig1, ax = plt.subplots(1)
+    ax.plot(np.arange(0, len(out['i1']))/out['sampleratelp'], out['i1'] * 1e9, 'r')
+    ax.set_xlabel('Time [s]')
+    ax.set_ylabel('Current [nA]')
+    fig1.savefig(filename[:-4] + '.png', dpi=150)
+    fig1.clear()
+    plt.close()
\ No newline at end of file
diff --git a/PlotData.py b/PlotData.py
new file mode 100644
index 0000000..675a4fe
--- /dev/null
+++ b/PlotData.py
@@ -0,0 +1,68 @@
+from pyqtgraph.Qt import QtGui, QtCore
+from pyqtgraph import PlotDataItem
+from pyqtgraph import PlotItem
+import types
+from types import MethodType
+import scipy.signal as sig
+import numpy as np
+import scipy
+import pyqtgraph as pg
+import Functions as f
+
+
+inp = f.OpenFile('/Volumes/Michael/Axopatch/20180316/1msSignal_AppliedToModelCellThroughAxopatch_1.dat')
+
+#Downsampling Methods
+ds_factor = 10.0
+# Downsample data
+downsampled_data = sig.resample(np.float64(inp['i1']), np.uint64(len(inp['i1'])/ds_factor))
+downsampled_data_x = np.arange(0, len(downsampled_data))/(inp['samplerate']/ds_factor)
+
+# Replacement for the default getData function
+def getData2(obj):
+    print('In Function')
+    # Calculate the visible range
+    view_range = obj.viewRect()
+    print(view_range)
+    if view_range is not None:
+        dx = float(inp['i1'][1] - inp['i1'][0]) / (inp['i1'].size - 1)  # ndarray.size gives the sample count
+        x0 = (view_range.left() - inp['i1'][0]) / dx
+        x1 = (view_range.right() - inp['i1'][0]) / dx
+        # Decide whether to use downsampled or original data
+        if (x1 - x0) > 20000:
+            print('Is Downsampled')
+            obj.xData = downsampled_data_x
+            obj.yData = downsampled_data
+        else:
+            print('Not Downsampled')
+            obj.xData = np.arange(0, len(inp['i1']))/inp['samplerate']
+            obj.yData = inp['i1']
+    # Run the original getData of PlotDataItem
+    return PlotDataItem.getData(obj)
+
+
+#QtGui.QApplication.setGraphicsSystem('raster')
+app = QtGui.QApplication([])
+#mw = QtGui.QMainWindow()
+#mw.resize(800,800)
+
+win = pg.GraphicsWindow(title="Basic plotting examples")
+win.resize(1000, 600)
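+# getData2 above implements a simple level-of-detail scheme: if more than
+# 20000 samples fall inside the visible range it hands pyqtgraph the
+# pre-computed resampled trace, otherwise the raw data; it is attached to the
+# plot item below via types.MethodType so it runs on every data request.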
+win.setWindowTitle('pyqtgraph example: Plotting') + +p1 = PlotItem(y=downsampled_data, x=np.arange(0, len(downsampled_data))/(inp['samplerate']/ds_factor)) +p1.getData = types.MethodType(getData2, p1) + +# Enable antialiasing for prettier plots +pg.setConfigOptions(antialias=True) +#p1.addItem(win) +win.addItem(p1) +#p1 = win.addPlot(title="Ionic Current Trace", y=inp['i1'], x=np.arange(0,len(inp['i1']))/inp['samplerate']) +p1.setLabel('left', "Current", units='A') +p1.setLabel('bottom', "Time", units='s') + +## Start Qt event loop unless running in interactive mode or using pyside. +if __name__ == '__main__': + import sys + if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): + QtGui.QApplication.instance().exec_() diff --git a/PlotDataSimple.py b/PlotDataSimple.py new file mode 100644 index 0000000..dfb9cb9 --- /dev/null +++ b/PlotDataSimple.py @@ -0,0 +1,87 @@ +from pyqtgraph.Qt import QtGui, QtCore +from pyqtgraph import FileDialog +from pyqtgraph import PlotDataItem +from pyqtgraph import PlotItem +import types +from types import MethodType +import scipy.signal as sig +import numpy as np +import scipy +import pyqtgraph as pg +import Functions as f + + +inp = f.OpenFile('/Volumes/Michael/Axopatch/20180316/1msSignal_AppliedToModelCellThroughAxopatch_1.dat') + +#Downsampling Methods +ds_factor = 10.0 + +#QtGui.QApplication.setGraphicsSystem('raster') +app = QtGui.QApplication([]) +#mw = QtGui.QMainWindow() +#mw.resize(800,800) + +win = pg.GraphicsWindow(title="Basic plotting examples") +win.resize(1000, 600) + +win.setWindowTitle('pyqtgraph example: Plotting') +""" +p0 = win.addPlot(y=inp['i1'], x=np.arange(0, len(inp['i1']))/inp['samplerate']) +lr = pg.LinearRegionItem([1000, len(inp['i1']-1000)]) +lr.setZValue(-10) +p0.addItem(lr) +p0.hideButtons() +#p0.enableAutoRange(y=False, x=False) +p0.setMenuEnabled(enableMenu=False, enableViewBoxMenu='same') +pg.setConfigOptions(antialias=True) +p0.setLabel('left', "Current", units='A') +p0.setLabel('bottom', "Time", units='s') +p0.setMouseEnabled(x=False, y=False) +win.nextRow() +""" +p1 = win.addPlot(title='Ionic Current') +p1.setDownsampling(ds=10, auto=True, mode='subsample') +p1.setClipToView(True) +pg.setConfigOptions(antialias=True) +p1.plot(y=inp['i1'], x=np.arange(0, len(inp['i1']))/inp['samplerate']) +p1.setLabel('left', "Current", units='A') +p1.setLabel('bottom', "Time", units='s') + +win.nextRow() + +p2 = win.addPlot(title='Transverse Current') +p2.setDownsampling(ds=10, auto=True, mode='subsample') +p2.setClipToView(True) +pg.setConfigOptions(antialias=True) +p2.plot(y=inp['i2'], x=np.arange(0, len(inp['i2']))/inp['samplerate'], pen='r') +p2.setXLink(p1) +p2.setLabel('left', "Current", units='A') +p2.setLabel('bottom', "Time", units='s') + +win.nextRow() + +p3 = win.addPlot(title='Voltages') +p3.setDownsampling(ds=100, auto=True, mode='subsample') +p3.setClipToView(True) +pg.setConfigOptions(antialias=True) +p3.plot(y=inp['v2'], x=np.arange(0, len(inp['i2']))/inp['samplerate'], pen='r') +p3.plot(y=inp['v1'], x=np.arange(0, len(inp['i2']))/inp['samplerate']) +p3.setXLink(p1) +p3.setLabel('left', "Current", units='A') +p3.setLabel('bottom', "Time", units='s') +""" +def updatePlot(): + p1.setXRange(*lr.getRegion(), padding=0) +def updateRegion(): + lr.setRegion(p1.getViewBox().viewRange()[0]) +lr.sigRegionChanged.connect(updatePlot) +p1.sigXRangeChanged.connect(updateRegion) +updatePlot() +""" + + +## Start Qt event loop unless running in interactive mode or using pyside. 
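+## (pyqtgraph's standard example boilerplate: the event loop is skipped only
+## when Python is running interactively under PyQt, which provides its own loop.)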
+if __name__ == '__main__': + import sys + if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): + QtGui.QApplication.instance().exec_() diff --git a/PlotDataWithFileBrowser.py b/PlotDataWithFileBrowser.py new file mode 100644 index 0000000..72d431f --- /dev/null +++ b/PlotDataWithFileBrowser.py @@ -0,0 +1,75 @@ +from pyqtgraph.flowchart import Flowchart +from pyqtgraph.Qt import QtGui, QtCore +import pyqtgraph as pg +import numpy as np +import pyqtgraph.metaarray as metaarray + +app = QtGui.QApplication([]) + +## Create main window with grid layout +win = QtGui.QMainWindow() +win.setWindowTitle('pyqtgraph example: Flowchart') +cw = QtGui.QWidget() +win.setCentralWidget(cw) +layout = QtGui.QGridLayout() +cw.setLayout(layout) + +## Create flowchart, define input/output terminals +fc = Flowchart(terminals={ + 'dataIn': {'io': 'in'}, + 'dataOut': {'io': 'out'} +}) +w = fc.widget() + +## Add flowchart control panel to the main window +layout.addWidget(fc.widget(), 0, 0, 3, 1) + +## Add two plot widgets +pw1 = pg.PlotWidget() +pw2 = pg.PlotWidget() +pw3 = pg.PlotWidget() +layout.addWidget(pw1, 0, 1) +layout.addWidget(pw2, 1, 1) +layout.addWidget(pw3, 2, 1) + +win.show() + +## generate signal data to pass through the flowchart +data = np.random.normal(size=1000) +data[200:300] += 1 +data += np.sin(np.linspace(0, 100, 1000)) +data = metaarray.MetaArray(data, info=[{'name': 'Time', 'values': np.linspace(0, 1.0, len(data))}, {}]) + +## Feed data into the input terminal of the flowchart +fc.setInput(dataIn=data) + +## populate the flowchart with a basic set of processing nodes. +## (usually we let the user do this) +plotList = {'Top Plot': pw1, 'Bottom Plot': pw2} + +pw1Node = fc.createNode('PlotWidget', pos=(0, -150)) +pw1Node.setPlotList(plotList) +pw1Node.setPlot(pw1) + +pw2Node = fc.createNode('PlotWidget', pos=(150, -150)) +pw2Node.setPlot(pw2) +pw2Node.setPlotList(plotList) + +pw3Node = fc.createNode('PlotWidget', pos=(150, -150)) +pw3Node.setPlot(pw3) +pw3Node.setPlotList(plotList) + +fNode = fc.createNode('GaussianFilter', pos=(0, 0)) +fNode.ctrls['sigma'].setValue(5) +fc.connectTerminals(fc['dataIn'], fNode['In']) +fc.connectTerminals(fc['dataIn'], pw1Node['In']) +fc.connectTerminals(fNode['Out'], pw2Node['In']) +fc.connectTerminals(fNode['Out'], fc['dataOut']) + + + +## Start Qt event loop unless running in interactive mode or using pyside. 
+if __name__ == '__main__': + import sys + if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): + QtGui.QApplication.instance().exec_() diff --git a/PlotFile.py b/PlotFile.py new file mode 100644 index 0000000..0eb9ca0 --- /dev/null +++ b/PlotFile.py @@ -0,0 +1,26 @@ +import numpy as np +import scipy +from scipy.optimize import curve_fit +from scipy import constants as cst +import Functions as uf +import os +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +import h5py +import pandas as pd +from matplotlib.ticker import EngFormatter +from matplotlib import rc +#rc('text', usetex=True) +formCurrent = EngFormatter(unit='A', places=3) +formA = EngFormatter(unit='A', places=3) +formB = EngFormatter(unit='s', places=3) + +file = '/Volumes/lben/lben-commun/2018 User Data/Michael/Axopatch/20180504/W11R149_ph74_1M_1mM_KCl_100mW_relaxation_473.dat' +dat = uf.OpenFile(file) + +fig1 = plt.figure(1, figsize=(8, 4)) +ax_part1 = fig1.add_subplot(1, 1, 1) + +ax_part1.plot(dat['i1']) + +plt.show() \ No newline at end of file diff --git a/PlotFile2Channels.py b/PlotFile2Channels.py new file mode 100644 index 0000000..ef8296d --- /dev/null +++ b/PlotFile2Channels.py @@ -0,0 +1,70 @@ +import UsefulFunctions as uf +import pyqtgraph as pg +import matplotlib.pyplot as plt +from timeit import default_timer as timer +import scipy +import numpy as np +import os +import matplotlib +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.font_manager import FontProperties +from tkinter import Tk +from tkinter.filedialog import askopenfilenames +from matplotlib.ticker import EngFormatter +matplotlib.rcParams['agg.path.chunksize'] = 100000 + +fontP = FontProperties() +fontP.set_size('small') +props = dict(boxstyle='round', facecolor='wheat', alpha=0.5) +Tk().withdraw() +os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "python" to true' ''') + +#filenames = ['/Volumes/backup/2017/Michael/Axopatch/20170512/10mMKClInFlowCellORingPore1mm.dat'] +filenames = ['/Volumes/lben-archives/2016/Michael/Axopatch/21112016/17B_100mMCis1MtransKCl_80mer_9.dat'] +expname = 'All' + +#filenames = askopenfilenames() # show an "Open" dialog box and return the path to the selected file +#filenames={'/Users/migraf/Desktop/22B_UpperRibbonOverNight_.dat'} + +#With Voltages +volt = 1 + +for filename in filenames: + print(filename) + out = uf.ImportAxopatchData(filename) + fig = plt.figure(1, figsize=(8.7,5.96) ) + + if volt: + ax = fig.add_subplot(311) + ax2 = fig.add_subplot(312, sharex = ax) + ax3 = fig.add_subplot(313, sharex = ax) + ax3.yaxis.set_major_formatter(EngFormatter(unit='V')) + ax3.set_ylabel('Voltage') + else: + ax = fig.add_subplot(211) + ax2 = fig.add_subplot(212, sharex = ax) + + ax.plot(np.arange(0, len(out['i1']))/out['samplerate'], out['i1'], 'b') + ax2.plot(np.arange(0, len(out['i2']))/out['samplerate'], out['i2'], 'r') + + if volt: + ax3.plot(np.arange(0, len(out['i1']))/out['samplerate'], out['v1'], 'b') + ax3.plot(np.arange(0, len(out['i1'])) / out['samplerate'], out['v2'], 'r') + #ax.set_xlim(41000, 42600) + #ax2.set_xlim(41000, 42600) + #ax.set_ylim(-5,15) + + ax.set_xlabel('Time') + ax2.set_xlabel('Time') + + ax.set_ylabel('Ionic Current') + ax2.set_ylabel('Transverse Current') + + ax.xaxis.set_major_formatter(EngFormatter(unit='s')) + ax.yaxis.set_major_formatter(EngFormatter(unit='A')) + ax2.yaxis.set_major_formatter(EngFormatter(unit='A')) + + + #fig1.savefig(filename[:-4] + '.png', dpi=300) + plt.show() + 
#fig1.clear() diff --git a/SaveAllEvents.py b/SaveAllEvents.py new file mode 100644 index 0000000..58db9b8 --- /dev/null +++ b/SaveAllEvents.py @@ -0,0 +1,241 @@ +## Cross-correlation of two channels +import AnalysisParameters as pm +import numpy as np +import scipy +import scipy.signal as sig +import Functions as fu +import os +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.font_manager import FontProperties +import pandas as pd +import MiscParameters as pm +from matplotlib.ticker import EngFormatter +import h5py +from tkinter.filedialog import askopenfilenames +from matplotlib import rc +rc('mathtext', default='regular') +pm.init(LoadFiles = 0) + +baseline = 250 +fitOn = 0 +percentage = 1 +plotPoints=0 + +filenames = {'/Users/migraf/Desktop/Chan/Data/R43_KCl gradient_100mMcis_1mM trans_4_20180405_182358_OriginalDB.hdf5'} +#filenames = askopenfilenames() + +for filename in filenames: + print(filename) + f = h5py.File(filename, 'r') + out = fu.OpenFile(f['General/FileName'].value) + file = os.sep + str(os.path.split(filename)[1][:-5]) + + # Graphene Or Not?? + if not out['graphene']: + ## Only need to plot i1 and i1_Up and v1 + #toPlot = ['i1', 'i1_Up'] + toPlot = ['i1'] + for k in toPlot: + NumberOfEvents=f['LowPassSegmentation/' + k + '/NumberOfEvents'].value + if NumberOfEvents is not 0: + pp = PdfPages(pm.OutputFolder + file + '_Only_' + k + '_EventPlots.pdf') + fig = plt.figure() + ax1 = fig.add_subplot(211) + ax2 = fig.add_subplot(212, sharex=ax1) + ax1.set_title('Ionic Current Trace "' + k + '"') + ax2.set_title('Ionic Voltage') + ax1.set_ylabel('Current [nA]') + ax2.set_ylabel('Voltage [V]') + ax1.set_xlabel('Time [s]') + ax2.set_xlabel('Time [s]') + ax1.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax2.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax1.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax2.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + line1, = ax1.plot([]) + line2, = ax2.plot([], 'c') + if fitOn: + line1f, = ax1.plot([], 'r') + for i in np.linspace(0, NumberOfEvents-1, NumberOfEvents*percentage, dtype=np.uint64): + startp_i1 = np.int64( + f['LowPassSegmentation/' + k + '/' + 'StartPoints'].value[i] - baseline) + endp_i1 = np.int64( + f['LowPassSegmentation/' + k + '/' + 'EndPoints'].value[i] + baseline) + if startp_i1 < 0: + startp_i1 = 0 + if endp_i1 > len(out['i1']): + endp_i1 = len(out['i1']) + if fitOn: + if plotPoints: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i], + f['LowPassSegmentation/' + k + '/AllEvents/' + '{:09d}'.format(i)].value[i], + np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i]]) + else: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i], + np.ones(endp_i1 - startp_i1 - 2 * baseline) * + f['LowPassSegmentation/' + k + '/FitLevel'].value[i], + np.ones(baseline) * + f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i]]) + line1f.set_data(np.arange(startp_i1, endp_i1)/out['samplerate'], fit1) + line1.set_data(np.arange(startp_i1, endp_i1)/out['samplerate'], out['i1'][startp_i1:endp_i1]) + line2.set_data(np.arange(startp_i1, endp_i1)/out['samplerate'], out['v1'][startp_i1:endp_i1]) + + ax1.set_title('{} Event {}\n Ionic Current Trace'.format(filter, i)) + ax1.relim() + ax2.relim() + ax1.autoscale_view(True, True, True) + ax2.autoscale_view(True, True, True) + fig.canvas.draw() + 
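# The Line2D artists are updated in place with set_data() rather than
+                    # re-plotted, so a relim/autoscale plus one canvas redraw is
+                    # all that is needed before the figure is appended as a new
+                    # page of the PdfPages document. +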
pp.savefig(fig) + print('{}: {} out of {} saved!'.format(toPlot, str(i), NumberOfEvents-1)) + pp.close() + else: + toPlot = [] + #Initialize all the cases + toPlot.append(['i1', '', np.uint64(f['LowPassSegmentation/i1/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i1/OnlyIndexes'].value)]) + toPlot.append(['i1_Up', '', np.uint64(f['LowPassSegmentation/i1_Up/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i1_Up/OnlyIndexes'].value)]) + toPlot.append(['', 'i2', np.uint64(f['LowPassSegmentation/i2/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i2/OnlyIndexes'].value)]) + toPlot.append(['', 'i2_Up', np.uint64(f['LowPassSegmentation/i2_Up/OnlyIndexes'].value), np.uint64(f['LowPassSegmentation/i2_Up/OnlyIndexes'].value)]) + toPlot.append(['i1', 'i2', np.uint64(f['LowPassSegmentation/i1/CommonIndexesWithi2'].value), np.uint64(f['LowPassSegmentation/i2/CommonIndexesWithi1'].value)]) + toPlot.append(['i1_Up', 'i2', np.uint64(f['LowPassSegmentation/i1_Up/CommonIndexesWithi2'].value), np.uint64(f['LowPassSegmentation/i2/CommonIndexesWithi1_Up'].value)]) + toPlot.append(['i1_Up', 'i2_Up', np.uint64(f['LowPassSegmentation/i1_Up/CommonIndexesWithi2_Up'].value), np.uint64(f['LowPassSegmentation/i2_Up/CommonIndexesWithi1_Up'].value)]) + toPlot.append(['i1', 'i2_Up', np.uint64(f['LowPassSegmentation/i1/CommonIndexesWithi2_Up'].value), np.uint64(f['LowPassSegmentation/i2_Up/CommonIndexesWithi1'].value)]) + fig = plt.figure() + ax1 = fig.add_subplot(311) + ax2 = fig.add_subplot(312, sharex=ax1) + ax3 = fig.add_subplot(313, sharex=ax1) + ax4 = ax3.twinx() + ax4.set_title('Voltage Trace') + ax1.set_ylabel('Current [nA]') + ax2.set_ylabel('Current [nA]') + ax3.set_ylabel('Ionic Voltage [V]') + ax3.set_xlabel('Time [s]') + ax1.set_xlabel('Time [s]') + ax2.set_xlabel('Time [s]') + ax1.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax2.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax3.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax4.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax3.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax1.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax2.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax4.set_ylabel('Tr. 
Voltage [V]') + line1, = ax1.plot([]) + line2, = ax2.plot([]) + line3, = ax3.plot([], 'c') + line4, = ax4.plot([], 'y') + ax4.tick_params(axis='y', colors='y') + ax4.yaxis.label.set_color('y') + ax3.tick_params(axis='y', colors='c') + ax3.yaxis.label.set_color('c') + + if fitOn: + line1f, = ax1.plot([], 'r') + line2f, = ax2.plot([], 'r') + + for cases in toPlot: + ax2.set_title('Transverse Current Trace "' + cases[1] + '"') + pp = PdfPages(pm.OutputFolder + file + '_' + cases[0] + '_vs_' + cases[1] + '_EventPlots.pdf') + NumberOfEvents = len(cases[2]) + for i in np.linspace(0, NumberOfEvents-1, NumberOfEvents*percentage, dtype=np.uint64): + if not cases[0] == '': + startp_i1 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'StartPoints'].value[cases[2][i]] - baseline) + endp_i1 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'EndPoints'].value[cases[2][i]] + baseline) + else: + startp_i1 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'StartPoints'].value[cases[3][i]] - baseline) + endp_i1 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'EndPoints'].value[cases[3][i]] + baseline) + if not cases[1] == '': + startp_i2 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'StartPoints'].value[cases[3][i]] - baseline) + endp_i2 = np.int64( + f['LowPassSegmentation/' + cases[1] + '/' + 'EndPoints'].value[cases[3][i]] + baseline) + else: + startp_i2 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'StartPoints'].value[cases[2][i]] - baseline) + endp_i2 = np.int64( + f['LowPassSegmentation/' + cases[0] + '/' + 'EndPoints'].value[cases[2][i]] + baseline) + if fitOn: + if not cases[0] == '': + if plotPoints: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]], + f['LowPassSegmentation/' + cases[0] + '/AllEvents/' + '{:09d}'.format(cases[2][i])].value, + np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]]]) + else: + fit1 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]], + np.ones(endp_i1 - startp_i1 - 2 * baseline) * f[ + 'LowPassSegmentation/' + cases[0] + '/FitLevel'].value[cases[2][i]], + np.ones(baseline) * + f['LowPassSegmentation/' + cases[0] + '/LocalBaseline'].value[cases[2][i]]]) + line1f.set_data(np.arange(startp_i1, endp_i1) / out['samplerate'], fit1) + else: + line1f.set_data([], []) + if not cases[1] == '': + if plotPoints: + fit2 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + '/LocalBaseline'].value[cases[3][i]], + f['LowPassSegmentation/' + cases[1] + '/AllEvents/' + '{:09d}'.format( + cases[3][i])].value, + np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + '/LocalBaseline'].value[cases[3][i]]]) + else: + fit2 = np.concatenate([np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + '/LocalBaseline'].value[cases[3][i]], + np.ones(endp_i2 - startp_i2 - 2 * baseline) * f['LowPassSegmentation/' + cases[1] + '/FitLevel'].value[cases[3][i]], + np.ones(baseline) * + f['LowPassSegmentation/' + cases[1] + '/LocalBaseline'].value[cases[3][i]]]) + line2f.set_data(np.arange(startp_i2, endp_i2) / out['samplerate'], fit2) + else: + line2f.set_data([], []) + + if startp_i1 < 0: + startp_i1 = 0 + if startp_i2 < 0: + startp_i2 = 0 + if endp_i1 > len(out['i1']): + endp_i1 = len(out['i1']) + if endp_i2 > len(out['i2']): + endp_i2 = len(out['i2']) + + i1 = out['i1'][startp_i1:endp_i1] + v1 = out['v1'][startp_i1:endp_i1] + t1 = 
np.arange(len(i1)) / out['samplerate'] + i2 = out['i2'][startp_i2:endp_i2] + v2 = out['v2'][startp_i1:endp_i1] + t2 = np.arange(len(i2)) / out['samplerate'] + + line1.set_data(t1, i1) + line3.set_data(t1, v1) + line4.set_data(t1, v2) + line2.set_data(t2, i2) + + ax1.set_title('Event {} of {}\n Ionic Current Trace "{}"'.format(i, NumberOfEvents, cases[0])) + ax1.relim() + ax2.relim() + ax3.relim() + ax4.relim() + ax1.autoscale_view(True, True, True) + ax2.autoscale_view(True, True, True) + # voltage plots + ax3.autoscale_view(True, True, False) + ax4.autoscale_view(True, True, False) + ax3.set_ylim(np.min(v1) - np.mean(v1) * 5 / 10, np.max(v1) + np.mean(v1) * 1 / 10) + ax4.set_ylim([np.min(v2) - np.mean(v2) * 2 / 10, np.max(v2) + np.mean(v2) * 5 / 10]) + fig.tight_layout() + fig.canvas.draw() + pp.savefig(fig) + print('{} out of {} saved!'.format(str(i), NumberOfEvents - 1)) + pp.close() + plt.close() + + diff --git a/SaveAllEventsSingleChannel.py b/SaveAllEventsSingleChannel.py new file mode 100644 index 0000000..650cb4d --- /dev/null +++ b/SaveAllEventsSingleChannel.py @@ -0,0 +1,114 @@ +## Cross-correlation of two channels +import AnalysisParameters as pm +import numpy as np +import scipy +import scipy.signal as sig +import Functions as fu +import os +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.font_manager import FontProperties +import pandas as pd +import MiscParameters as pm +from matplotlib.ticker import EngFormatter +import h5py +from tkinter.filedialog import askopenfilenames +from matplotlib import rc +rc('mathtext', default='regular') +pm.init(LoadFiles = 0) + +baselinetime = 10e-3 +fitOn = 1 +percentage = 1 +plotPoints=0 +AllInOneFile = 0 + +#filenames = {'/Volumes/lben/lben-commun/2018 User Data/Martina/Axonpatch/20180518_13A/Results/13A_1MKCl_events250mV3_OriginalDB.hdf5'} +filenames = askopenfilenames() + +if AllInOneFile: + pp = PdfPages(pm.OutputFolder + 'All_EventPlots.pdf') + fig = plt.figure() + ax1 = fig.add_subplot(211) + ax2 = fig.add_subplot(212, sharex=ax1) + ax2.set_title('Ionic Voltage') + ax1.set_ylabel('Current') + ax2.set_ylabel('Voltage') + ax1.set_xlabel('Time') + ax2.set_xlabel('Time') + ax1.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax2.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax1.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax2.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + line1, = ax1.plot([]) + line2, = ax2.plot([], 'c') + if fitOn: + line1f, = ax1.plot([], 'r', ls='dashed') + +for filename in filenames: + print(filename) + f = h5py.File(filename, 'r') + out = fu.OpenFile(f['General/FileName'].value) + baseline = np.uint64(baselinetime * out['samplerate']) + file = os.sep + str(os.path.split(filename)[1][:-5]) + toPlot = ['i1'] + for k in toPlot: + NumberOfEvents=f['LowPassSegmentation/' + k + '/NumberOfEvents'].value + if NumberOfEvents is not 0: + if not AllInOneFile: + pp = PdfPages(pm.OutputFolder + file + '_Only_' + k + '_EventPlots.pdf') + fig = plt.figure() + ax1 = fig.add_subplot(211) + ax2 = fig.add_subplot(212, sharex=ax1) + ax2.set_title('Ionic Voltage') + ax1.set_ylabel('Current') + ax2.set_ylabel('Voltage') + ax1.set_xlabel('Time') + ax2.set_xlabel('Time') + ax1.yaxis.set_major_formatter(EngFormatter(unit='A', places=1)) + ax2.yaxis.set_major_formatter(EngFormatter(unit='V', places=1)) + ax1.xaxis.set_major_formatter(EngFormatter(unit='s', places=1)) + ax2.xaxis.set_major_formatter(EngFormatter(unit='s', 
+                line1, = ax1.plot([])
+                line2, = ax2.plot([], 'c')
+                if fitOn:
+                    line1f, = ax1.plot([], 'r', ls='dashed')
+            for i in np.linspace(0, NumberOfEvents-1, NumberOfEvents*percentage, dtype=np.uint64):
+                startp_i1 = np.int64(f['LowPassSegmentation/' + k + '/' + 'StartPoints'].value[i] - baseline)
+                endp_i1 = np.int64(f['LowPassSegmentation/' + k + '/' + 'EndPoints'].value[i] + baseline)
+                if startp_i1 < 2*baseline:
+                    continue
+                if endp_i1 > len(out['i1']):
+                    endp_i1 = len(out['i1'])
+                if fitOn:
+                    if plotPoints:
+                        fit1 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i],
+                                               f['LowPassSegmentation/' + k + '/AllEvents/' + '{:09d}'.format(i)].value,
+                                               np.ones(baseline) * f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i]])
+                    else:
+                        fit1 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i],
+                                               np.ones(np.uint64(endp_i1 - startp_i1 - 2 * baseline)) * f['LowPassSegmentation/' + k + '/FitLevel'].value[i],
+                                               np.ones(baseline) * f['LowPassSegmentation/' + k + '/LocalBaseline'].value[i]])
+                    line1f.set_data(np.arange(endp_i1-startp_i1)/out['samplerate'], fit1)
+                line1.set_data(np.arange(endp_i1-startp_i1)/out['samplerate'], out['i1'][startp_i1:endp_i1])
+                line2.set_data(np.arange(endp_i1-startp_i1)/out['samplerate'], out['v1'][startp_i1:endp_i1])
+
+                ax1.set_title('Event {}\n {}'.format(i, file[1:-11]))
+                ax1.relim()
+                ax2.relim()
+                ax1.autoscale_view(True, True, True)
+                ax2.autoscale_view(True, True, True)
+                fig.canvas.draw()
+                pp.savefig(fig)
+                print('{}: {} out of {} saved!'.format(k, str(i), NumberOfEvents-1))
+            if not AllInOneFile:
+                pp.close()
+if AllInOneFile:
+    pp.close()
\ No newline at end of file
diff --git a/SaveAllEventsToPDF.py b/SaveAllEventsToPDF.py
new file mode 100644
index 0000000..f148851
--- /dev/null
+++ b/SaveAllEventsToPDF.py
@@ -0,0 +1,245 @@
+## Save detected events of both channels to PDF
+import AnalysisParameters as pm
+import numpy as np
+import scipy
+import scipy.signal as sig
+import Functions as fu
+import os
+import matplotlib.pyplot as plt
+from matplotlib.backends.backend_pdf import PdfPages
+from matplotlib.font_manager import FontProperties
+import pandas as pd
+import MiscParameters as pm  # note: this rebinds pm, shadowing AnalysisParameters above
+import h5py
+from tkinter.filedialog import askopenfilenames
+from matplotlib import rc
+rc('mathtext', default='regular')
+pm.init(LoadFiles = 0)
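+
+# Reference for the reads below: the *_OriginalDB.hdf5 event databases used by
+# these plotting scripts are assumed to contain, per analysed channel,
+#   LowPassSegmentation/<ch>/NumberOfEvents
+#   LowPassSegmentation/<ch>/StartPoints, EndPoints
+#   LowPassSegmentation/<ch>/LocalBaseline, DeltaI, FitLevel
+#   LowPassSegmentation/<ch>/AllEvents/000000000, 000000001, ...
+#   LowPassSegmentation/<ch>/CommonIndexesWith<other>, OnlyIndexesWith<other>
+# with <ch> one of 'i1', 'i2', 'i1_Up', 'i2_Up', plus General/FileName.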
+
+baseline = 250
+fitOn = 1
+
+filenames = {'/Users/migraf/Desktop/Chan/Data/R43_KCl gradient_100mMcis_1mM trans_4_20180405_182358_OriginalDB.hdf5'}
+#filenames = askopenfilenames()
+
+for filename in filenames:
+    print(filename)
+    f = h5py.File(filename, 'r')
+    out = fu.OpenFile(f['General/FileName'].value)
+    file = os.sep + str(os.path.split(filename)[1][:-5])
+
+    Combinations={}
+
+    categories1 = ['i1', 'i1_Up']
+
+    if out['graphene']:
+        filters = ['CommonIndexes', 'OnlyIndexesi1', 'OnlyIndexesi2']
+        categories2 = ['i2', 'i2_Up']
+    else:
+        categories2 = ['']
+        filters = ['OnlyIndexesi1']
+
+    for cat1 in categories1:
+        for cat2 in categories2:
+            for filter in filters:
+                print(filter)
+                pp = PdfPages(pm.OutputFolder + file + '_' + cat1 + '_vs_' + cat2 + '_' + filter + '_EventPlots.pdf')
+                fig = plt.figure()
+                ax1 = fig.add_subplot(311)
+                ax2 = fig.add_subplot(312, sharex = ax1)
+                ax3 = fig.add_subplot(313, sharex = ax1)
+                ax4 = ax3.twinx()
+                ax1.set_title('Ionic Current Trace "' + cat1 + '"')
+                ax2.set_title('Transverse Current Trace "' + cat2 + '"')
+                ax4.set_title('Voltage Trace')
+                ax1.set_ylabel('Current [nA]')
+                ax2.set_ylabel('Current [nA]')
+                ax3.set_ylabel('Ionic Voltage [V]')
+                ax3.set_xlabel('Time [ms]')
+                ax1.set_xlabel('Time [ms]')
+                ax2.set_xlabel('Time [ms]')
+                ax4.set_ylabel('Tr. Voltage [V]')
+                line1, = ax1.plot([])
+                line2, = ax2.plot([])
+                line3, = ax3.plot([], 'c')
+                line4, = ax4.plot([], 'y')
+                ax4.tick_params(axis='y', colors = 'y')
+                ax4.yaxis.label.set_color('y')
+                ax3.tick_params(axis='y', colors = 'c')
+                ax3.yaxis.label.set_color('c')
+
+                if fitOn:
+                    line1f, = ax1.plot([], 'r')
+                    line2f, = ax2.plot([], 'r')
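+
+                # The three filter modes select which detected events are plotted:
+                #   CommonIndexes  - events found in both channels (paired index lists),
+                #   OnlyIndexesi1  - events found only in the ionic channel (or all
+                #                    events when no transverse channel was recorded),
+                #   OnlyIndexesi2  - events found only in the transverse channel.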
+
+                # Generate Event List
+                if filter == 'CommonIndexes':
+                    i1_ActualEventList = np.uint64(f['LowPassSegmentation/' + cat1 + '/CommonIndexesWith' + cat2].value)
+                    i2_ActualEventList = np.uint64(f['LowPassSegmentation/' + cat2 + '/CommonIndexesWith' + cat1].value)
+                    NumberOfEvents = len(i2_ActualEventList)
+                if filter == 'OnlyIndexesi1' and out['graphene']:
+                    i1_ActualEventList = np.uint64(f['LowPassSegmentation/' + cat1 + '/OnlyIndexesWith' + cat2].value)
+                    i2_ActualEventList = np.uint64(f['LowPassSegmentation/' + cat1 + '/OnlyIndexesWith' + cat2].value)
+                    NumberOfEvents = len(i1_ActualEventList)
+                if filter == 'OnlyIndexesi1' and not out['graphene']:
+                    i1_ActualEventList = np.arange(f['LowPassSegmentation/' + cat1 + '/NumberOfEvents'].value)
+                    NumberOfEvents = len(i1_ActualEventList)
+                if filter == 'OnlyIndexesi2':
+                    i1_ActualEventList = np.uint64(f['LowPassSegmentation/' + cat2 + '/OnlyIndexesWith' + cat1].value)
+                    i2_ActualEventList = np.uint64(f['LowPassSegmentation/' + cat2 + '/OnlyIndexesWith' + cat1].value)
+                    NumberOfEvents = len(i1_ActualEventList)
+
+                for i in np.arange(0, NumberOfEvents):
+                    if filter == 'OnlyIndexesi2':
+                        startp_i2 = np.int64(f['LowPassSegmentation/'+ cat2 + '/' + 'StartPoints'].value[i2_ActualEventList[i]] - baseline)
+                        endp_i2 = np.int64(f['LowPassSegmentation/' + cat2 + '/' + 'EndPoints'].value[i2_ActualEventList[i]] + baseline)
+                        startp_i1 = startp_i2
+                        endp_i1 = endp_i2
+                    if filter == 'OnlyIndexesi1':
+                        startp_i1 = np.int64(f['LowPassSegmentation/' + cat1 + '/' + 'StartPoints'].value[i1_ActualEventList[i]] - baseline)
+                        endp_i1 = np.int64(f['LowPassSegmentation/' + cat1 + '/' + 'EndPoints'].value[i1_ActualEventList[i]] + baseline)
+                        startp_i2 = startp_i1
+                        endp_i2 = endp_i1
+                    if filter == 'CommonIndexes':
+                        startp_i1 = np.int64(f['LowPassSegmentation/' + cat1 + '/' + 'StartPoints'].value[i1_ActualEventList[i]] - baseline)
+                        endp_i1 = np.int64(f['LowPassSegmentation/' + cat1 + '/' + 'EndPoints'].value[i1_ActualEventList[i]] + baseline)
+                        startp_i2 = np.int64(f['LowPassSegmentation/' + cat2 + '/' + 'StartPoints'].value[i2_ActualEventList[i]] - baseline)
+                        endp_i2 = np.int64(f['LowPassSegmentation/' + cat2 + '/' + 'EndPoints'].value[i2_ActualEventList[i]] + baseline)
+                    if startp_i1 < 0:
+                        startp_i1 = 0
+                    if startp_i2 < 0:
+                        startp_i2 = 0
+                    if endp_i1 > len(out['i1']):
+                        endp_i1 = len(out['i1'])
+                    if out['graphene']:
+                        if endp_i2 > len(out['i2']):
+                            endp_i2 = len(out['i2'])
+                        i2 = out['i2'][startp_i2:endp_i2]
+                        v2 = out['v2'][startp_i1:endp_i1]
+                        t2 = np.arange(startp_i2, endp_i2) / out['samplerate']
+
+                    i1 = out['i1'][startp_i1:endp_i1]
+                    v1 = out['v1'][startp_i1:endp_i1]
+                    t1 = np.arange(startp_i1, endp_i1)/out['samplerate']
+
+                    if fitOn:
+                        if filter == 'OnlyIndexesi1' and (endp_i2 - startp_i2 - 2 * baseline)>0:
+                            if len(cat1) > 2:
+                                fit1 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]],
+                                                       np.ones(endp_i1-startp_i1-2*baseline) * (f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]] + f['LowPassSegmentation/' + cat1 + '/DeltaI'].value[i1_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]]])
+                            else:
+                                fit1 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]],
+                                                       np.ones(endp_i1 - startp_i1 - 2 * baseline) * (f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]] - f['LowPassSegmentation/' + cat1 + '/DeltaI'].value[i1_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]]])
+
+                            fit2=0
+                            line1f.set_data(t1, fit1)
+                            line2f.set_data([], [])
+                        elif filter == 'OnlyIndexesi2' and (endp_i2 - startp_i2 - 2 * baseline)>0:
+                            if len(cat2) > 2:  # cat2 (transverse), as in the CommonIndexes branch below
+                                fit2 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]],
+                                                       np.ones(endp_i2-startp_i2-2*baseline) * (f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]] + f['LowPassSegmentation/' + cat2 + '/DeltaI'].value[i2_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]]])
+                            else:
+                                fit2 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]],
+                                                       np.ones(endp_i2 - startp_i2 - 2 * baseline) * (f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]] - f['LowPassSegmentation/' + cat2 + '/DeltaI'].value[i2_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]]])
+                            fit1=0
+                            line1f.set_data([], [])
+                            line2f.set_data(t2, fit2)
+                        elif filter == 'CommonIndexes' and (endp_i2 - startp_i2 - 2 * baseline)>0:
+                            if len(cat2) > 2:
+                                fit2 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]],
+                                                       np.ones(endp_i2 - startp_i2 - 2 * baseline) * (f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]] + f['LowPassSegmentation/' + cat2 + '/DeltaI'].value[i2_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]]])
+                            else:
+                                fit2 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]],
+                                                       np.ones(endp_i2 - startp_i2 - 2 * baseline) * (f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]] - f['LowPassSegmentation/' + cat2 + '/DeltaI'].value[i2_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat2 + '/LocalBaseline'].value[i2_ActualEventList[i]]])
+                            if len(cat1) > 2:
+                                fit1 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]],
+                                                       np.ones(endp_i1 - startp_i1 - 2 * baseline) * (f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]] + f['LowPassSegmentation/' + cat1 + '/DeltaI'].value[i1_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]]])
+                            else:
+                                fit1 = np.concatenate([np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]],
+                                                       np.ones(endp_i1 - startp_i1 - 2 * baseline) * (f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]] - f['LowPassSegmentation/' + cat1 + '/DeltaI'].value[i1_ActualEventList[i]]),
+                                                       np.ones(baseline) * f['LowPassSegmentation/' + cat1 + '/LocalBaseline'].value[i1_ActualEventList[i]]])
+
+                            line1f.set_data(t1, fit1)
+                            line2f.set_data(t2, fit2)
+                        else:
+                            line1f.set_data([], [])
+                            line2f.set_data([], [])
+
+                    #print('Values\t i1: {}, t1:{}, i2:{}, t2:{}\nFit1:{}, Fit2:{}'.format(len(i1),len(t1), len(i2),len(t2), len(fit1),len(fit2)))
+
+                    line1.set_data(t1, i1)
+                    line3.set_data(t1, v1)
+                    if out['graphene']:
+                        line4.set_data(t1, v2)
+                        line2.set_data(t2, i2)
+
+                    ax1.set_title('{} Event {}\n Ionic Current Trace'.format(filter, i))
+                    ax1.relim()
+                    ax2.relim()
+                    ax3.relim()
+                    ax4.relim()
+                    ax1.autoscale_view(True, True, True)
+                    ax2.autoscale_view(True, True, True)
+                    # voltage plots
+                    ax3.autoscale_view(True, True, False)
+                    ax4.autoscale_view(True, True, False)
+                    ax3.set_ylim(np.min(v1)-np.mean(v1)*5/10, np.max(v1)+np.mean(v1)*1/10)
+                    if out['graphene']:
+                        ax4.set_ylim([np.min(v2)-np.mean(v2)*2/10, np.max(v2)+np.mean(v2)*5/10])
+                    fig.tight_layout()
+                    fig.canvas.draw()
+                    pp.savefig(fig)
+                    print('{} out of {} saved!'.format(str(i), NumberOfEvents-1))
+
+                pp.close()
+                if NumberOfEvents == 0:
+                    print('Empty PDF deleted: {}'.format(pm.OutputFolder + file + '_' + cat1 + '_vs_' + cat2 + '_' + filter + '_EventPlots.pdf'))
+                    os.remove(pm.OutputFolder + file + '_' + cat1 + '_vs_' + cat2 + '_' + filter + '_EventPlots.pdf')
+                plt.close()
\ No newline at end of file
diff --git a/ScatterPlots.py b/ScatterPlots.py
new file mode 100644
index 0000000..3ffdaec
--- /dev/null
+++ b/ScatterPlots.py
@@ -0,0 +1,57 @@
+import AnalysisParameters as pm
+import numpy as np
+import scipy
+import scipy.signal as sig
+import Functions as f
+import os
+import matplotlib.pyplot as plt
+from matplotlib.backends.backend_pdf import PdfPages
+from matplotlib.font_manager import FontProperties
+import pandas as pd
+import h5py
+from tkinter.filedialog import askopenfilenames
+pm.init()
+
+#currents = {'i1','i2'}
+currents = {'i1'}
+
+#filenames = askopenfilenames() # show an "Open" dialog box and return the path to the selected file
+filenames = {'/Users/migraf/Desktop/Chan/Data/R43_KCl gradient_100mMcis_1mM trans_4_20180405_182358_OriginalDB.hdf5'}
+count = 0
+
+dt = {}
+dI = {}
+count = {}
+ind = {}
+for k in currents:
+    dt[k] = np.array([])
+    dI[k] = np.array([])
+    count[k] = 0
+
+for k in currents:
+    for file in filenames:
+        print(file)
+        f = h5py.File(file, 'r')
+        dt[k] = np.append(dt[k], f['LowPassSegmentation/' + k + '/DwellTime'].value)
+        dI[k] = np.append(dI[k], f['LowPassSegmentation/' + k + '/FractionalCurrentDrop'].value)
+        ind[k] = f['LowPassSegmentation/' + k + '/CommonIndex'].value
+        count[k] += len(dt[k])
+
+fig1, ax = plt.subplots(1)
+ax.plot(dt['i1'] * 1e3, dI['i1']*100, 'ob', label = 'Ionic Current')
+if 'i2' in dt:  # only present when the transverse channel was analysed
+    ax.plot(dt['i2'] * 1e3, dI['i2']*100, 'or', label = 'Transverse Current')
+
+#ax.set_xlim(0, 2)
+#ax.set_ylim(10, 40)
+
+ax.set_xlabel('Time [ms]')
+ax.set_ylabel('Current Drop dI/I0 [%]')
+ax.set_title('MoS2 transverse detection, {} events'.format(sum(count.values())))
+
+fig1.savefig(file[:-4] + 'Scatter.png', dpi=300)
+fig1.savefig(file[:-4] + 'Scatter.eps')
+
+plt.show()
+
+#fig1.clear()
+#fig1.close()
\ No newline at end of file
diff --git a/ScatterPlotsForSingleChannel.py b/ScatterPlotsForSingleChannel.py
new file mode 100644
index 0000000..5f9d7df
--- /dev/null
+++ b/ScatterPlotsForSingleChannel.py
@@ -0,0 +1,187 @@
+import numpy as np
+import scipy
+import scipy.signal as sig
+import Functions as f +import os +import matplotlib.pyplot as plt +import matplotlib +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.font_manager import FontProperties +from matplotlib.ticker import EngFormatter +import pandas as pd +import h5py +from tkinter.filedialog import askopenfilenames + + +filenames = askopenfilenames() # show an "Open" dialog box and return the path to the selected file +#filenames = {'/Volumes/lben/lben-commun/2018 User Data/Martina/Axonpatch/20180518_13A/Results/13A_1MKCl_events250mV3_OriginalDB.hdf5'} + +Name = '13A Crown in 1M KCl + something, Axopatch' +cm = plt.cm.get_cmap('RdYlBu') + +count = 0 +dt = {} +dIF = {} +count = {} + +dt = np.array([]) +dIF = np.array([]) +dI = np.array([]) +v = np.array([]) +t = np.array([]) +t2 = np.array([]) +count = 0 + +for file in filenames: + print(file) + f = h5py.File(file, 'r') + dt = np.append(dt, f['LowPassSegmentation/' + 'i1' + '/DwellTime'].value) + dIF = np.append(dIF, f['LowPassSegmentation/' + 'i1' + '/FractionalCurrentDrop'].value) + dI = np.append(dI, f['LowPassSegmentation/' + 'i1' + '/DeltaI'].value) + t = np.append(t, f['General/TimeFileWritten'].value*np.ones(len(f['LowPassSegmentation/' + 'i1' + '/DeltaI'].value))) + t2 = np.append(t2, f['General/TimeFileLastModified'].value*np.ones(len(f['LowPassSegmentation/' + 'i1' + '/DeltaI'].value))) + v = np.append(v, f['LowPassSegmentation/' + 'i1' + '/LocalVoltage'].value) + count += len(f['LowPassSegmentation/' + 'i1' + '/DeltaI'].value) + +# Sort the voltages. +availableVoltages = np.unique(v[np.where(v > 0)]) +print(availableVoltages) +data = [] +datadII0 = [] +dataDwell = [] +dataRate = [] +labels = [] +NEvents = [] +for vo in availableVoltages: + ind = np.argwhere(v == vo) + data.append(dI[ind]) + datadII0.append(dIF[ind]) + dataDwell.append(dt[ind]) + NEvents.append(len(ind)) + if f['General/Machine'].value == 'Axopatch': + print('Modified-Written is applied') + if (np.max(t2[ind])-np.min(t[ind])) == 0: + dataRate.append(0) + else: + dataRate.append(len(ind)/(np.max(t2[ind])-np.min(t[ind]))) + else: + if (np.max(t[ind])-np.min(t[ind])) == 0: + dataRate.append(0) + else: + dataRate.append(len(ind)/(np.max(t[ind])-np.min(t[ind]))) + labels.append('{:0.0f}mV'.format(vo*1000)) + + +# Fractional Current Drop Scatter Plot +fig1 = plt.figure(1, figsize=(9, 6)) +ax = fig1.add_subplot(111) +sc = ax.scatter(dt[np.where(v > 0)], dIF[np.where(v > 0)]*100, c=v[np.where(v > 0)], vmin=min(v[np.where(v > 0)]), vmax=max(v[np.where(v > 0)]), s=35, cmap=cm) +cbar = plt.colorbar(sc, ticks=availableVoltages) +cbar.ax.set_yticklabels(labels) # vertically oriented colorbar +ax.xaxis.set_major_formatter(EngFormatter(unit='s')) +ax.set_xlabel('Time') +ax.set_ylabel('Current Drop dI/I0 [%]') +ax.set_title('{}\nScatter Plot, {} events'.format(Name, count)) +fig1.savefig(file[:-4] + 'ScatterFrac.png', dpi=300) +fig1.savefig(file[:-4] + 'ScatterFrac.eps') +ax.set_xlim(0, 50e-3) +ax.set_ylim(0, 30) +fig1.savefig(file[:-4] + 'ScatterFracZoomed.png', dpi=300) +fig1.savefig(file[:-4] + 'ScatterFracZoomed.eps') + +#fig1.clear() +#fig1.close() + +# Fractional Current Drop Scatter Plot +fig6 = plt.figure(6, figsize=(9, 6)) +ax6 = fig6.add_subplot(111) +sc6 = ax6.scatter(dt[np.where(v > 0)], dI[np.where(v > 0)], c=v[np.where(v > 0)], vmin=min(v[np.where(v > 0)]), vmax=max(v[np.where(v > 0)]), s=35, cmap=cm) +cbar6 = plt.colorbar(sc6, ticks=availableVoltages) +cbar6.ax.set_yticklabels(labels) # vertically oriented colorbar 
+ax6.xaxis.set_major_formatter(EngFormatter(unit='s'))
+ax6.yaxis.set_major_formatter(EngFormatter(unit='A'))
+ax6.set_xlabel('Time')
+ax6.set_ylabel('Current Drop dI')
+ax6.set_title('{}\nScatter Plot, {} events'.format(Name, count))
+fig6.savefig(file[:-4] + 'ScatterdI.png', dpi=300)
+fig6.savefig(file[:-4] + 'ScatterdI.eps')
+ax6.set_xlim(0, 20e-3)
+ax6.set_ylim(0, 2.5e-9)
+fig6.savefig(file[:-4] + 'ScatterdIZoomed.png', dpi=300)
+fig6.savefig(file[:-4] + 'ScatterdIZoomed.eps')
+
+#fig1.clear()
+#fig1.close()
+
+# BoxPlot Delta I vs Voltages
+fig2 = plt.figure(2, figsize=(9, 6))
+ax2 = fig2.add_subplot(111)
+bp = ax2.boxplot(data, notch=False, sym='')
+ax2.yaxis.set_major_formatter(EngFormatter(unit='A'))
+ax2.set_xlabel('Voltage')
+ax2.set_ylabel('Current Drop dI')
+ax2.set_title('{}\nBoxplot (outliers removed)\n{} events'.format(Name, count))
+ax2.set_xticklabels(labels)
+ax2.get_xaxis().tick_bottom()
+ax2.get_yaxis().tick_left()
+fig2.savefig(file[:-4] + 'BoxplotdI.png', dpi=300)
+fig2.savefig(file[:-4] + 'BoxplotdI.eps')
+
+# BoxPlot DwellTime vs Voltages
+fig3 = plt.figure(3, figsize=(9, 6))
+ax3 = fig3.add_subplot(111)
+bp = ax3.boxplot(dataDwell, notch=False, sym='')
+ax3.yaxis.set_major_formatter(EngFormatter(unit='s'))
+ax3.set_xlabel('Voltage')
+ax3.set_ylabel('Dwell Time dt')
+ax3.set_title('{}\nBoxplot (outliers removed)\n{} events'.format(Name, count))
+ax3.set_xticklabels(labels)
+ax3.get_xaxis().tick_bottom()
+ax3.get_yaxis().tick_left()
+fig3.savefig(file[:-4] + 'BoxplotDwellTime.png', dpi=300)
+fig3.savefig(file[:-4] + 'BoxplotDwellTime.eps')
+
+# Bar Plot Event Rate vs Voltages
+fig4 = plt.figure(4, figsize=(9, 6))
+ax4 = fig4.add_subplot(111)
+bp = ax4.bar(np.arange(len(dataRate)), dataRate)
+ax4.yaxis.set_major_formatter(EngFormatter(unit=r'$\frac{1}{s}$'))
+ax4.set_xlabel('Voltage')
+ax4.set_ylabel('Event Rate')
+ax4.set_title('{}\nBarplot\n{} events'.format(Name, count))
+ax4.set_xticks(np.arange(len(dataRate)))
+ax4.set_xticklabels(labels)
+ax4.get_xaxis().tick_bottom()
+ax4.get_yaxis().tick_left()
+fig4.savefig(file[:-4] + 'BarplotEventRate.png', dpi=300)
+fig4.savefig(file[:-4] + 'BarplotEventRate.eps')
+
+# Bar Plot Event Number vs Voltages
+fig5 = plt.figure(5, figsize=(9, 6))
+ax5 = fig5.add_subplot(111)
+bp = ax5.bar(np.arange(len(NEvents)), NEvents)
+ax5.yaxis.set_major_formatter(EngFormatter(unit=''))
+ax5.set_xlabel('Voltage')
+ax5.set_ylabel('Number Of Events')
+ax5.set_title('{}\nBarplot\n{} events'.format(Name, count))
+ax5.set_xticks(np.arange(len(NEvents)))
+ax5.set_xticklabels(labels)
+ax5.get_xaxis().tick_bottom()
+ax5.get_yaxis().tick_left()
+fig5.savefig(file[:-4] + 'BarplotEventNumber.png', dpi=300)
+fig5.savefig(file[:-4] + 'BarplotEventNumber.eps')
+
+# BoxPlot dI/I0 vs Voltages
+fig7 = plt.figure(7, figsize=(9, 6))
+ax7 = fig7.add_subplot(111)
+bp = ax7.boxplot([100 * d for d in datadII0], notch=False, sym='')  # scale fractions to percent to match the label
+ax7.set_xlabel('Voltage')
+ax7.set_ylabel('Current Drop dI/I0 [%]')
+ax7.set_title('{}\nBoxplot (outliers removed)\n{} events'.format(Name, count))
+ax7.set_xticklabels(labels)
+ax7.get_xaxis().tick_bottom()
+ax7.get_yaxis().tick_left()
+fig7.savefig(file[:-4] + 'BoxplotdII0.png', dpi=300)
+fig7.savefig(file[:-4] + 'BoxplotdII0.eps')
+
+plt.show()
\ No newline at end of file
diff --git a/ScatterPlotsForTwoChannels.py b/ScatterPlotsForTwoChannels.py
new file mode 100644
index 0000000..d123501
--- /dev/null
+++ b/ScatterPlotsForTwoChannels.py
@@ -0,0 +1,77 @@
+import MiscParameters as pm
+import numpy as np
+import scipy
+import scipy.signal as sig
+import 
Functions as f +import os +import matplotlib.pyplot as plt +from matplotlib.backends.backend_pdf import PdfPages +from matplotlib.font_manager import FontProperties +import pandas as pd +import h5py +from tkinter.filedialog import askopenfilenames +from matplotlib import rc +rc('mathtext', default='regular') +pm.init(LoadFiles = 0) + +currents = {'i1', 'i2'} +#filenames = askopenfilenames() # show an "Open" dialog box and return the path to the selected file +filenames = {'/Users/migraf/SWITCHdrive/PhD/BPS/Poster Resources/Data/17B_10mMCis100mMtransKCl_80mer_2_OriginalDB.hdf5'} + +count = 0 +dt = {} +dI = {} +delay = np.array([], dtype=np.int64) +count = {} +ind = {} +fig2, ax3 = plt.subplots() + +for k in currents: + dt[k] = np.array([]) + dI[k] = np.array([]) + count[k] = 0 +for file in filenames: + for k in currents: + print(file) + filen = os.sep + str(os.path.split(file)[1][:-4]) + f = h5py.File(file, 'r') + dt[k] = np.append(dt[k], f['LowPassSegmentation/' + k + '/DwellTime'].value) + dI[k] = np.append(dI[k], f['LowPassSegmentation/' + k + '/FractionalCurrentDrop'].value) + ind[k] = np.int64(f['LowPassSegmentation/' + k + '/' + pm.filter].value) + count[k] += len(dt[k]) + sr = f['General/Samplerate'].value + delay = f['XCorrelation/' + pm.filter + '/Lag'].value + delay = delay[delay < 0.0004] +# ax3.hist() + ax3.hist(delay*1e6, 20, + label=r'Mean: {:03.2f}$\mu$s, std: {:03.2f}$\mu$s'.format(np.mean(delay*1e6), np.std(delay*1e6))) + +## SCATTER PLOTS +fig1, ax = plt.subplots() +ax.plot(dt['i1'][ind['i1']] * 1e3, dI['i1'][ind['i1']]*100, 'ob', label = 'Ionic Current') +ax.set_ylabel(r'Ionic Current Drop $\frac{\Delta I}{I_0}$ [%]', color='b') +ax.tick_params('y', colors='b') +ax.set_xlabel('Time [ms]') +ax.set_title('MoS2 transverse detection, {} events'.format(count)) + +ax2 = ax.twinx() +ax2.plot(dt['i2'][ind['i2']] * 1e3, dI['i2'][ind['i2']]*100, 'or', label = 'Transverse Current') +ax2.set_ylabel(r'Transverse Current Drop $\frac{\Delta I}{I_0}$ [%]', color='r') +ax2.tick_params('y', colors='r') +#ax2.set_xlim(0, 1) + + +## Correlation Histograms +ax3.set_xlabel(r'Time [$\mu$s]') +ax3.set_ylabel('Counts') +ax3.set_title('Correlated events: {}, Histogram of delay'.format(len(delay))) +ax3.legend() + +fig1.savefig(pm.OutputFolder + filen + 'Scatter.png', dpi=300) +fig1.savefig(pm.OutputFolder + filen + 'Scatter.eps') +fig2.savefig(pm.OutputFolder + filen + 'DelayHist.png', dpi=300) +fig2.savefig(pm.OutputFolder + filen + 'DelayHist.eps') + +plt.show() +#fig1.clear() +#fig1.close() \ No newline at end of file diff --git a/SingleChannelAnalysis.py b/SingleChannelAnalysis.py new file mode 100644 index 0000000..5ca2879 --- /dev/null +++ b/SingleChannelAnalysis.py @@ -0,0 +1,95 @@ +#Input Stuff +import MiscParameters as pm +import numpy as np +import Functions as f +import os +import matplotlib.pyplot as plt +from tkinter import Tk +from tkinter.filedialog import askopenfilenames +from tkinter.filedialog import askopenfilename +from matplotlib.font_manager import FontProperties +fontP = FontProperties() +fontP.set_size('small') +pm.init(LoadFiles = 0) +root = Tk() +root.withdraw() +os.system('''/usr/bin/osascript -e 'tell app "Finder" to set frontmost of process "python" to true' ''') + +# Load File, empty string prompts a pop.up window for file selection. 
Otherwise a file-path can be given.
+
+root.update()
+#filenames={'/Volumes/lben/lben-commun/2018 User Data/Martina/Axonpatch/20180518_13A/13A_1MKCl_events250mV3.dat'}
+filenames = askopenfilenames()
+root.destroy()
+
+#filenames=['/Volumes/lben/lben-commun/2018 User Data/Mike/mike 27 03 2018/Events 3/Events 3_20180327_195201.log']
+
+for filename in filenames:
+    print(filename)
+    inp = f.OpenFile(filename, ChimeraLowPass=pm.ChimeraLowPass)
+    folder = pm.OutputFolder
+    file = os.sep + str(os.path.split(filename)[1][:-4])
+    print('Number of samples in file: {}'.format(len(inp['i1'])))
+    if not os.path.exists(folder):
+        os.makedirs(folder)
+
+    #Low Pass Event Detection
+    AnalysisResults = {}
+
+    chan = 'i1'
+    AnalysisResults[chan] = {}
+    AnalysisResults[chan]['RoughEventLocations'] = f.RecursiveLowPassFast(inp[chan], pm.coefficients[chan], inp['samplerate'])
+    print('Found {} events'.format(len(AnalysisResults[chan]['RoughEventLocations'])))
+
+    ## Event length limit -> deletes events that are too short
+    ind = np.array([])
+    for (k,i) in enumerate(AnalysisResults[chan]['RoughEventLocations'][:]):
+        if i[1] - i[0] <= pm.minmalEventLengthPoints:
+            ind = np.append(ind, k)
+    AnalysisResults[chan]['RoughEventLocations'] = np.delete(
+        AnalysisResults[chan]['RoughEventLocations'][:], ind, axis=0)
+
+    if pm.UpwardsOn:  # Upwards detection can be turned on or off
+        AnalysisResults[chan + '_Up'] = {}
+        AnalysisResults[chan + '_Up']['RoughEventLocations'] = f.RecursiveLowPassFastUp(inp[chan], pm.coefficients[chan], inp['samplerate'])
+        ind = np.array([])
+        for (k, i) in enumerate(AnalysisResults[chan + '_Up']['RoughEventLocations'][:]):
+            if i[1] - i[0] <= pm.minmalEventLengthPoints:
+                ind = np.append(ind, k)
+        AnalysisResults[chan + '_Up']['RoughEventLocations'] = np.delete(
+            AnalysisResults[chan + '_Up']['RoughEventLocations'][:], ind, axis=0)
+
+    print('Deleted All Events shorter than {:0.2e}s'.format(pm.minmalEventLengthPoints / inp['samplerate']))  # points -> seconds
+
+    ############ Plot the Lowpass Detections
+    if pm.PlotTheLowPassDetection:
+        fig = plt.figure(1, figsize=(16,5))
+        ax = fig.add_subplot(111)
+
+        ax.plot(np.arange(0, len(inp['i1']), 1)/inp['samplerate'], inp['i1'], 'b')
+
+        for i in AnalysisResults['i1']['RoughEventLocations']:
+            ax.plot(np.arange(np.uint64(i[0]), np.uint64(i[1]), 1)/inp['samplerate'], inp['i1'][np.uint64(i[0]):np.uint64(i[1])], 'r')
+
+        if pm.UpwardsOn:
+            for i in AnalysisResults['i1_Up']['RoughEventLocations']:
+                # plain matplotlib call (the original mixed in pyqtgraph-style keyword arguments)
+                ax.plot(np.arange(np.uint64(i[0]), np.uint64(i[1]), 1)/inp['samplerate'], inp['i1'][np.uint64(i[0]):np.uint64(i[1])], 'r')
+
+        ax.set_ylabel("Current [A]")
+        ax.set_xlabel("Time [s]")
+
+    # Refine the Rough Event Detection done by the LP filter and Add event infos
+    if inp['graphene']:
+        AnalysisResults = f.RefinedEventDetection(inp, AnalysisResults, signals=['i1', 'i2'], limit=pm.MinimalFittingLimit*inp['samplerate'])
+    else:
+        AnalysisResults = f.RefinedEventDetection(inp, AnalysisResults, signals=['i1'], limit=pm.MinimalFittingLimit*inp['samplerate'])
+
+    ## Save the analysis to the event database
+    f.SaveToHDF5(inp, AnalysisResults, pm.coefficients, folder)
+
+    if pm.PlotTheLowPassDetection:
+        plt.show()
\ No newline at end of file
diff --git a/UsefulFunctions.py b/UsefulFunctions.py
new file mode 100644
index 0000000..cedff4e
--- /dev/null
+++ b/UsefulFunctions.py
@@ -0,0 +1,1498 @@
+import matplotlib
+matplotlib.use('Qt5Agg')
+import numpy as np
+import scipy
+import scipy.signal as sig
+import os
+import pickle as pkl
+from scipy import io
+from scipy import signal
+from PyQt5 import QtGui, QtWidgets
+from numpy import linalg as lin
+import pyqtgraph as pg
+import matplotlib.pyplot as plt
+from matplotlib.backends.backend_pdf import PdfPages
+import pandas as pd
+import h5py
+from timeit import default_timer as timer
+import platform
+from scipy.optimize import curve_fit
+import AnalysisParameters as pm
+
+def Reshape1DTo2D(inputarray, buffersize):
+    npieces = np.uint16(len(inputarray)/buffersize)
+    voltages = np.array([], dtype=np.float64)
+    currents = np.array([], dtype=np.float64)
+    #print(npieces)
+
+    for i in range(1, npieces+1):
+        if i % 2 == 1:
+            currents = np.append(currents, inputarray[(i-1)*buffersize:i*buffersize-1], axis=0)
+            #print('Length Currents: {}'.format(len(currents)))
+        else:
+            voltages = np.append(voltages, inputarray[(i-1)*buffersize:i*buffersize-1], axis=0)
+            #print('Length Voltages: {}'.format(len(voltages)))
+
+    v1 = np.ones((len(voltages)), dtype=np.float64)
+    i1 = np.ones((len(currents)), dtype=np.float64)
+    v1[:]=voltages
+    i1[:]=currents
+
+    out = {'v1': v1, 'i1': i1}
+    print('Voltages: ' + str(v1.shape))
+    print('Currents: ' + str(i1.shape))
+    return out
+
+def CalculatePoreSize(G, L, s):
+    return (G+np.sqrt(G*(G+16*L*s/np.pi)))/(2*s)
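+
+# CalculatePoreSize inverts the standard conductance model of a cylindrical
+# pore with access resistance, G = s*(4*L/(pi*d**2) + 1/d)**-1, solving for the
+# diameter d. Units: G in S, L (membrane thickness) in m, s (electrolyte
+# conductivity) in S/m. Illustrative numbers, not taken from this patch:
+# CalculatePoreSize(20e-9, 20e-9, 10.5) is roughly 8.0e-9, i.e. a 20 nS pore in
+# ~1 M KCl with a 20 nm membrane comes out at about 8 nm.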
+
+def ImportAxopatchData(datafilename):
+    x=np.fromfile(datafilename, np.dtype('>f4'))
+    f=open(datafilename, 'rb')
+    graphene=0
+    for i in range(0, 10):
+        a=str(f.readline())
+        #print(a)
+        if 'Acquisition' in a or 'Sample Rate' in a:
+            samplerate=int(''.join(i for i in a if i.isdigit()))/1000
+        if 'FEMTO preamp Bandwidth' in a:
+            femtoLP=int(''.join(i for i in a if i.isdigit()))
+        if 'I_Graphene' in a:
+            graphene=1
+            print('This File Has a Graphene Channel!')
+    end = len(x)
+    if graphene:
+        #pore current
+        i1 = x[250:end-3:4]
+        #graphene current
+        i2 = x[251:end-2:4]
+        #pore voltage
+        v1 = x[252:end-1:4]
+        #graphene voltage
+        v2 = x[253:end:4]
+        print('The femto was set to : {} Hz, if this value was correctly entered in the LabView!'.format(str(femtoLP)))
+        output={'FemtoLowPass': femtoLP, 'type': 'Axopatch', 'graphene': 1, 'samplerate': samplerate, 'i1': i1, 'v1': v1, 'i2': i2, 'v2': v2, 'filename': datafilename}
+    else:
+        i1 = np.array(x[250:end-1:2])
+        v1 = np.array(x[251:end:2])
+        output={'type': 'Axopatch', 'graphene': 0, 'samplerate': samplerate, 'i1': i1, 'v1': v1, 'filename': datafilename}
+    return output
+
+def ImportChimeraRaw(datafilename, LPfiltercutoff=0):
+    matfile=io.loadmat(str(os.path.splitext(datafilename)[0]))
+    #buffersize=matfile['DisplayBuffer']
+    data = np.fromfile(datafilename, np.dtype('= coeff['eventlengthLimit'] or i > (Ni - 10):
+            NumberOfEvents -= 1
+        else:
+            k = start
+            while signal[k] < Mm and k > 1:
+                k -= 1
+            start = k - 1
+            k2 = i + 1
+            while signal[k2] > Mm:
+                k2 -= 1
+            endp = k2 + 1
+
+            RoughEventLocations.append((start,endp,Mm, Vv))
+            ml[i] = Mm
+            vl[i] = Vv
+    return np.array(RoughEventLocations)
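+
+# The recursive low-pass detectors below track a running baseline with an
+# exponential moving average: lfilter([1-a, 0], [1, -a], x) implements
+# m[k] = a*m[k-1] + (1-a)*x[k], and the same filter applied to (x-m)**2 gives
+# a running variance v[k]. A sample is flagged as a candidate event when it
+# crosses the local threshold m[k] - S*sqrt(v[k]) (or + S*... for upward events).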
+def RecursiveLowPassFast(signal, coeff, samplerate):
+    # samplerate restores agreement with the call sites, which pass it as a
+    # third argument; coeff['eventlengthLimit'] is given in seconds.
+    limit = coeff['eventlengthLimit'] * samplerate
+    ml = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], signal)
+    vl = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], np.square(signal - ml))
+    sl = ml - coeff['S'] * np.sqrt(vl)
+    Ni = len(signal)
+    points = np.array(np.where(signal<=sl)[0])
+    to_pop=np.array([])
+    for i in range(1,len(points)):
+        if points[i] - points[i - 1] == 1:
+            to_pop=np.append(to_pop, i)
+    points = np.delete(points, to_pop)
+    RoughEventLocations = []
+    NumberOfEvents=0
+
+    for i in points:
+        if NumberOfEvents != 0:
+            if i >= RoughEventLocations[NumberOfEvents-1][0] and i <= RoughEventLocations[NumberOfEvents-1][1]:
+                continue
+        NumberOfEvents += 1
+        start = i
+        El = ml[i] - coeff['E'] * np.sqrt(vl[i])
+        Mm = ml[i]
+        Vv = vl[i]
+        duration = 0
+        while signal[i + 1] < El and i < (Ni - 2) and duration < limit:
+            duration += 1
+            i += 1
+        if duration >= limit or i > (Ni - 10):
+            NumberOfEvents -= 1
+        else:
+            k = start
+            while signal[k] < Mm and k > 1:
+                k -= 1
+            start = k - 1
+            k2 = i + 1
+            while signal[k2] > Mm:
+                k2 -= 1
+            endp = k2
+            if start<0:
+                start=0
+            RoughEventLocations.append((start, endp, ml[start], vl[start]))
+
+    return np.array(RoughEventLocations)
+
+def RecursiveLowPassFastUp(signal, coeff, samplerate):
+    limit = coeff['eventlengthLimit'] * samplerate
+    ml = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], signal)
+    vl = scipy.signal.lfilter([1 - coeff['a'], 0], [1, -coeff['a']], np.square(signal - ml))
+    sl = ml + coeff['S'] * np.sqrt(vl)
+    Ni = len(signal)
+    points = np.array(np.where(signal>=sl)[0])
+    to_pop=np.array([])
+    for i in range(1,len(points)):
+        if points[i] - points[i - 1] == 1:
+            to_pop=np.append(to_pop, i)
+    points = np.delete(points, to_pop)
+
+    points =np.delete(points, np.array(np.where(points == 0)[0]))
+
+    RoughEventLocations = []
+    NumberOfEvents=0
+    for i in points:
+        if NumberOfEvents != 0:
+            if i >= RoughEventLocations[NumberOfEvents-1][0] and i <= RoughEventLocations[NumberOfEvents-1][1]:
+                continue
+        NumberOfEvents += 1
+        start = i
+        El = ml[i] + coeff['E'] * np.sqrt(vl[i])
+        Mm = ml[i]
+        duration = 0
+        while signal[i + 1] > El and i < (Ni - 2) and duration < limit:
+            duration += 1
+            i += 1
+        if duration >= limit or i > (Ni - 10):
+            NumberOfEvents -= 1
+        else:
+            k = start
+            while signal[k] > Mm and k > 2:
+                k -= 1
+            start = k - 1
+            k2 = i + 1
+            while signal[k2] > Mm:
+                k2 -= 1
+            endp = k2
+            RoughEventLocations.append((start, endp, ml[start], vl[start]))
+
+    return np.array(RoughEventLocations)
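+
+# Quick self-contained illustration of the detector above (a sketch only; the
+# coefficient values and samplerate are placeholders, not from the parameter file):
+def _DemoRecursiveLowPass():
+    rng = np.random.RandomState(1)
+    trace = 10e-9 + 0.1e-9 * rng.randn(50000)   # 0.5 s of baseline at 100 kHz
+    trace[20000:20400] -= 2e-9                  # one square 4 ms event
+    coeff = {'a': 0.999, 'E': 0, 'S': 5, 'eventlengthLimit': 5e-3}
+    events = RecursiveLowPassFast(trace, coeff, 100000)
+    print('rows of (start, end, local baseline, local variance):')
+    print(events)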
+
+def AddInfoAfterRecursive(self):
+    startpoints = np.uint64(self.AnalysisResults[self.sig]['RoughEventLocations'][:, 0])
+    endpoints = np.uint64(self.AnalysisResults[self.sig]['RoughEventLocations'][:, 1])
+    localBaseline = self.AnalysisResults[self.sig]['RoughEventLocations'][:, 2]
+    localVariance = self.AnalysisResults[self.sig]['RoughEventLocations'][:, 3]
+
+    CusumBaseline=500
+    numberofevents = len(startpoints)
+    self.AnalysisResults[self.sig]['StartPoints'] = startpoints
+    self.AnalysisResults[self.sig]['EndPoints'] = endpoints
+    self.AnalysisResults[self.sig]['LocalBaseline'] = localBaseline
+    self.AnalysisResults[self.sig]['LocalVariance'] = localVariance
+    self.AnalysisResults[self.sig]['NumberOfEvents'] = len(startpoints)
+
+    #### Now we want to move the endpoints to be the last minimum for each ####
+    #### event so we find all minimas for each event, and set endpoint to last ####
+
+    deli = np.zeros(numberofevents)
+    dwell = np.zeros(numberofevents)
+    limit=500e-6*self.out['samplerate']
+    AllFits={}
+
+    for i in range(numberofevents):
+        length = endpoints[i] - startpoints[i]
+        if length <= limit and length>3:
+            # Impulsion Fit to minimal value
+            deli[i] = localBaseline[i] - np.min(self.out[self.sig][startpoints[i]+1:endpoints[i]-1])
+            dwell[i] = (endpoints[i] - startpoints[i]) / self.out['samplerate']
+        elif length > limit:
+            deli[i] = localBaseline[i] - np.mean(self.out[self.sig][startpoints[i]+5:endpoints[i]-5])
+            dwell[i] = (endpoints[i] - startpoints[i]) / self.out['samplerate']
+            # # Cusum Fit
+            # sigma = np.sqrt(localVariance[i])
+            # delta = 2e-9
+            # h = 1 * delta / sigma
+            # (mc, kd, krmv) = CUSUM(self.out[self.sig][startpoints[i]-CusumBaseline:endpoints[i]+CusumBaseline], delta, h)
+            # zeroPoint = startpoints[i]-CusumBaseline
+            # krmv = krmv+zeroPoint+1
+            # AllFits['Event' + str(i)] = {}
+            # AllFits['Event' + str(i)]['mc'] = mc
+            # AllFits['Event' + str(i)]['krmv'] = krmv
+        else:
+            deli[i] = localBaseline[i] - np.min(self.out[self.sig][startpoints[i]:endpoints[i]])
+            dwell[i] = (endpoints[i] - startpoints[i]) / self.out['samplerate']
+
+    frac = deli / localBaseline
+    dt = np.array(0)
+    dt = np.append(dt, np.diff(startpoints) / self.out['samplerate'])
+    numberofevents = len(dt)
+
+    #self.AnalysisResults[self.sig]['CusumFits'] = AllFits
+    self.AnalysisResults[self.sig]['FractionalCurrentDrop'] = frac
+    self.AnalysisResults[self.sig]['DeltaI'] = deli
+    self.AnalysisResults[self.sig]['DwellTime'] = dwell
+    self.AnalysisResults[self.sig]['Frequency'] = dt
+
+def SavingAndPlottingAfterRecursive(self):
+
+    startpoints=self.AnalysisResults[self.sig]['StartPoints']
+    endpoints=self.AnalysisResults[self.sig]['EndPoints']
+    numberofevents=self.AnalysisResults[self.sig]['NumberOfEvents']
+    deli=self.AnalysisResults[self.sig]['DeltaI']
+    dwell=self.AnalysisResults[self.sig]['DwellTime']
+    frac=self.AnalysisResults[self.sig]['FractionalCurrentDrop']
+    dt=self.AnalysisResults[self.sig]['Frequency']
+    localBaseline=self.AnalysisResults[self.sig]['LocalBaseline']
+
+    if not self.ui.actionDon_t_Plot_if_slow.isChecked():
+        self.p1.clear()
+        # Event detection plot, Main Window
+        self.p1.plot(self.t, self.out[self.sig], pen='b')
+        self.p1.plot(self.t[startpoints], self.out[self.sig][startpoints], pen=None, symbol='o', symbolBrush='g', symbolSize=10)
+        self.p1.plot(self.t[endpoints], self.out[self.sig][endpoints], pen=None, symbol='o', symbolBrush='r', symbolSize=10)
+        #self.p1.plot(self.t[startpoints-10], localBaseline, pen=None, symbol='x', symbolBrush='y', symbolSize=10)
+
+    try:
+        self.p2.data = self.p2.data[np.where(np.array(self.sdf.fn) != self.matfilename)]
+    except IndexError:
+        pass
+    self.sdf = self.sdf[self.sdf.fn != self.matfilename]
+
+    fn = pd.Series([self.matfilename, ] * numberofevents)
+    color = pd.Series([self.cb.color(), ] * numberofevents)
+
+    self.sdf = self.sdf.append(pd.DataFrame({'fn': fn, 'color': color, 'deli': deli,
+                                             'frac': frac, 'dwell': dwell,
+                                             'dt': dt, 'startpoints': startpoints,
+                                             'endpoints': endpoints, 'baseline': localBaseline}), ignore_index=True)
+
+    self.p2.addPoints(x=np.log10(dwell), y=frac,
+                      symbol='o', brush=(self.cb.color()), pen=None, size=10)
+
+    self.w1.addItem(self.p2)
+    self.w1.setLogMode(x=True, y=False)
+    self.p1.autoRange()
+    self.w1.autoRange()
+    self.ui.scatterplot.update()
+    self.w1.setRange(yRange=[0, 1])
+
+    colors = self.sdf.color
+    for i, x in enumerate(colors):
+        fracy, fracx = np.histogram(self.sdf.frac[self.sdf.color == x],
+                                    bins=np.linspace(0, 1, int(self.ui.fracbins.text())))
+        deliy, delix = np.histogram(self.sdf.deli[self.sdf.color == x],
+                                    bins=np.linspace(float(self.ui.delirange0.text()) * 10 ** -9,
+                                                     float(self.ui.delirange1.text()) * 10 ** -9,
+                                                     int(self.ui.delibins.text())))
+        bins_dwell=np.linspace(float(self.ui.dwellrange0.text()), float(self.ui.dwellrange1.text()), int(self.ui.dwellbins.text()))
+        dwelly, dwellx = np.histogram(np.log10(self.sdf.dwell[self.sdf.color == x]),
+                                      bins=bins_dwell,range=(bins_dwell.min(),bins_dwell.max()))
+        dty, dtx = np.histogram(self.sdf.dt[self.sdf.color == x],
+                                bins=np.linspace(float(self.ui.dtrange0.text()), float(self.ui.dtrange1.text()),
+                                                 int(self.ui.dtbins.text())))
+
+        # hist = pg.PlotCurveItem(fracy, 
fracx , stepMode = True, fillLevel=0, brush = x, pen = 'k') + # self.w2.addItem(hist) + + hist = pg.BarGraphItem(height=fracy, x0=fracx[:-1], x1=fracx[1:], brush=x) + self.w2.addItem(hist) + + # hist = pg.PlotCurveItem(delix, deliy , stepMode = True, fillLevel=0, brush = x, pen = 'k') + # self.w3.addItem(hist) + + hist = pg.BarGraphItem(height=deliy, x0=delix[:-1], x1=delix[1:], brush=x) + self.w3.addItem(hist) + # self.w3.autoRange() + self.w3.setRange( + xRange=[float(self.ui.delirange0.text()) * 10 ** -9, float(self.ui.delirange1.text()) * 10 ** -9]) + + # hist = pg.PlotCurveItem(dwellx, dwelly , stepMode = True, fillLevel=0, brush = x, pen = 'k') + # self.w4.addItem(hist) + + hist = pg.BarGraphItem(height=dwelly, x0=dwellx[:-1], x1=dwellx[1:], brush=x) + self.w4.addItem(hist) + + # hist = pg.PlotCurveItem(dtx, dty , stepMode = True, fillLevel=0, brush = x, pen = 'k') + # self.w5.addItem(hist) + + hist = pg.BarGraphItem(height=dty, x0=dtx[:-1], x1=dtx[1:], brush=x) + self.w5.addItem(hist) + +def save(self): + np.savetxt(self.matfilename + 'DB.txt', np.column_stack((self.deli, self.frac, self.dwell, self.dt)), + delimiter='\t') + +def PlotEventSingle(self, clicked=[]): + f = h5py.File(self.matfilename + '_OriginalDB.hdf5', "r") + sig='i1' + + startpoints=self.AnalysisResults[sig]['StartPoints'] + endpoints=self.AnalysisResults[sig]['EndPoints'] + localBaseline=self.AnalysisResults[sig]['LocalBaseline'] + + # Reset plot + self.p3.setLabel('bottom', text='Time', units='s') + self.p3.setLabel('left', text='Current', units='A') + self.p3.clear() + eventnumber = np.int(self.ui.eventnumberentry.text()) + eventbuffer = np.int(self.ui.eventbufferentry.value()) + + # plot event trace + self.p3.plot(self.t[startpoints[eventnumber] - eventbuffer:endpoints[eventnumber] + eventbuffer], + self.out[sig][startpoints[eventnumber] - eventbuffer:endpoints[eventnumber] + eventbuffer], + pen='b') + + # plot event fit + self.p3.plot(self.t[startpoints[eventnumber] - eventbuffer:endpoints[eventnumber] + eventbuffer], np.concatenate(( + np.repeat(np.array([localBaseline[eventnumber]]), eventbuffer), + np.repeat(np.array([localBaseline[eventnumber] - self.AnalysisResults['i1']['DeltaI'][eventnumber + ]]), endpoints[eventnumber] - startpoints[eventnumber]), + np.repeat(np.array([localBaseline[eventnumber]]), eventbuffer)), 0), + pen=pg.mkPen(color=(173, 27, 183), width=3)) + + self.p3.autoRange() + +def PlotEventDouble(self, clicked=[]): + f = h5py.File(self.matfilename + '_OriginalDB.hdf5', "r") + + if self.ui.actionPlot_i1_detected_only.isChecked(): + indexes = f['LowPassSegmentation/i1/OnlyIndex'] + i = f['LowPassSegmentation/i1/'] + sig = 'i1' + sig2 = 'i2' + leftlabel = "Ionic Current" + rightlabel = "Transverse Current" + if self.ui.actionPlot_i2_detected_only.isChecked(): + indexes = f['LowPassSegmentation/i2/OnlyIndex'] + i = f['LowPassSegmentation/i2/'] + sig = 'i2' + sig2 = 'i1' + rightlabel = "Ionic Current" + leftlabel = "Transverse Current" + self.p3.clear() + self.transverseAxisEvent.clear() + + p1 = self.p3.plotItem + p1.getAxis('left').setLabel(text=leftlabel, color='#0000FF', units='A') + ## create a new ViewBox, link the right axis to its coordinate system + p1.showAxis('right') + p1.scene().addItem(self.transverseAxisEvent) + p1.getAxis('right').linkToView(self.transverseAxisEvent) + self.transverseAxisEvent.setXLink(p1) + self.transverseAxisEvent.show() + p1.getAxis('right').setLabel(text=rightlabel, color='#FF0000', units='A') + + def updateViews(): + ## view has resized; update auxiliary 
views to match
+        self.transverseAxisEvent.setGeometry(p1.vb.sceneBoundingRect())
+        self.transverseAxisEvent.linkedViewChanged(p1.vb, self.transverseAxisEvent.XAxis)
+
+    updateViews()
+    p1.vb.sigResized.connect(updateViews)
+
+    # Correct for user error if a non-existent event number is entered
+    eventbuffer = np.int(self.ui.eventbufferentry.value())
+    maxEvents=self.NumberOfEvents
+
+    eventnumber = np.int(self.ui.eventnumberentry.text())
+    if eventnumber >= maxEvents:
+        eventnumber=0
+        self.ui.eventnumberentry.setText(str(eventnumber))
+    elif eventnumber < 0:
+        eventnumber = maxEvents - 1  # clamp to the last valid event
+        self.ui.eventnumberentry.setText(str(eventnumber))
+
+    # plot event trace
+    parttoplot=np.arange(i['StartPoints'][indexes[eventnumber]] - eventbuffer, i['EndPoints'][indexes[eventnumber]] + eventbuffer,1, dtype=np.uint64)
+
+    p1.plot(self.t[parttoplot], self.out[sig][parttoplot], pen='b')
+
+    # plot event fit
+    p1.plot(self.t[parttoplot],
+            np.concatenate((
+                np.repeat(np.array([i['LocalBaseline'][indexes[eventnumber]]]), eventbuffer),
+                np.repeat(np.array([i['LocalBaseline'][indexes[eventnumber]] - i['DeltaI'][indexes[eventnumber]]]), i['EndPoints'][indexes[eventnumber]] - i['StartPoints'][indexes[eventnumber]]),
+                np.repeat(np.array([i['LocalBaseline'][indexes[eventnumber]]]), eventbuffer)), 0),
+            pen=pg.mkPen(color=(173, 27, 183), width=3))
+
+    # plot 2nd Channel
+    if self.Derivative == 'i2':
+        self.transverseAxisEvent.addItem(pg.PlotCurveItem(self.t[parttoplot][:-1], np.diff(self.out[sig][parttoplot]), pen='r'))
+        p1.getAxis('right').setLabel(text='Derivative of i2', color='#FF0000', units='A')
+        print('In if...')
+        #plt.plot(t[:-1], np.diff(i1part), 'y')
+    else:
+        self.transverseAxisEvent.addItem(pg.PlotCurveItem(self.t[parttoplot], self.out[sig2][parttoplot], pen='r'))
+
+    min1 = np.min(self.out[sig][parttoplot])
+    max1 = np.max(self.out[sig][parttoplot])
+    self.p3.setYRange(min1-(max1-min1), max1)
+    self.p3.enableAutoRange(axis='x')
+    min2 = np.min(self.out[sig2][parttoplot])
+    max2 = np.max(self.out[sig2][parttoplot])
+    self.transverseAxisEvent.setYRange(min2, max2+(max2-min2))
+
+    # Mark event start and end points
+    p1.plot([self.t[i['StartPoints'][indexes[eventnumber]]], self.t[i['StartPoints'][indexes[eventnumber]]]],
+            [self.out[sig][i['StartPoints'][indexes[eventnumber]]], self.out[sig][i['StartPoints'][indexes[eventnumber]]]],
+            pen=None,
+            symbol='o', symbolBrush='g', symbolSize=12)
+    p1.plot([self.t[i['EndPoints'][indexes[eventnumber]]], self.t[i['EndPoints'][indexes[eventnumber]]]],
+            [self.out[sig][i['EndPoints'][indexes[eventnumber]]], self.out[sig][i['EndPoints'][indexes[eventnumber]]]],
+            pen=None,
+            symbol='o', symbolBrush='r', symbolSize=12)
+
+    dtime=pg.siFormat(i['DwellTime'][indexes[eventnumber]], precision=5, suffix='s', space=True, error=None, minVal=1e-25, allowUnicode=True)
+    dI=pg.siFormat(i['DeltaI'][indexes[eventnumber]], precision=5, suffix='A', space=True, error=None, minVal=1e-25, allowUnicode=True)  # DeltaI, not DwellTime
+    self.ui.eventinfolabel.setText(leftlabel + ': Dwell Time=' + dtime + ', Deli=' + dI)
+
+def PlotEventDoubleFit(self, clicked=[]):
+    f = h5py.File(self.matfilename + '_OriginalDB.hdf5', "r")
+    i1_indexes=f['LowPassSegmentation/i1/CommonIndex']
+    i2_indexes=f['LowPassSegmentation/i2/CommonIndex']
+    i1=f['LowPassSegmentation/i1/']
+    i2=f['LowPassSegmentation/i2/']
+
+    self.p3.clear()
+    self.transverseAxisEvent.clear()
+
+    leftlabel="Ionic Current"
+    rightlabel="Transverse Current"
+
+    p1 = self.p3.plotItem
+    p1.getAxis('left').setLabel(text=leftlabel, color='#0000FF', units='A')
+    ## create a new ViewBox, link the right axis to its coordinate system
+    p1.showAxis('right')
+    p1.scene().addItem(self.transverseAxisEvent)
+    p1.getAxis('right').linkToView(self.transverseAxisEvent)
+    self.transverseAxisEvent.setXLink(p1)
+    self.transverseAxisEvent.show()
+    p1.getAxis('right').setLabel(text=rightlabel, color='#FF0000', units='A')
+
+    def updateViews():
+        ## view has resized; update auxiliary views to match
+        self.transverseAxisEvent.setGeometry(p1.vb.sceneBoundingRect())
+        self.transverseAxisEvent.linkedViewChanged(p1.vb, self.transverseAxisEvent.XAxis)
+
+    updateViews()
+    p1.vb.sigResized.connect(updateViews)
+
+    # Correct for user error if a non-existent event number is entered
+    eventbuffer = np.int(self.ui.eventbufferentry.value())
+    maxEvents=len(i1_indexes)
+
+    eventnumber = np.int(self.ui.eventnumberentry.text())
+    if eventnumber >= maxEvents:
+        eventnumber=0
+        self.ui.eventnumberentry.setText(str(eventnumber))
+    elif eventnumber < 0:
+        eventnumber = maxEvents - 1  # clamp to the last valid event
+        self.ui.eventnumberentry.setText(str(eventnumber))
+
+    # plot event trace
+    parttoplot=np.arange(i1['StartPoints'][i1_indexes[eventnumber]] - eventbuffer, i1['EndPoints'][i1_indexes[eventnumber]] + eventbuffer,1, dtype=np.uint64)
+    parttoplot2=np.arange(i2['StartPoints'][i2_indexes[eventnumber]] - eventbuffer, i2['EndPoints'][i2_indexes[eventnumber]] + eventbuffer,1, dtype=np.uint64)
+
+    p1.plot(self.t[parttoplot], self.out['i1'][parttoplot], pen='b')
+
+    # plot event fit
+    p1.plot(self.t[parttoplot],
+            np.concatenate((
+                np.repeat(np.array([i1['LocalBaseline'][i1_indexes[eventnumber]]]), eventbuffer),
+                np.repeat(np.array([i1['LocalBaseline'][i1_indexes[eventnumber]] - i1['DeltaI'][i1_indexes[eventnumber]]]), i1['EndPoints'][i1_indexes[eventnumber]] - i1['StartPoints'][i1_indexes[eventnumber]]),
+                np.repeat(np.array([i1['LocalBaseline'][i1_indexes[eventnumber]]]), eventbuffer)), 0),
+            pen=pg.mkPen(color=(173, 27, 183), width=3))
+
+    # plot 2nd Channel
+    self.transverseAxisEvent.addItem(pg.PlotCurveItem(self.t[parttoplot2], self.out['i2'][parttoplot2], pen='r'))
+    self.transverseAxisEvent.addItem(pg.PlotCurveItem(self.t[parttoplot2], np.concatenate((
+        np.repeat(np.array([i2['LocalBaseline'][i2_indexes[eventnumber]]]), eventbuffer),
+        np.repeat(np.array([i2['LocalBaseline'][i2_indexes[eventnumber]] - i2['DeltaI'][i2_indexes[eventnumber]]]), i2['EndPoints'][i2_indexes[eventnumber]] - i2['StartPoints'][i2_indexes[eventnumber]]),
+        np.repeat(np.array([i2['LocalBaseline'][i2_indexes[eventnumber]]]), eventbuffer)), 0), pen=pg.mkPen(color=(173, 27, 183), width=3)))
+
+    min1 = np.min(self.out['i1'][parttoplot])
+    max1 = np.max(self.out['i1'][parttoplot])
+    self.p3.setYRange(min1-(max1-min1), max1)
+    self.p3.enableAutoRange(axis='x')
+    min2 = np.min(self.out['i2'][parttoplot2])
+    max2 = np.max(self.out['i2'][parttoplot2])
+    self.transverseAxisEvent.setYRange(min2, max2+(max2-min2))
+
+    # Mark event start and end points
+    p1.plot([self.t[i1['StartPoints'][i1_indexes[eventnumber]]], self.t[i1['StartPoints'][i1_indexes[eventnumber]]]],
+            [self.out['i1'][i1['StartPoints'][i1_indexes[eventnumber]]], self.out['i1'][i1['StartPoints'][i1_indexes[eventnumber]]]],
+            pen=None,
+            symbol='o', symbolBrush='g', symbolSize=12)
+    p1.plot([self.t[i1['EndPoints'][i1_indexes[eventnumber]]], self.t[i1['EndPoints'][i1_indexes[eventnumber]]]],
+            [self.out['i1'][i1['EndPoints'][i1_indexes[eventnumber]]], self.out['i1'][i1['EndPoints'][i1_indexes[eventnumber]]]],
+            pen=None,
+            symbol='o', symbolBrush='r', symbolSize=12)
+
+    # transverse start/end markers: use PlotDataItem so the symbols render, and
+    # index i2's own start/end points (the original mixed in i1's)
+    self.transverseAxisEvent.addItem(pg.PlotDataItem([self.t[i2['StartPoints'][i2_indexes[eventnumber]]], self.t[i2['StartPoints'][i2_indexes[eventnumber]]]],
+                                                     [self.out['i2'][i2['StartPoints'][i2_indexes[eventnumber]]], self.out['i2'][i2['StartPoints'][i2_indexes[eventnumber]]]],
+                                                     pen=None,
+                                                     symbol='o', symbolBrush='g', symbolSize=12))
+    self.transverseAxisEvent.addItem(pg.PlotDataItem([self.t[i2['EndPoints'][i2_indexes[eventnumber]]], self.t[i2['EndPoints'][i2_indexes[eventnumber]]]],
+                                                     [self.out['i2'][i2['EndPoints'][i2_indexes[eventnumber]]], self.out['i2'][i2['EndPoints'][i2_indexes[eventnumber]]]],
+                                                     pen=None,
+                                                     symbol='o', symbolBrush='r', symbolSize=12))
+
+    dtime=pg.siFormat(i1['DwellTime'][i1_indexes[eventnumber]], precision=5, suffix='s', space=True, error=None, minVal=1e-25, allowUnicode=True)
+    dI=pg.siFormat(i1['DeltaI'][i1_indexes[eventnumber]], precision=5, suffix='A', space=True, error=None, minVal=1e-25, allowUnicode=True)
+    dtime2=pg.siFormat(i2['DwellTime'][i2_indexes[eventnumber]], precision=5, suffix='s', space=True, error=None, minVal=1e-25, allowUnicode=True)
+    dI2=pg.siFormat(i2['DeltaI'][i2_indexes[eventnumber]], precision=5, suffix='A', space=True, error=None, minVal=1e-25, allowUnicode=True)
+
+    self.ui.eventinfolabel.setText('Ionic Dwell Time=' + dtime + ', Ionic Deli=' + dI + ', ' 'Trans Dwell Time=' + dtime2 + ', Trans Deli=' + dI2)
+
+def SaveEventPlotMatplot(self):
+    # NOTE: assumes `i` (the channel group) and `indexes` are supplied by the
+    # surrounding class, as in PlotEventDouble above
+    eventbuffer = np.int(self.ui.eventbufferentry.value())
+    eventnumber = np.int(self.ui.eventnumberentry.text())
+
+    parttoplot = np.arange(i['StartPoints'][indexes[eventnumber]] - eventbuffer, i['EndPoints'][indexes[eventnumber]] + eventbuffer, 1,
+                           dtype=np.uint64)
+
+    t=np.arange(0,len(parttoplot))
+    t=t/self.out['samplerate']*1e3
+
+    fig1=plt.figure(1, figsize=(20,7))
+    plt.subplot(2, 1, 1)
+    plt.cla()
+    plt.plot(t, self.out['i1'][parttoplot]*1e9, 'b')
+    plt.ylabel('Ionic Current [nA]')
+    ax = plt.gca()
+    ax.set_xticklabels([])
+
+    plt.subplot(2, 1, 2)
+    plt.cla()
+    plt.plot(t, self.out['i2'][parttoplot]*1e9, 'r')
+    plt.ylabel('Transverse Current [nA]')
+    plt.xlabel('time (ms)')
+    self.pp.savefig()
+    fig1.clear()
+
+def CombineTheTwoChannels(file):
+    f = h5py.File(file, 'a')
+    i1 = f['LowPassSegmentation/i1/']
+    i2 = f['LowPassSegmentation/i2/']
+    i1StartP = i1['StartPoints'][:]
+    i2StartP = i2['StartPoints'][:]
+
+    # Common Events
+    # Take Longer
+    CommonEventsi1Index = np.array([], dtype=np.uint64)
+    CommonEventsi2Index = np.array([], dtype=np.uint64)
+    DelayLimit = 10e-3*f['General/Samplerate/'].value
+
+    for k in i1StartP:
+        val = i2StartP[(i2StartP > k - DelayLimit) & (i2StartP < k + DelayLimit)]
+        if len(val)==1:
+            CommonEventsi2Index = np.append(CommonEventsi2Index, np.where(i2StartP == val)[0])
+            CommonEventsi1Index = np.append(CommonEventsi1Index, np.where(i1StartP == k)[0])
+        if len(val) > 1:
+            diff=np.absolute(val-k)
+            minIndex=np.where(diff == np.min(diff))
+            CommonEventsi2Index = np.append(CommonEventsi2Index, np.where(i2StartP == val[minIndex])[0])
+            CommonEventsi1Index = np.append(CommonEventsi1Index, np.where(i1StartP == k)[0])
+
+    # for i in range(len(i1StartP)):
+    #     for j in range(len(i2StartP)):
+    #         if np.absolute(i1StartP[i] - i2StartP[j]) < DelayLimit:
+    #             CommonEventsi1Index = np.append(CommonEventsi1Index, i)
+    #             CommonEventsi2Index = np.append(CommonEventsi2Index, j)
+
+    # Only i1
+    Onlyi1Indexes = np.delete(range(len(i1StartP)), CommonEventsi1Index)
+    # Only i2
+    Onlyi2Indexes = np.delete(range(len(i2StartP)), CommonEventsi2Index)
+
+    e = "CommonIndex" in i1
+    if e:
+        del i1['CommonIndex']
+        i1.create_dataset('CommonIndex', data=CommonEventsi1Index)
+        del i2['CommonIndex']
+        i2.create_dataset('CommonIndex', data=CommonEventsi2Index)
+        del i1['OnlyIndex']
+        i1.create_dataset('OnlyIndex', data=Onlyi1Indexes)
+        del i2['OnlyIndex']
+        i2.create_dataset('OnlyIndex', data=Onlyi2Indexes)
+    else:
+        i1.create_dataset('CommonIndex', data=CommonEventsi1Index)
+        i2.create_dataset('CommonIndex', data=CommonEventsi2Index)
+        i1.create_dataset('OnlyIndex', data=Onlyi1Indexes)
+        i2.create_dataset('OnlyIndex', data=Onlyi2Indexes)
+
+    CommonIndexes={}
+    CommonIndexes['i1']=CommonEventsi1Index
+    CommonIndexes['i2']=CommonEventsi2Index
+    OnlyIndexes={}
+    OnlyIndexes['i1'] = Onlyi1Indexes
+    OnlyIndexes['i2'] = Onlyi2Indexes
+    return (CommonIndexes, OnlyIndexes)
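+
+# Vectorized alternative to the matching loop in CombineTheTwoChannels (a
+# sketch, not wired in): np.searchsorted finds the nearest i2 start point for
+# every i1 start point, and pairs closer than DelayLimit count as common
+# events. Like the loop above, it does not enforce one-to-one matching, and it
+# assumes at least two i2 events.
+def MatchStartPointsSketch(i1StartP, i2StartP, DelayLimit):
+    i1StartP = np.int64(i1StartP)          # signed, to avoid unsigned underflow
+    i2StartP = np.sort(np.int64(i2StartP))
+    pos = np.clip(np.searchsorted(i2StartP, i1StartP), 1, len(i2StartP) - 1)
+    nearest = np.where(i1StartP - i2StartP[pos - 1] <= i2StartP[pos] - i1StartP, pos - 1, pos)
+    common = np.abs(i2StartP[nearest] - i1StartP) < DelayLimit
+    return np.where(common)[0], nearest[common]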
+
+def PlotEvent(t1, t2, i1, i2, fit1 = np.array([]), fit2 = np.array([])):
+    fig1 = plt.figure(1, figsize=(20, 7))
+    ax1 = fig1.add_subplot(211)
+    ax2 = fig1.add_subplot(212, sharex=ax1)
+    ax1.plot(t1, i1*1e9, 'b')
+    if len(fit1) != 0:
+        ax1.plot(t1, fit1*1e9, 'y')
+    ax2.plot(t2, i2*1e9, 'r')
+    if len(fit2) != 0:
+        ax2.plot(t2, fit2*1e9, 'y')
+    ax1.set_ylabel('Ionic Current [nA]')
+    #ax1.set_xticklabels([])
+    ax2.set_ylabel('Transverse Current [nA]')
+    ax2.set_xlabel('Time [ms]')
+    ax2.ticklabel_format(useOffset=False)
+    ax1.ticklabel_format(useOffset=False)
+    return fig1
+
+def EditInfoText(self):
+    text2='ionic: {} events, trans: {} events\n'.format(str(self.AnalysisResults['i1']['RoughEventLocations'].shape[0]), str(self.AnalysisResults['i2']['RoughEventLocations'].shape[0]))
+    text1='The file contains:\n{} Common Events\n{} Ionic Only Events\n{} Transverse Only Events'.format(len(self.CommonIndexes['i1']), len(self.OnlyIndexes['i1']), len(self.OnlyIndexes['i2']))
+    self.ui.InfoTexts.setText(text2+text1)
+
+def creation_date(path_to_file):
+    if platform.system() == 'Windows':
+        return os.path.getctime(path_to_file)
+    else:
+        stat = os.stat(path_to_file)
+        try:
+            return stat.st_birthtime  # macOS creation time
+        except AttributeError:
+            # We're probably on Linux. No easy way to get creation dates here,
+            # so we'll settle for when its content was last modified.
+            return stat.st_mtime
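+
+# CUSUM change-point detector: delta is the smallest jump amplitude to look
+# for and h the decision threshold on the cumulative log-likelihood ratios.
+# Returns the piecewise-constant fit mc, the detection samples kd, and the
+# estimated change locations krmv.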
+def CUSUM(input, delta, h):
+    Nd = 0
+    kd = []      # detection samples
+    krmv = []    # estimated change locations
+    k0 = 0
+    k = 0
+    l = len(input)
+    m = np.zeros(l)
+    m[k0] = input[k0]
+    v = np.zeros(l)
+    sp = np.zeros(l)
+    Sp = np.zeros(l)
+    gp = np.zeros(l)
+    sn = np.zeros(l)
+    Sn = np.zeros(l)
+    gn = np.zeros(l)
+
+    while k < l:
+        m[k] = np.mean(input[k0:k + 1])
+        v[k] = np.var(input[k0:k + 1])
+
+        if v[k] > 0:  # guard the first sample after a reset
+            sp[k] = delta / v[k] * (input[k] - m[k] - delta / 2)
+            sn[k] = -delta / v[k] * (input[k] - m[k] + delta / 2)
+
+        Sp[k] = Sp[k - 1] + sp[k]
+        Sn[k] = Sn[k - 1] + sn[k]
+
+        gp[k] = max(gp[k - 1] + sp[k], 0)
+        gn[k] = max(gn[k - 1] + sn[k], 0)
+
+        if gp[k] > h or gn[k] > h:
+            kd.append(k)
+            if gp[k] > h:  # positive jump: locate it on Sp
+                kmin = int(np.argmin(Sp[k0:k + 1]))
+            else:          # negative jump: locate it on Sn
+                kmin = int(np.argmin(Sn[k0:k + 1]))
+            krmv.append(max(kmin + k0 - 1, 0))
+            k0 = k
+            m[k0] = input[k0]
+            v[k0] = 0
+            sp[k0] = 0
+            Sp[k0] = 0
+            gp[k0] = 0
+            sn[k0] = 0
+            Sn[k0] = 0
+            gn[k0] = 0
+            Nd = Nd + 1
+        k += 1
+
+    if Nd == 0:
+        mc = np.mean(input) * np.ones(l)
+    elif Nd == 1:
+        mc = np.concatenate([m[krmv[0]] * np.ones(krmv[0]), m[l - 1] * np.ones(l - krmv[0])])
+    else:
+        mc = m[krmv[0]] * np.ones(krmv[0])
+        for ii in range(1, Nd):
+            mc = np.concatenate([mc, m[krmv[ii]] * np.ones(krmv[ii] - krmv[ii - 1])])
+        mc = np.concatenate([mc, m[l - 1] * np.ones(l - krmv[Nd - 1])])
+    return (mc, kd, krmv)
+
+def CombineEventDatabases(filename, DBfiles):
+    # NOTE: unfinished stub - it still references self.* from the GUI class it
+    # was lifted from, and DBfiles is not merged yet.
+    f = h5py.File(filename, "w")
+
+    general = f.create_group("RawDB")
+    general.create_dataset('FileName', data=self.out['filename'])
+    general.create_dataset('Samplerate', data=self.out['samplerate'])
+    general.create_dataset('Machine', data=self.out['type'])
+    general.create_dataset('TransverseRecorded', data=self.out['graphene'])
+    segmentation_LP = f.create_group("LowPassSegmentation")
+    for k, l in self.AnalysisResults.items():
+        set1 = segmentation_LP.create_group(k)
+        lpset1 = set1.create_group('LowPassSettings')
+        for o, p in self.coefficients[k].items():
+            lpset1.create_dataset(o, data=p)
+        for m, l in self.AnalysisResults[k].items():
+            set1.create_dataset(m, data=l)
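+
+# Minimal check of CUSUM on a synthetic two-level event (illustrative only;
+# delta and h must be tuned to the real jump size and noise level):
+def _DemoCUSUM():
+    rng = np.random.RandomState(0)
+    trace = np.concatenate([np.zeros(500), -2e-9 * np.ones(500), np.zeros(500)])
+    trace = trace + 0.2e-9 * rng.randn(len(trace))
+    mc, kd, krmv = CUSUM(trace, delta=2e-9, h=10)
+    print('estimated change points (should be near 500 and 1000):', krmv)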
+
+def MakeIV(filenames, directory, conductance=10, title=''):
+    if np.size(conductance) == 1:  # allow one conductance value for all files
+        conductance=np.ones(len(filenames))*conductance
+    for idx,filename in enumerate(filenames):
+        print(filename)
+
+        # Make Dir to save images
+        output = OpenFile(filename)
+        if not os.path.exists(directory):
+            os.makedirs(directory)
+
+        AllData = MakeIVData(output, delay=2)
+        if AllData == 0:
+            print('!!!! No Sweep in: ' + filename)
+            continue
+
+        # Plot Considered Part
+        # figExtracteParts = plt.figure(1)
+        # ax1 = figExtracteParts.add_subplot(211)
+        # ax2 = figExtracteParts.add_subplot(212, sharex=ax1)
+        # (ax1, ax2) = uf.PlotExtractedPart(output, AllData, current = 'i1', unit=1e9, axis = ax1, axis2=ax2)
+        # plt.show()
+        # figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.eps')
+        # figExtracteParts.savefig(directory + os.sep + 'PlotExtracted_' + str(os.path.split(filename)[1])[:-4] + '.png', dpi=150)
+
+        # Plot IV
+        if output['graphene']:
+            figIV2 = plt.figure(3)
+            ax2IV = figIV2.add_subplot(111)
+            ax2IV = PlotIV(output, AllData, current='i2', unit=1e9, axis=ax2IV, WithFit=1, title=title[idx])
+            figIV2.tight_layout()
+            figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.png', dpi=300)
+            figIV2.savefig(directory + os.sep + str(os.path.split(filename)[1]) + '_IV_i2.eps')
+            figIV2.clear()
+
+        figIV = plt.figure(2)
+        ax1IV = figIV.add_subplot(111)
+        ax1IV = PlotIV(output, AllData, current='i1', unit=1e9, axis=ax1IV, WithFit=1, PoreSize=[conductance[idx], 20e-9], title=title[idx])
+        figIV.tight_layout()
+
+        # Save Figures
+        figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'IV_i1.png', dpi=300)
+        figIV.savefig(directory + os.sep + str(os.path.split(filename)[1]) + 'IV_i1.eps')
+        figIV.clear()
+
+def TwoChannelAnalysis(filenames, outdir, UpwardsOn=0):
+
+    for filename in filenames:
+        print(filename)
+        inp = OpenFile(filename)
+        file = os.sep + str(os.path.split(filename)[1][:-4])
+
+        if not os.path.exists(outdir):
+            os.makedirs(outdir)
+
+        # Low Pass Event Detection
+        AnalysisResults = {}
+
+        if inp['graphene']:
+            chan = ['i1', 'i2']  # For looping over the channels
+        else:
+            chan = ['i1']
+
+        for sig in chan:
+            AnalysisResults[sig] = {}
+            AnalysisResults[sig]['RoughEventLocations'] = RecursiveLowPassFast(inp[sig], pm.coefficients[sig], inp['samplerate'])
+            if UpwardsOn:  # Upwards detection can be turned on or off
+                AnalysisResults[sig + '_Up'] = {}
+                AnalysisResults[sig + '_Up']['RoughEventLocations'] = RecursiveLowPassFastUp(inp[sig], pm.coefficients[sig], inp['samplerate'])
+
+        # f.PlotRecursiveLPResults(AnalysisResults['i2'], inp, outdir, 1e-3, channel='i2')
+        # f.PlotRecursiveLPResults(AnalysisResults['i2_Up'], inp, outdir+'UP_', 1e-3, channel='i2')
+
+        # Refine the Rough Event Detection done by the LP filter and Add event infos
+        AnalysisResults = RefinedEventDetection(inp, AnalysisResults, signals=chan,
+                                                limit=pm.MinimalFittingLimit * inp['samplerate'])
+
+        # Correlate the two channels (module-level helpers; the original called
+        # them through an undefined `f.` prefix and an undefined `directory`)
+        if inp['graphene']:
+            (CommonIndexes, OnlyIndexes) = CorrelateTheTwoChannels(AnalysisResults, 10e-3 * inp['samplerate'])
+            print('\n\nAnalysis Done...\nThere are {} common events\n{} Events on i1 only\n{} Events on i2 only'.format(
+                len(CommonIndexes['i1']), len(OnlyIndexes['i1']), len(OnlyIndexes['i2'])))
+            # Plot The Events
+            SaveAllPlots(CommonIndexes, OnlyIndexes, AnalysisResults, outdir, inp, pm.PlotBuffer)
+        else:
+            # Plot The Events
+            SaveAllAxopatchEvents(AnalysisResults, outdir, inp, pm.PlotBuffer)
\ No newline at end of file
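+
+if __name__ == '__main__':
+    # Minimal usage sketch (the file path is a placeholder): open a recording,
+    # run the recursive low-pass detector on the ionic channel with the shared
+    # coefficient set, and report the rough event count.
+    demo = OpenFile('example_recording.dat')
+    rough = RecursiveLowPassFast(demo['i1'], pm.coefficients['i1'], demo['samplerate'])
+    print('{} rough events found'.format(len(rough)))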