Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 15 additions & 14 deletions unityvr/analysis/align2img.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,8 @@ def debugAlignmentPlots(uvrDat, imgMetadat, imgInd, volFramePos, lims=[0,100]):

# sanity check to see the difference in frame start times
fps = imgMetadat['fpsscan'] #frame rate of scanimage
sampling_rate = len(uvrDat.nidDf.dropna())/(uvrDat.nidDf.dropna()['time'].iloc[-1]-uvrDat.nidDf.dropna()['time'].iloc[0])
nid_valid = uvrDat.nidDf.dropna(subset=['time'])
sampling_rate = len(nid_valid)/(nid_valid['time'].iloc[-1]-nid_valid['time'].iloc[0])
axs[2].axvline(int(np.round(sampling_rate/fps)), color='r', linestyle='-')
axs[2].axvline(int(np.round(sampling_rate/fps))+1, color='r', linestyle='--')
axs[2].axvline(int(np.round(sampling_rate/fps))-1, color='r', linestyle='--')
Expand Down Expand Up @@ -101,18 +102,18 @@ def generateUnityExpDf(imgVolumeTimes, uvrDat, imgMetadat, suppressDepugPlot = F

#use volume start frames to downsample unityDfs
for i,unityDfstr in enumerate(unityDfs):
unityDf = getattr(uvrDat,unityDfstr)
if (frameStr in unityDf):
if len(unityDf[frameStr].unique())==len(unityDf[frameStr]):
volFrameId = np.array([np.where(volFrame[i] == unityDf.frame.values)[0][0] for i in range(len(volFrame)) if volFrame[i] in unityDf.frame.values])
# try: volFrameId = np.array([np.where(volFrame[i] == unityDf.frame.values)[0][0] for i in range(len(volFrame))])
# except IndexError:
# volFrameId = np.where(np.in1d(unityDf.frame.values,volFrame, ))[0]
# print('errored out in :', unityDfstr) #in 1d gives true when the element of the 1st array is in the second array
#volFrameId = np.where(np.in1d(unityDf.frame.values,volFrame, ))[0] #in 1d gives true when the element of the 1st array is in the second array
framesinPos = np.where(np.in1d(uvrDat.posDf.frame.values[volFramePos], unityDf.frame.values[volFrameId]))[0] #which volume start frames of current Df are in posDf
unityDfsDS[i] = unityDf.iloc[volFrameId,:].copy()
unityDfsDS[i][timeStr] = imgVolumeTimes[framesinPos].copy() #get the volume start time for the appropriate volumes in the unity array
unityDf = getattr(uvrDat,unityDfstr)
if (frameStr in unityDf) and len(unityDf) > 0:
if len(unityDf[frameStr].unique())==len(unityDf[frameStr]):
volFrameId = np.array([np.where(volFrame[i] == unityDf.frame.values)[0][0] for i in range(len(volFrame)) if volFrame[i] in unityDf.frame.values])
# try: volFrameId = np.array([np.where(volFrame[i] == unityDf.frame.values)[0][0] for i in range(len(volFrame))])
# except IndexError:
# volFrameId = np.where(np.in1d(unityDf.frame.values,volFrame, ))[0]
# print('errored out in :', unityDfstr) #in 1d gives true when the element of the 1st array is in the second array
#volFrameId = np.where(np.in1d(unityDf.frame.values,volFrame, ))[0] #in 1d gives true when the element of the 1st array is in the second array
framesinPos = np.where(np.in1d(uvrDat.posDf.frame.values[volFramePos], unityDf.frame.values[volFrameId]))[0] #which volume start frames of current Df are in posDf
unityDfsDS[i] = unityDf.iloc[volFrameId,:].copy()
unityDfsDS[i][timeStr] = imgVolumeTimes[framesinPos].copy() #get the volume start time for the appropriate volumes in the unity array

expDf = mergeUnityDfs([x for x in unityDfsDS if x is not None],**mergeUnityDfs_params)
return expDf
Expand Down Expand Up @@ -270,7 +271,7 @@ def addImagingTimeToUvrDat(imgDataTime, uvrDat, imgMetadat, timeStr = 'volumes [
if dataframeAppend in f:

unityDf = getattr(uvrDat,f)
if frameStr in unityDf:
if frameStr in unityDf and len(unityDf) > 0:
unityDf[timeStr] = interpF(unityDf['frame'])
setattr(uvrDat,f,unityDf)
return uvrDat
Expand Down
111 changes: 103 additions & 8 deletions unityvr/preproc/logproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,14 +87,57 @@ def saveData(self, saveDir, saveName):

return savepath

def isReplayLog(dat):
    """Return the 'isReplaySession' flag from the first log entry that carries it.

    Scans the parsed log entries (list of dicts) for the first one containing
    the 'isReplaySession' key and returns its value; returns False when no
    entry has the key.
    """
    for entry in dat:
        if 'isReplaySession' in entry:
            # Return the recorded flag itself (falls back to False if the
            # key lookup somehow misses).
            return entry.get("isReplaySession", False)
    return False


# constructor for unityVRexperiment
def constructUnityVRexperiment(dirName,fileName,computePDtrace = True,enforce_cm = False,**kwargs):
def constructUnityVRexperiment(dirName,fileName,enforce_cm = False,colKeyPairs={'imgFrameTrigger':'imgfsig', 'tracePD':'pdsig'},**kwargs):

dat = openUnityLog(dirName, fileName)

metadat = makeMetaDict(dat, fileName)
objDf = objDfFromLog(dat, enforce_cm=enforce_cm)
posDf, ftDf, nidDf = timeseriesDfFromLog(dat, computePDtrace, enforce_cm=enforce_cm, **kwargs)

isReplay = metadat.get('isReplay', False)

if isReplay:
# Replay logs use worldPositionReplay keys instead of attemptedTranslation/worldPosition
posDf = replayPosDfFromLog(dat, enforce_cm=enforce_cm)
ftDf = pd.DataFrame(columns=ftDfCols)
dtDf = dtDfFromLog(dat)
try:
pdDf = pdDfFromLog(dat, colKeyPairs=colKeyPairs)
except:
pdDf = pd.DataFrame()

if len(posDf) > 0:
posDf.time = posDf.time - posDf.time[0]
if dtDf is not None and len(dtDf) > 0:
dtDf.time = dtDf.time - dtDf.time[0]
if len(pdDf) > 0:
pdDf.time = pdDf.time - pdDf.time[0]

if dtDf is not None and len(dtDf) > 0 and len(posDf) > 0:
posDf = pd.merge(dtDf, posDf, on="frame", how='outer').rename(
columns={'time_x':'time'}).drop(['time_y'], axis=1)

if len(pdDf) > 0 and dtDf is not None and len(dtDf) > 0:
nidDf = pd.merge(dtDf, pdDf, on="frame", how='left').rename(
columns={'time_x':'time'}).drop(['time_y'], axis=1)
if 'pdsig' in nidDf.columns:
nidDf["pdFilt"] = nidDf.pdsig.values
nidDf.pdFilt.values[np.isfinite(nidDf.pdsig.values)] = medfilt(
nidDf.pdsig.values[np.isfinite(nidDf.pdsig.values)])
nidDf = generateInterTime(nidDf)
else:
nidDf = pd.DataFrame()
else:
posDf, ftDf, nidDf = timeseriesDfFromLog(dat, colKeyPairs=colKeyPairs, enforce_cm=enforce_cm, **kwargs)

texDf = texDfFromLog(dat)
vidDf = vidDfFromLog(dat)
attmptDf = attmptDfFromLog(dat, enforce_cm=enforce_cm)
Expand Down Expand Up @@ -190,7 +233,7 @@ def makeMetaDict(dat, fileName):
except: translationalGain = 1.0

matching = [s for s in dat if "refreshRateHz" in s]
setFrameRate = matching[0]["refreshRateHz"]
setFrameRate = matching[0]["refreshRateHz"] if len(matching) > 0 else 0

metadata = {
'expid': metadat[0],
Expand All @@ -206,7 +249,8 @@ def makeMetaDict(dat, fileName):
'setFrameRate': setFrameRate,
'notes': metadat[5],
'temperature': metadat[6],
'angle_convention':"right-handed"
'angle_convention':"right-handed",
'isReplay': isReplayLog(dat)
}

return metadata
Expand Down Expand Up @@ -308,6 +352,52 @@ def posDfFromLog(dat, posDfKey='attemptedTranslation', fictracSubject=None, igno
return pd.DataFrame()


def replayPosDfFromLog(dat, enforce_cm=False):
    """Extract position data from replay logs.
    Primary (x, y, angle) = fly's attempted position (behavioral output).
    Secondary (replayed_x, replayed_y, replayed_angle) = what was displayed on screen."""
    records = [s for s in dat if 'worldPositionReplay' in s]

    # Translational gain comes from the ficTracBallRadius entry; fall back to
    # 1.0 when it is missing or recorded as 0 (avoids a divide-by-zero).
    radEntries = [s for s in dat if "ficTracBallRadius" in s]
    if len(radEntries) > 0 and 'translationalGain' in radEntries[0]:
        gain = radEntries[0]['translationalGain']
    else:
        gain = 1.0
    if gain == 0:
        gain = 1.0
    # Unit conversion factor: dm -> cm when enforce_cm is requested.
    convf = 10.0 if enforce_cm else 1.0

    rows = []
    for rec in records:
        framedat = {
            'frame': rec['frame'],
            'time': rec['timeSecs'],
            # What was displayed on screen (replay); Unity y-rotation is
            # negated to match the right-handed angle convention.
            'replayed_x': rec['worldPositionReplay']['x'] / gain * convf,
            'replayed_y': rec['worldPositionReplay']['z'] / gain * convf,
            'replayed_angle': (-rec['worldRotationDegsReplay']['y']) % 360,
            # Primary: fly's attempted position (behavioral output);
            # NaN when the attempt keys were not logged for this frame.
            'x': (rec['worldPositionAttempt']['x'] / gain * convf
                  if 'worldPositionAttempt' in rec else np.nan),
            'y': (rec['worldPositionAttempt']['z'] / gain * convf
                  if 'worldPositionAttempt' in rec else np.nan),
            'angle': ((-rec['worldRotationDegsAttempt']['y']) % 360
                      if 'worldRotationDegsAttempt' in rec else np.nan),
        }
        # One-row frame per log entry; concat below stacks them in order.
        rows.append(pd.Series(framedat).to_frame().T)

    if len(rows) == 0:
        return pd.DataFrame()
    print('correcting for Unity angle convention.')
    return pd.concat(rows, ignore_index=True)


def ftDfFromLog(dat):
# get fictrac data
matching = [s for s in dat if "ficTracDeltaRotationVectorLab" in s]
Expand Down Expand Up @@ -342,7 +432,8 @@ def attmptDfFromLog(dat, enforce_cm = False):
'dyattempt_ft': -match['fictracAttempt']['x']*matchingRad[0]['ficTracBallRadius']*convf,
#scale by ball radius but not by translational gain to get true x,y in unity units (dm or if enforced cm), rightward motion
'dxattempt_ft': match['fictracAttempt']['y']*matchingRad[0]['ficTracBallRadius']*convf, #forward motion
'angleattempt_ft': (-np.rad2deg(match['fictracAttempt']['z']))%360} #convert to degrees and flip to align with unity convention
'angleattempt_ft': np.rad2deg(match['fictracAttempt']['z'])} #convert to degrees
# This is a conflict from Shivam's UnityVR. 'angleattempt_ft': (-np.rad2deg(match['fictracAttempt']['z']))%360} #convert to degrees and flip to align with unity convention
entries[entry] = pd.Series(framedat).to_frame().T

if len(entries) > 0:
Expand Down Expand Up @@ -397,6 +488,8 @@ def texDfFromLog(dat):

# get texture names
matchingSessionParams = [s for s in dat if "sessionParameters" in s]
if len(matchingSessionParams) == 0:
return pd.DataFrame()
#get texture names
textureMatches = list(pd.Series([dict(l.split(':', 1) for l in matchingSessionParams[0]['sessionParameters']
)[m] for m in dict(l.split(':', 1) for l in matchingSessionParams[0]['sessionParameters']
Expand Down Expand Up @@ -469,9 +562,7 @@ def tempDfFromLog(dat):
if len(entries) > 0:
tempDf = pd.concat(entries,ignore_index = True).groupby('frame').mean().reset_index() #average over multiple temperature readings per unity frame
dtDf = dtDfFromLog(dat) #get the frame times
if len(dtDf)>0:
tempDf = pd.merge(dtDf, tempDf, on="frame", how='outer')
tempDf.time = tempDf.time-tempDf.time[0]
if len(dtDf)>0: tempDf = pd.merge(dtDf, tempDf, on="frame", how='outer')
return tempDf
else:
print('No temperature data was recorded.')
Expand Down Expand Up @@ -500,9 +591,13 @@ def timeseriesDfFromLog(dat, colKeyPairs={'imgFrameTrigger':'imgfsig', 'tracePD'
try: pdDf = pdDfFromLog(dat, colKeyPairs=colKeyPairs)
except: print("No analog input data was recorded.")



if len(posDf) > 0: posDf.time = posDf.time-posDf.time[0]
if len(dtDf) > 0: dtDf.time = dtDf.time-dtDf.time[0]
if len(pdDf) > 0: pdDf.time = pdDf.time-pdDf.time[0]
if ((len(posDf)>0)&(len(pdDf)>0)&(len(dtDf)>0)):
print('pdDf time:', pdDf.time[0], 'posDf time:', posDf.time[0], 'dtDf time:', dtDf.time[0])

if len(ftDf) > 0:
ftDf.ficTracTReadMs = ftDf.ficTracTReadMs-ftDf.ficTracTReadMs[0]
Expand Down
Loading