diff --git a/.gitignore b/.gitignore index afa0a4d730..80fac41705 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,9 @@ +# Documentation files Packages/doc/html +Packages/doc/IPNWB +Packages/doc/doctrees +Packages/doc/sphinx-output.log + Packages/MIES/backup *.orig *.rej diff --git a/Packages/Conversion/MIES_MassExperimentProcessing.ipf b/Packages/Conversion/MIES_MassExperimentProcessing.ipf new file mode 100644 index 0000000000..d38d4f7d44 --- /dev/null +++ b/Packages/Conversion/MIES_MassExperimentProcessing.ipf @@ -0,0 +1,361 @@ +#pragma rtGlobals=3 // Use modern global access method. + +/// @file MIES_MassExperimentProcessing.ipf +/// @brief __MEP__ Process multiple MIES pxps to convert data into NWBv2 +/// +/// Installation: +/// - Stop Igor Pro +/// - Create a shortcut to this file and place it in the `Igor Procedures` folder +/// - Ensure that only MIES is installed and no other Igor Pro packages +/// - In the MIES installation folder (All Users: `C:\Program Files\MIES`, User: `C:\Users\$User\Documents\MIES`) +/// create an empty file named `UserConfig.txt`. +/// +/// Running: +/// - Start Igor Pro +/// - Select "Macros" -> "Mass convert PXPs to NWBv2" +/// - Enter an input and output folder for the conversion +/// - Wait until it's done +/// +/// In the output folder there will be a `conversion.json` file with results of +/// the conversion process. Search for the `error` key for failed conversions. + +// #define MEP_DEBUGGING + +#ifdef MEP_DEBUGGING + +static StrConstant INPUT_FOLDER = "C:tim-data:pxp_examples_for_nwb_2:" +static StrConstant OUTPUT_FOLDER = "C:tim-data:output:" + +#else + +static StrConstant INPUT_FOLDER = "" +static StrConstant OUTPUT_FOLDER = "" + +#endif + +Menu "Macros" + "Mass convert PXPs to NWBv2", /Q, StartMultiExperimentProcess() +End + +// NOTE: If you use these procedures for your own purposes, change the package name +// to a distinctive name so that you don't clash with other people's preferences. +static StrConstant kPackageName = "MIES PXP to NWBv2" +static StrConstant kPreferencesFileName = "ProcessPrefsMIESNWBv2.bin" +static Constant kPrefsRecordID = 0 // The recordID is a unique number identifying a record within the preference file. +// In this example we store only one record in the preference file. + +// The structure stored in preferences to keep track of what experiment to load next. +// If you add, remove or change fields you must delete your old prefs file. See the help +// topic "Saving Package Preferences" for details. +static Structure MultiExperimentProcessPrefs + uint32 version // Prefs version + uint32 processRunning // Truth that we are running the mult-experiment process + char settingsFile[256] +EndStructure + +// In version 101 of the prefs structure we increased folderPath from 100 to 256 bytes +static Constant kPrefsVersionNumber = 102 + +// Loads preferences into our structure. +static Function LoadPackagePrefs(prefs) + STRUCT MultiExperimentProcessPrefs &prefs + + Variable currentPrefsVersion = kPrefsVersionNumber + + // This loads preferences from disk if they exist on disk. + LoadPackagePreferences /MIS=1 kPackageName, kPreferencesFileName, kPrefsRecordID, prefs + // Printf "%d byte loaded\r", V_bytesRead + + // If error or prefs not found or not valid, initialize them. + if (V_flag!=0 || V_bytesRead==0 || prefs.version!=currentPrefsVersion) + prefs.version = currentPrefsVersion + + prefs.processRunning = 0 + prefs.settingsFile = "" + + SavePackagePrefs(prefs) // Create default prefs file. 
+ endif +End + +// Saves our structure to preferences. +static Function SavePackagePrefs(prefs) + STRUCT MultiExperimentProcessPrefs &prefs + + SavePackagePreferences kPackageName, kPreferencesFileName, kPrefsRecordID, prefs +End + +// This is the routine that you would need to change to use this procedure file for your own purposes. +// See comments about labeled "TO USE FOR YOUR OWN PURPOSES". +static Function ProcessCurrentExperiment(prefs) + STRUCT MultiExperimentProcessPrefs &prefs + + variable jsonID, index + string outputFilePath, inputFile, outputFolder + + jsonID = GetJSON(prefs) + + if(IsAppropriateExperiment()) + + outputFolder = JSON_GetString(jsonID, "/outputFolder") + + PathInfo home + inputFile = S_path + GetExperimentName() + ".pxp" + + outputFilePath = outputFolder + S_path + GetExperimentName() + ".nwb" + + index = JSON_GetVariable(jsonID, "/index") + JSON_AddString(jsonID, "/log/" + num2str(index) + "/from", inputFile) + JSON_AddString(jsonID, "/log/" + num2str(index) + "/to", outputFilePath) + + DoWindow/K HistoryCarbonCopy + NewNotebook/V=0/F=0 /N=HistoryCarbonCopy + + try + PerformMiesTasks(outputFilePath); AbortOnRTE + catch + print "Caught an RTE" + JSON_AddBoolean(jsonID, "/log/" + num2str(index) + "/error", 1) + JSON_SetVariable(jsonID, "/errors", JSON_GetVariable(jsonID, "/errors") + 1) + HDF5CloseFile/A/Z 0 + DeleteFile/Z outputFilePath + endtry + + Notebook HistoryCarbonCopy getData=1 + JSON_AddString(jsonID, "/log/" + num2str(index) + "/output", trimstring(S_Value)) + + JSON_SetVariable(jsonID, "/processed", JSON_GetVariable(jsonID, "/processed") + 1) + else + JSON_SetVariable(jsonID, "/skipped", JSON_GetVariable(jsonID, "/skipped") + 1) + endif + + JSON_SetVariable(jsonID, "/index", JSON_GetVariable(jsonID, "/index") + 1) + + StoreJSON(prefs, jsonID) +End + +static Function PerformMiesTasks(outputFilePath) + string outputFilePath + + string folder, message + variable nwbVersion, error + + printf "Free Memory: %g GB\r", GetFreeMemory() + + if(FileExists(outputFilePath)) + print "Output file already exists, skipping!" + return 0 + endif + + folder = GetFolder(outputFilePath) + + if(!FolderExists(folder)) + CreateFolderOnDisk(folder) + endif + + ClearRTError() + + nwbVersion = 2 + NWB_ExportAllData(nwbVersion, overrideFilePath=outputFilePath) + HDF5CloseFile/A/Z 0 + + message = GetRTErrMessage() + error = GetRTError(1) + ASSERT(error == 0, "Encountered lingering RTE of " + num2str(error) + "(message: " + message + ") after executing NWB_ExportAllData.") +End + +static Function IsAppropriateExperiment() + + return ItemsInList(GetAllDevicesWithContent()) > 0 +End + +// Returns full path to the next experiment file to be loaded or "" if we are finished. 
+static Function/S FindNextExperiment(prefs) + STRUCT MultiExperimentProcessPrefs &prefs + + variable jsonID, index + + jsonID = GetJSON(prefs) + + WAVE/T inputFiles = JSON_GetTextWave(jsonID, "inputFiles") + index = JSON_GetVariable(jsonID, "/index") + JSON_Release(jsonID) + + if(!(index >= DimSize(inputFiles, ROWS))) + return inputFiles[index] + endif + + return "" +End + +// Caller needs to release json +static Function GetJSON(prefs) + STRUCT MultiExperimentProcessPrefs &prefs + + string data, fname + + [data, fname] = LoadTextFile(prefs.settingsFile) + + return JSON_Parse(data) +End + +// json will be released +static Function StoreJSON(prefs, jsonID) + STRUCT MultiExperimentProcessPrefs &prefs + variable jsonID + + string data = JSON_Dump(jsonID, indent=2) + + SaveTextFile(data, prefs.settingsFile) + + ASSERT(!JSON_Release(jsonID), "Could not release json") +End + +// Posts commands to Igor's operation queue to close the current experiment and open the next one. +// Igor executes operation queue commands when it is idling - that is, when it is not running a +// function or operation. +static Function PostLoadNextExperiment(nextExperimentFullPath) + String nextExperimentFullPath + + ASSERT(FileExists(nextExperimentFullPath), "Experiment must exist") + + Execute/P/Q "NEWEXPERIMENT " // Post command to close this experiment. + + Execute/P/Q "SetIgorOption poundDefine=MIES_PXP_NWB_CONVERSION_SKIP_SAVING" + + // Post command to open next experiment. + String cmd + sprintf cmd "Execute/P/Q \"LOADFILE %s\"", nextExperimentFullPath + Execute/Q cmd +End + +// This is the hook function that Igor calls whenever a file is opened. We use it to +// detect the opening of an experiment and to call our ProcessCurrentExperiment function. +static Function AfterFileOpenHook(refNum,file,pathName,type,creator,kind) + Variable refNum,kind + String file,pathName,type,creator + + STRUCT MultiExperimentProcessPrefs prefs + + LoadPackagePrefs(prefs) // Load our prefs into our structure + if (prefs.processRunning == 0) + return 0 // Process not yet started. + endif + + // Check file type + if (CmpStr(type,"IGsU") != 0) + return 0 // This is not a packed experiment file + endif + + ProcessCurrentExperiment(prefs) + + // See if there are more experiments to process. + String nextExperimentFullPath = FindNextExperiment(prefs) + if (strlen(nextExperimentFullPath) == 0) + // Process is finished + prefs.processRunning = 0 // Flag process is finished. + Execute/P "NEWEXPERIMENT " // Post command to close this experiment. + print "Multi-experiment process is finished." + else + // Load the next experiment in the designated folder, if any. + PostLoadNextExperiment(nextExperimentFullPath) // Post operation queue commands to load next experiment + endif + + SavePackagePrefs(prefs) + + return 0 // Tell Igor to handle file in default fashion. +End + +// This function enables our special Igor hooks which skip saving the experiment +Function StartMultiExperimentProcess() + + Execute/P/Q "SetIgorOption poundDefine=MIES_PXP_NWB_CONVERSION_SKIP_SAVING" + Execute/P/Q "COMPILEPROCEDURES " + Execute/P/Q "StartMultiExperimentProcessWrapper()" +End + +// Allow user to choose the folder containing the experiment files and start the process. 
+Function StartMultiExperimentProcessWrapper() + + string message, settingsFile, inputFolder, outputFolder, files + variable jsonID + + STRUCT MultiExperimentProcessPrefs prefs + LoadPackagePrefs(prefs) + + message = "Choose input folder with MIES pxps" + if(!cmpstr(INPUT_FOLDER, "")) + NewPath/O/Q/M=message MultiExperimentInputFolder + else + NewPath/O/Q/M=message MultiExperimentInputFolder, INPUT_FOLDER + endif + + if (V_flag != 0) + return -1 // User canceled from New Path dialog + endif + + PathInfo MultiExperimentInputFolder + inputFolder = S_Path + ASSERT(V_flag, "Invalid path") + + message = "Choose output folder for NWBv2 files" + if(!cmpstr(OUTPUT_FOLDER, "")) + NewPath/O/Q/M=message MultiExperimentOutputFolder + else + NewPath/O/Q/M=message MultiExperimentOutputFolder, OUTPUT_FOLDER + endif + + if (V_flag != 0) + return -1 // User canceled from New Path dialog + endif + + PathInfo MultiExperimentOutputFolder + outputFolder = S_Path + ASSERT(V_flag, "Invalid path") + + files = GetAllFilesRecursivelyFromPath("MultiExperimentInputFolder", extension=".pxp") + + // 16: Case-insensitive alphanumeric sort that sorts wave0 and wave9 before wave10. + // ... + // 64: Ignore + and - in the alphanumeric sort so that "Text-09" sorts before "Text-10". Set options to 80 or 81. + files = SortList(files, "|", 80) + + WAVE/T/Z inputPXPs = ListToTextWave(files, "|") + + jsonID = JSON_New() + JSON_AddWave(jsonID, "/inputFiles", inputPXPs) + JSON_AddString(jsonID, "/inputFolder", inputFolder) + JSON_AddString(jsonID, "/outputFolder", outputFolder) + JSON_AddVariable(jsonID, "/index", 0) + JSON_AddVariable(jsonID, "/processed", 0) + JSON_AddVariable(jsonID, "/errors", 0) + JSON_AddVariable(jsonID, "/skipped", 0) + JSON_AddVariable(jsonID, "/total", DimSize(inputPXPs, ROWS)) + + JSON_AddTreeArray(jsonID, "/log") + JSON_AddObjects(jsonID, "/log", objCount = DimSize(inputPXPs, ROWS)) + + prefs.settingsFile = outputFolder + "conversion.json" + StoreJSON(prefs, jsonID) + + prefs.processRunning = 1 // Flag process is started. + + // Start the process off by loading the first experiment. + String nextExperimentFullPath = FindNextExperiment(prefs) + PostLoadNextExperiment(nextExperimentFullPath) // Start the process off + + SavePackagePrefs(prefs) + + return 0 +End + +#ifdef MEP_DEBUGGING + +Function TestMe() + + STRUCT MultiExperimentProcessPrefs prefs + + LoadPackagePrefs(prefs) + ProcessCurrentExperiment(prefs) +End + +#endif diff --git a/Packages/IPNWB b/Packages/IPNWB index 44cb0b1fe9..e66fdcebc7 160000 --- a/Packages/IPNWB +++ b/Packages/IPNWB @@ -1 +1 @@ -Subproject commit 44cb0b1fe9ad9fb188d95720573a09cc7825094a +Subproject commit e66fdcebc7ab07338c96794d6b0672c250a0ede5 diff --git a/Packages/MIES/MIES_AnalysisBrowser.ipf b/Packages/MIES/MIES_AnalysisBrowser.ipf index d4b966f678..9e5c28595c 100644 --- a/Packages/MIES/MIES_AnalysisBrowser.ipf +++ b/Packages/MIES/MIES_AnalysisBrowser.ipf @@ -570,8 +570,7 @@ End /// @brief Analyse data in NWB file and sort as sweeps. /// -/// Function uses source attribute of /acquisition/timeseries -/// and /stimulus/presentation +/// @todo: Update this function for the use with SweepTable /// /// @param discLocation location of NWB File on Disc. 
/// ID in AnalysisBrowserMap @@ -580,7 +579,7 @@ End static Function AB_LoadSweepsFromNWB(discLocation, dataFolder, device) string discLocation, dataFolder, device - variable h5_fileID, h5_groupID + variable h5_fileID, nwbVersion string channelList Wave/I sweeps = GetAnalysisChannelSweepWave(dataFolder, device) @@ -588,34 +587,32 @@ static Function AB_LoadSweepsFromNWB(discLocation, dataFolder, device) // open hdf5 file h5_fileID = IPNWB#H5_OpenFile(discLocation) - // load from /acquisition/timeseries - channelList = IPNWB#ReadAcquisition(h5_fileID) - h5_groupID = IPNWB#OpenAcquisition(h5_fileID) + // load from /acquisition + nwbVersion = IPNWB#GetNWBMajorVersion(IPNWB#ReadNWBVersion(h5_fileID)) + channelList = IPNWB#ReadAcquisition(h5_fileID, nwbVersion) Wave/T acquisition = GetAnalysisChannelAcqWave(dataFolder, device) - AB_StoreChannelsBySweep(h5_groupID, channelList, sweeps, acquisition) - HDF5CloseGroup/Z h5_groupID + AB_StoreChannelsBySweep(h5_fileID, nwbVersion, channelList, sweeps, acquisition) // load from /stimulus/presentation channelList = IPNWB#ReadStimulus(h5_fileID) - h5_groupID = IPNWB#OpenStimulus(h5_fileID) Wave/T stimulus = GetAnalysisChannelStimWave(dataFolder, device) - AB_StoreChannelsBySweep(h5_groupID, channelList, sweeps, stimulus) - HDF5CloseGroup/Z h5_groupID + AB_StoreChannelsBySweep(h5_fileID, nwbVersion, channelList, sweeps, stimulus) // close hdf5 file IPNWB#H5_CloseFile(h5_fileID) End /// @brief Store channelList in storage wave according to index in sweeps wave -static Function AB_StoreChannelsBySweep(groupID, channelList, sweeps, storage) - variable groupID +/// +/// @todo Update this function for the use with SweepTable +static Function AB_StoreChannelsBySweep(groupID, nwbVersion, channelList, sweeps, storage) + variable groupID, nwbVersion string channelList Wave/I sweeps Wave/T storage - variable numChannels, numSweeps, i + variable numChannels, numSweeps, i, sweepNo, sweep_table_id string channelString - STRUCT IPNWB#ReadChannelParams channel numChannels = ItemsInList(channelList) numSweeps = GetNumberFromWaveNote(sweeps, NOTE_INDEX) @@ -623,15 +620,28 @@ static Function AB_StoreChannelsBySweep(groupID, channelList, sweeps, storage) EnsureLargeEnoughWave(storage, minimumSize = numSweeps, dimension = ROWS) storage = "" + WAVE/Z SweepTableNumber + WAVE/Z/T SweepTableSeries + if(nwbVersion == 2) + [SweepTableNumber, SweepTableSeries] = IPNWB#LoadSweepTable(groupID, nwbVersion) + endif + for(i = 0; i < numChannels; i += 1) channelString = StringFromList(i, channelList) - IPNWB#LoadSourceAttribute(groupID, channelString, channel) - FindValue/I=(channel.sweep)/S=0 sweeps + if(nwbVersion == 2) + WAVE indices = FindIndizes(SweepTableSeries, col = 0, str = channelString) + ASSERT(DimSize(indices, ROWS) == 1, "Invalid Amount of Sweep Number Associated in " + channelString) + sweepNo = SweepTableNumber[indices[0]] + else + sweepNo = IPNWB#LoadSweepNumber(groupID, channelString, nwbVersion) + endif + FindValue/I=(sweepNo)/S=0 sweeps + ASSERT(isFinite(sweepNo), "Invalid Sweep Number Associated in " + channelString) if(V_Value == -1) numSweeps += 1 EnsureLargeEnoughWave(sweeps, minimumSize = numSweeps, dimension = ROWS, initialValue = -1) EnsureLargeEnoughWave(storage, minimumSize = numSweeps, dimension = ROWS) - sweeps[numSweeps - 1] = channel.sweep + sweeps[numSweeps - 1] = sweepNo storage[numSweeps - 1] = AddListItem(channelString, "") else storage[V_Value] = AddListItem(channelString, storage[V_Value]) @@ -933,11 +943,6 @@ static Function/S 
AB_LoadLabNotebookFromNWB(discLocation) return "" endtry - if(!IPNWB#CheckIntegrity(h5_fileID)) - IPNWB#H5_CloseFile(h5_fileID) - return "" - endif - notebookList = IPNWB#ReadLabNoteBooks(h5_fileID) h5_notebooksID = IPNWB#H5_OpenGroup(h5_fileID, "/general/labnotebook") @@ -1567,7 +1572,7 @@ static Function AB_LoadSweepFromNWB(discLocation, sweepDFR, device, sweep) variable sweep string channelList - variable h5_fileID, h5_groupID, numSweeps + variable h5_fileID, h5_groupID, numSweeps, version Wave/T nwb = AB_GetMap(discLocation) @@ -1593,12 +1598,13 @@ static Function AB_LoadSweepFromNWB(discLocation, sweepDFR, device, sweep) // open NWB file h5_fileID = IPNWB#H5_OpenFile(discLocation) + version = IPNWB#GetNWBMajorVersion(IPNWB#ReadNWBVersion(h5_fileID)) // load acquisition Wave/T acquisition = GetAnalysisChannelAcqWave(nwb[%DataFolder], device) channelList = acquisition[V_Value] - h5_groupID = IPNWB#OpenAcquisition(h5_fileID) - if(AB_LoadSweepFromNWBgeneric(h5_groupID, channelList, sweepDFR, configSweep)) + h5_groupID = IPNWB#OpenAcquisition(h5_fileID, version) + if(AB_LoadSweepFromNWBgeneric(h5_groupID, version, channelList, sweepDFR, configSweep)) return 1 endif @@ -1606,7 +1612,7 @@ static Function AB_LoadSweepFromNWB(discLocation, sweepDFR, device, sweep) Wave/T stimulus = GetAnalysisChannelStimWave(nwb[%DataFolder], device) channelList = stimulus[V_Value] h5_groupID = IPNWB#OpenStimulus(h5_fileID) - if(AB_LoadSweepFromNWBgeneric(h5_groupID, channelList, sweepDFR, configSweep)) + if(AB_LoadSweepFromNWBgeneric(h5_groupID, version, channelList, sweepDFR, configSweep)) return 1 endif @@ -1616,8 +1622,8 @@ static Function AB_LoadSweepFromNWB(discLocation, sweepDFR, device, sweep) return 0 End -static Function AB_LoadSweepFromNWBgeneric(h5_groupID, channelList, sweepDFR, configSweep) - variable h5_groupID +static Function AB_LoadSweepFromNWBgeneric(h5_groupID, nwbVersion, channelList, sweepDFR, configSweep) + variable h5_groupID, nwbVersion string channelList DFREF sweepDFR Wave/I configSweep @@ -1631,10 +1637,7 @@ static Function AB_LoadSweepFromNWBgeneric(h5_groupID, channelList, sweepDFR, co for(i = 0; i < numChannels; i += 1) channel = StringFromList(i, channelList) - - // use AnalyseChannelName as a fallback if properties from the source attribute are missing IPNWB#AnalyseChannelName(channel, p) - IPNWB#LoadSourceAttribute(h5_groupID, channel, p) switch(p.channelType) case ITC_XOP_CHANNEL_TYPE_DAC: @@ -1645,7 +1648,7 @@ static Function AB_LoadSweepFromNWBgeneric(h5_groupID, channelList, sweepDFR, co break case ITC_XOP_CHANNEL_TYPE_ADC: channelName = "AD" - wave loaded = IPNWB#LoadTimeseries(h5_groupID, channel) + wave loaded = IPNWB#LoadTimeseries(h5_groupID, channel, nwbVersion) channelName += "_" + num2str(p.channelNumber) fakeConfigWave = 1 break @@ -1681,6 +1684,7 @@ static Function AB_LoadSweepFromNWBgeneric(h5_groupID, channelList, sweepDFR, co default: ASSERT(0, "unknown channel type " + num2str(p.channelType)) endswitch + ASSERT(WaveExists(loaded), "No Wave loaded") if(waveNoteLoaded == 0) SVAR/Z test = sweepDFR:note diff --git a/Packages/MIES/MIES_Constants.ipf b/Packages/MIES/MIES_Constants.ipf index 17226eadc8..d9aa76b15f 100644 --- a/Packages/MIES/MIES_Constants.ipf +++ b/Packages/MIES/MIES_Constants.ipf @@ -666,6 +666,7 @@ Constant AMPLIFIER_CONNECTION_MCC_FAILED = 2 ///< calling MCC_SelectMultiClamp70 /// Additional entry in the NWB source attribute for TTL data StrConstant NWB_SOURCE_TTL_BIT = "TTLBit" +StrConstant IPNWB_PLACEHOLDER = "PLACEHOLDER" /// @name Convenience 
constants for DAP_UpdateClampmodeTabs() and DAP_ChangeHeadStageMode() /// @anchor MCCSyncOverrides @@ -793,6 +794,7 @@ StrConstant PRESSURE_STARTSEAL = "Initial Seal Pressure (psi)" StrConstant PRESSURE_MAXSEAL = "Maximum Seal Pressure (psi)" StrConstant TP_AFTER_DAQ = "Activate TP after DAQ" StrConstant EXPORT_NWB = "Export to NWB" +StrConstant NWB_VERSION = "NWB Version" StrConstant APPEND_ASYNC = "Append Asynchronous reading to wave note" StrConstant SYNC_MIES_MCC = "Sync MIES to MCC" StrConstant ENABLE_I_EQUAL_ZERO = "Switch clamp modes via I equal zero" diff --git a/Packages/MIES/MIES_DAEphys.ipf b/Packages/MIES/MIES_DAEphys.ipf index 6908690cf8..568ec98ebc 100644 --- a/Packages/MIES/MIES_DAEphys.ipf +++ b/Packages/MIES/MIES_DAEphys.ipf @@ -450,6 +450,7 @@ Function DAP_EphysPanelStartUpSettings() CheckBox check_Settings_TPAfterDAQ WIN = $panelTitle, value= 0 CheckBox Check_Settings_NwbExport WIN = $panelTitle,value= 0 + PopupMenu Popup_Settings_NwbVersion WIN = $panelTitle, mode=2, popvalue="2" PopupMenu Popup_Settings_DecMethod, mode=2, popvalue="MinMax" SetVariable setvar_Settings_DecMethodFac, WIN = $panelTitle, value = _NUM:-1 @@ -2464,7 +2465,7 @@ Function DAP_CheckSettings(panelTitle, mode) endfor if(DAG_GetNumericalValue(panelTitle, "Check_Settings_NwbExport")) - NWB_PrepareExport() + NWB_PrepareExport(DAG_GetNumericalValue(panelTitle, "Popup_Settings_NwbVersion")) endif return 0 diff --git a/Packages/MIES/MIES_DAEphys_Macro.ipf b/Packages/MIES/MIES_DAEphys_Macro.ipf index af5dcba003..0c098991de 100644 --- a/Packages/MIES/MIES_DAEphys_Macro.ipf +++ b/Packages/MIES/MIES_DAEphys_Macro.ipf @@ -3683,6 +3683,11 @@ Window DA_Ephys() : Panel CheckBox Check_Settings_NwbExport,userdata(ResizeControlsInfo) += A"zzzzzzzzzzzz!!#u:Du]k 0) end -Function TestLabnotebooks(fileID, device) +static Function TestLabnotebooks(fileID, device) variable fileID string device @@ -41,7 +44,7 @@ Function TestLabnotebooks(fileID, device) CHECK_EQUAL_WAVES(textualValuesNWB, textualValues) End -Function TestTPStorage(fileID, device) +static Function TestTPStorage(fileID, device) variable fileID string device @@ -53,7 +56,7 @@ Function TestTPStorage(fileID, device) CHECK_EQUAL_WAVES(TPStorageNWB, TPStorage) End -Function TestStoredTestPulses(fileID, device) +static Function TestStoredTestPulses(fileID, device) variable fileID string device @@ -88,7 +91,7 @@ Function TestStoredTestPulses(fileID, device) endfor End -Function TestStimsetParamWaves(fileID, device, sweeps) +static Function TestStimsetParamWaves(fileID, device, sweeps) variable fileID string device WAVE sweeps @@ -144,7 +147,7 @@ Function TestStimsetParamWaves(fileID, device, sweeps) endfor End -Function TestTimeSeriesProperties(groupID, channel) +static Function TestTimeSeriesProperties(groupID, channel) variable groupID string channel @@ -175,7 +178,7 @@ Function TestTimeSeriesProperties(groupID, channel) HDF5CloseGroup/Z channelGroupID End -Function/S GetChannelNameFromChannelType(groupID, device, channel, sweep, params) +static Function/S GetChannelNameFromChannelType(groupID, device, channel, sweep, params) variable groupID string device string channel @@ -207,7 +210,7 @@ Function/S GetChannelNameFromChannelType(groupID, device, channel, sweep, params break case ITC_XOP_CHANNEL_TYPE_ADC: channelName = "AD" - WAVE loadedFromNWB = IPNWB#LoadTimeseries(groupID, channel) + WAVE loadedFromNWB = IPNWB#LoadTimeseries(groupID, channel, NWB_VERSION) channelName += "_" + num2str(params.channelNumber) if(IsNaN(params.electrodeNumber)) @@ -241,7 +244,7 @@ 
Function/S GetChannelNameFromChannelType(groupID, device, channel, sweep, params return channelName End -Function/WAVE LoadTimeSeries(groupID, channel, channelType) +static Function/WAVE LoadTimeSeries(groupID, channel, channelType) variable groupID, channelType string channel @@ -250,7 +253,7 @@ Function/WAVE LoadTimeSeries(groupID, channel, channelType) return IPNWB#LoadStimulus(groupID, channel) break case ITC_XOP_CHANNEL_TYPE_ADC: - return IPNWB#LoadTimeseries(groupID, channel) + return IPNWB#LoadTimeseries(groupID, channel, NWB_VERSION) break case ITC_XOP_CHANNEL_TYPE_TTL: return IPNWB#LoadStimulus(groupID, channel) @@ -261,7 +264,8 @@ Function/WAVE LoadTimeSeries(groupID, channel, channelType) endswitch End -Function TestSourceAttribute(groupID, device, channel, sweep, pxpSweepsDFR) +/// @brief Test NWBv1 specific source attribute (dropped since NWBv2) +static Function TestSourceAttribute(groupID, device, channel, sweep, pxpSweepsDFR) variable groupID, sweep string device, channel DFREF pxpSweepsDFR @@ -290,7 +294,7 @@ Function TestSourceAttribute(groupID, device, channel, sweep, pxpSweepsDFR) CHECK(params.groupIndex >= 0) End -Function TestTimeSeries(fileID, device, groupID, channel, sweep, pxpSweepsDFR) +static Function TestTimeSeries(fileID, device, groupID, channel, sweep, pxpSweepsDFR) variable fileID, groupID, sweep string channel, device DFREF pxpSweepsDFR @@ -343,7 +347,7 @@ Function TestTimeSeries(fileID, device, groupID, channel, sweep, pxpSweepsDFR) // stimulus_description stimulus = IPNWB#ReadTextDataSetAsString(channelGroupID, "stimulus_description") - if(params.channelType == ITC_XOP_CHANNEL_TYPE_DAC) + if(params.channelType == ITC_XOP_CHANNEL_TYPE_DAC && IsNaN(params.electrodeNumber)) stimulus_expected = "PLACEHOLDER" elseif(params.channelType == ITC_XOP_CHANNEL_TYPE_ADC && IsNaN(params.electrodeNumber)) // unassoc AD stimulus_expected = "PLACEHOLDER" @@ -475,7 +479,7 @@ Function TestTimeSeries(fileID, device, groupID, channel, sweep, pxpSweepsDFR) endif End -Function/DF TestSweepData(entry, device, sweep) +static Function/DF TestSweepData(entry, device, sweep) WAVE/T entry string device variable sweep @@ -541,21 +545,21 @@ Function/DF TestSweepData(entry, device, sweep) return pxpSweepsDFR End -Function/S TestFileExport() +static Function/S TestFileExport() string baseFolder, nwbFile, discLocation PathInfo home baseFolder = S_path - nwbFile = GetExperimentName() + ".nwb" + nwbFile = GetExperimentName() + "-V1.nwb" discLocation = baseFolder + nwbFile HDF5CloseFile/Z/A 0 DeleteFile/Z/P=home nwbFile KillOrMoveToTrash(dfr = GetAnalysisFolder()) - NWB_ExportAllData(compressionMode = IPNWB#GetNoCompression(), writeStoredTestPulses = 1) + NWB_ExportAllData(NWB_VERSION, compressionMode = IPNWB#GetNoCompression(), writeStoredTestPulses = 1, overrideFilePath=discLocation) CloseNWBFile() GetFileFolderInfo/P=home/Q/Z nwbFile @@ -566,7 +570,7 @@ Function/S TestFileExport() return discLocation End -Function TestListOfGroups(groupList, wv) +static Function TestListOfGroups(groupList, wv) string groupList WAVE/T wv @@ -584,7 +588,7 @@ Function TestListOfGroups(groupList, wv) CHECK_EQUAL_STR(groupList, list) End -Function TestNwbExport() +Function TestNwbExportV1() string discLocation, device, acquisition string channel variable fileID, numEntries, i, sweep, numGroups, j, groupID @@ -611,6 +615,7 @@ Function TestNwbExport() CHECK_WAVE(stimuluses, TEXT_WAVE) fileID = IPNWB#H5_OpenFile(discLocation) + CHECK_EQUAL_VAR(IPNWB#GetNWBmajorVersion(IPNWB#ReadNWBVersion(fileID)), NWB_VERSION) 
// check history TestHistory(fileID) @@ -628,7 +633,7 @@ Function TestNwbExport() TestStimsetParamWaves(fileID, device, sweeps) // check all acquisitions - TestListOfGroups(IPNWB#ReadAcquisition(fileID), acquisitions) + TestListOfGroups(IPNWB#ReadAcquisition(fileID, NWB_VERSION), acquisitions) // check all stimulus TestListOfGroups(IPNWB#ReadStimulus(fileID), stimuluses) @@ -648,7 +653,7 @@ Function TestNwbExport() numGroups = ItemsInList(acquisitions[i]) for(j = 0; j < numGroups; j += 1) channel = StringFromList(j, acquisitions[i]) - groupID = IPNWB#OpenAcquisition(fileID) + groupID = IPNWB#OpenAcquisition(fileID, NWB_VERSION) // test all of ReadChannelParams aka source TestSourceAttribute(groupID, device, channel, sweep, pxpSweepsDFR) diff --git a/Packages/Testing-MIES/UTF_TestNWBExportV2.ipf b/Packages/Testing-MIES/UTF_TestNWBExportV2.ipf new file mode 100644 index 0000000000..5ed118b411 --- /dev/null +++ b/Packages/Testing-MIES/UTF_TestNWBExportV2.ipf @@ -0,0 +1,735 @@ +#pragma TextEncoding = "UTF-8" +#pragma rtGlobals=3 // Use modern global access method and strict wave access. +#pragma rtFunctionErrors=1 +#pragma ModuleName=TestNWBExportV2 + +static Constant NWB_VERSION = 2 + + // This file does not hold test suites +static Function NoTestSuite() + FAIL() +End + +// We want to check that the stored specification versions are the correct ones compared to the +// `/nwb_version` attribute and the path components in `/specifications/core/X.Y.Z` and `/specifications/hdmf-common/A.B.C` +// +// In case that fails here check NWB_SPEC_VERSION, HDMF_SPEC_VERSION, NWB_VERSION in IPNWB +static Function TestSpecVersions(fileID) + variable fileID + + string groups, expected, version, group, groupVersion, namespaceVersion, globalVersion + string path, spec + variable numEntries, i, jsonID + + globalVersion = IPNWB#ReadTextAttributeAsString(fileID, "/", "nwb_version") + + groups = IPNWB#H5_ListGroups(fileID, "/specifications") + groups = SortList(groups) + expected = "core;hdmf-common;ndx-mies;" + CHECK_EQUAL_STR(groups, expected) + + numEntries = ItemsInList(groups) + for(i = 0; i < numEntries; i += 1) + group = StringFromList(i, groups) + + path = "/specifications/" + group + groupVersion = IPNWB#H5_ListGroups(fileID, path) + groupVersion = RemoveEnding(groupVersion, ";") + + if(!cmpstr(group, "core")) + CHECK_EQUAL_STR(groupVersion, globalVersion) + endif + + path += "/" + groupVersion + "/namespace" + spec = IPNWB#ReadTextDataSetAsString(fileID, path) + jsonID = JSON_Parse(spec) + namespaceVersion = JSON_GetString(jsonID, "/namespaces/0/version") + CHECK_EQUAL_STR(groupVersion, namespaceVersion) + JSON_Release(jsonID) + endfor +End + +static Function TestHistory(fileID) + variable fileID + + WAVE/Z/T history = IPNWB#H5_LoadDataSet(fileID, "/general/data_collection") + CHECK_WAVE(history, TEXT_WAVE) + CHECK(DimSize(history, ROWS) > 0) +End + +static Function TestLabnotebooks(fileID, device) + variable fileID + string device + + string lbnDevices, prefix + + WAVE numericalValues = GetLBNumericalValues(device) + WAVE/T numericalKeys = GetLBNumericalKeys(device) + WAVE/T textualValues = GetLBTextualValues(device) + WAVE/T textualKeys = GetLBTextualKeys(device) + + lbnDevices = RemoveEnding(IPNWB#ReadLabNoteBooks(fileID), ";") + WARN_EQUAL_STR(lbnDevices, device) + lbnDevices = StringFromList(0, lbnDevices) + CHECK_EQUAL_STR(lbnDevices, device) + + prefix = "/general/labnotebook/" + device + "/" + + WAVE/Z numericalKeysNWB = IPNWB#H5_LoadDataSet(fileID, prefix + "numericalKeys") + 
CHECK_EQUAL_WAVES(numericalKeysNWB, numericalKeys) + WAVE/Z numericalValuesNWB = IPNWB#H5_LoadDataSet(fileID, prefix + "numericalValues") + CHECK_EQUAL_WAVES(numericalValuesNWB, numericalValues) + WAVE/Z textualKeysNWB = IPNWB#H5_LoadDataSet(fileID, prefix + "textualKeys") + CHECK_EQUAL_WAVES(textualKeysNWB, textualKeys) + WAVE/Z textualValuesNWB = IPNWB#H5_LoadDataSet(fileID, prefix + "textualValues") + CHECK_EQUAL_WAVES(textualValuesNWB, textualValues) +End + +static Function TestTPStorage(fileID, device) + variable fileID + string device + + string prefix + + prefix = "/general/testpulse/" + device + "/" + WAVE/Z TPStorageNWB = IPNWB#H5_LoadDataSet(fileID, prefix + "TPStorage") + WAVE TPStorage = GetTPStorage(device) + CHECK_EQUAL_WAVES(TPStorageNWB, TPStorage) +End + +static Function TestStoredTestPulses(fileID, device) + variable fileID + string device + + string prefix, datasets, dataset, idxstr + variable numPulses, i, numEntries, idx + + WAVE/WAVE storedTestPulses = GetStoredTestPulseWave(device) + numPulses = GetNumberFromWaveNote(storedTestPulses, NOTE_INDEX) + + prefix = "/general/testpulse/" + device + "/" + + datasets = IPNWB#H5_ListGroupMembers(fileID, prefix) + // remove TPStorage entries + datasets = GrepList(datasets, TP_STORAGE_REGEXP, 1) + + numEntries = ItemsInList(datasets) + CHECK_EQUAL_VAR(numEntries, numPulses) + + for(i = 0; i < numEntries; i += 1) + dataset = StringFromList(i, datasets) + + WAVE/Z TestPulseNWB = IPNWB#H5_LoadDataSet(fileID, prefix + dataset) + + SplitString/E=STORED_TESTPULSES_REGEXP dataset, idxStr + CHECK_EQUAL_VAR(V_Flag, 1) + + idx = str2num(idxStr) + CHECK(idx >= 0) + + WAVE/Z TestPulsePXP = storedTestPulses[idx] + CHECK_EQUAL_WAVES(TestPulseNWB, TestPulsePXP) + endfor +End + +static Function TestStimsetParamWaves(fileID, device, sweeps) + variable fileID + string device + WAVE sweeps + + variable i, j, numEntries, sweep + string stimsetParamsNWB, stimset, prefix, name + + WAVE/T textualValues = GetLBTextualValues(device) + + stimsetParamsNWB = IPNWB#H5_ListGroupMembers(fileID, "/general/stimsets") + CHECK(ItemsInList(stimsetParamsNWB) > 0) + + numEntries = DimSize(sweeps, ROWS) + for(i = 0; i < numEntries; i += 1) + sweep = sweeps[i] + + if(!IsValidSweepNumber(sweep)) + break + endif + + WAVE/T/Z stimsets = GetLastSetting(textualValues, sweep, "Stim Wave Name", DATA_ACQUISITION_MODE) + CHECK_WAVE(stimsets, TEXT_WAVE) + + for(j = 0; j < NUM_HEADSTAGES; j += 1) + stimset = stimsets[j] + + if(IsEmpty(stimset)) + break + endif + + if(!cmpstr(stimset, STIMSET_TP_WHILE_DAQ)) + continue + endif + + WAVE/Z WP = WB_GetWaveParamForSet(stimset) + WAVE/Z WPT = WB_GetWaveTextParamForSet(stimset) + WAVE/Z SegWvType = WB_GetSegWvTypeForSet(stimset) + + prefix = "/general/stimsets/" + + name = WB_GetParameterWaveName(stimset, STIMSET_PARAM_WP, nwbFormat = 1) + WAVE/Z WP_NWB = IPNWB#H5_LoadDataSet(fileID, prefix + name) + CHECK_EQUAL_WAVES(WP_NWB, WP) + + name = WB_GetParameterWaveName(stimset, STIMSET_PARAM_WPT, nwbFormat = 1) + WAVE/Z WPT_NWB = IPNWB#H5_LoadDataSet(fileID, prefix + name) + CHECK_EQUAL_WAVES(WPT_NWB, WPT) + + name = WB_GetParameterWaveName(stimset, STIMSET_PARAM_SEGWVTYPE, nwbFormat = 1) + WAVE/Z SegWvType_NWB = IPNWB#H5_LoadDataSet(fileID, prefix + name) + CHECK_EQUAL_WAVES(SegWvType_NWB, SegWvType) + endfor + endfor +End + +static Function TestTimeSeriesProperties(groupID, channel) + variable groupID + string channel + + variable numEntries, i, value, channelGroupID + + channelGroupID = IPNWB#H5_OpenGroup(groupID, channel) + + // 
TimeSeries properties + STRUCT IPNWB#TimeSeriesProperties tsp + IPNWB#ReadTimeSeriesProperties(groupID, channel, tsp) + + numEntries = DimSize(tsp.names, ROWS) + for(i = 0; i < numEntries; i += 1) + value = IPNWB#ReadDatasetAsNumber(channelGroupID, tsp.names[i]) + CHECK_EQUAL_VAR(value, tsp.data[i]) + endfor + + HDF5CloseGroup/Z channelGroupID +End + +static Function/S GetChannelNameFromChannelType(groupID, device, channel, sweep, params) + variable groupID + string device + string channel + variable sweep + STRUCT IPNWB#ReadChannelParams ¶ms + + WAVE numericalValues = GetLBNumericalValues(device) + + string channelName, key + variable entry, index + + switch(params.channelType) + case ITC_XOP_CHANNEL_TYPE_DAC: + channelName = "DA" + WAVE loadedFromNWB = IPNWB#LoadStimulus(groupID, channel) + channelName += "_" + num2str(params.channelNumber) + + if(IsNaN(params.electrodeNumber)) + WAVE/Z settings + [settings, index] = GetLastSettingChannel(numericalValues, $"", sweep, "DAC", params.channelNumber, params.channelType, DATA_ACQUISITION_MODE) + entry = settings[index] + else + WAVE/Z settings = GetLastSetting(numericalValues, sweep, "DAC", DATA_ACQUISITION_MODE) + CHECK_WAVE(settings, NUMERIC_WAVE) + entry = settings[params.electrodeNumber] + endif + + CHECK_EQUAL_VAR(entry, params.channelNumber) + break + case ITC_XOP_CHANNEL_TYPE_ADC: + channelName = "AD" + WAVE loadedFromNWB = IPNWB#LoadTimeseries(groupID, channel, NWB_VERSION) + channelName += "_" + num2str(params.channelNumber) + + if(IsNaN(params.electrodeNumber)) + WAVE/Z settings + [settings, index] = GetLastSettingChannel(numericalValues, $"", sweep, "ADC", params.channelNumber, params.channelType, DATA_ACQUISITION_MODE) + entry = settings[index] + else + WAVE/Z settings = GetLastSetting(numericalValues, sweep, "ADC", DATA_ACQUISITION_MODE) + CHECK_WAVE(settings, NUMERIC_WAVE) + entry = settings[params.electrodeNumber] + endif + + CHECK_EQUAL_VAR(entry, params.channelNumber) + break + case ITC_XOP_CHANNEL_TYPE_TTL: + channelName = "TTL" + WAVE loadedFromNWB = IPNWB#LoadStimulus(groupID, channel) + channelName += "_" + num2str(params.channelNumber) + + if(IsFinite(params.ttlBit)) + channelName += "_" + num2str(log(params.ttlBit)/log(2)) + endif + + CHECK_EQUAL_VAR(str2num(params.channelSuffix), params.ttlBit) + break + default: + ASSERT(0, "unknown channel type " + num2str(params.channelType)) + break + endswitch + + return channelName +End + +static Function/WAVE LoadTimeSeries(groupID, channel, channelType) + variable groupID, channelType + string channel + + switch(channelType) + case ITC_XOP_CHANNEL_TYPE_DAC: + return IPNWB#LoadStimulus(groupID, channel) + break + case ITC_XOP_CHANNEL_TYPE_ADC: + return IPNWB#LoadTimeseries(groupID, channel, NWB_VERSION) + break + case ITC_XOP_CHANNEL_TYPE_TTL: + return IPNWB#LoadStimulus(groupID, channel) + break + default: + ASSERT(0, "unknown channel type " + num2str(channelType)) + break + endswitch +End + +static Function TestTimeSeries(fileID, filepath, device, groupID, channel, sweep, pxpSweepsDFR) + variable fileID, groupID, sweep + string filepath + string channel, device + DFREF pxpSweepsDFR + + variable channelGroupID, starting_time, session_start_time, actual + variable clampMode, gain, gain_ref, resolution, conversion, headstage, rate_ref, rate, samplingInterval, samplingInterval_ref + string stimulus, stimulus_expected, channelName, str, path, neurodata_type + string electrode_name, electrode_name_ref, key, unit_ref, unit, base_unit_ref + + STRUCT IPNWB#ReadChannelParams params + 
IPNWB#InitReadChannelParams(params) + IPNWB#AnalyseChannelName(channel, params) + + channelGroupID = IPNWB#H5_OpenGroup(groupID, channel) + + string headstageDesc = IPNWB#ReadTextDataSetAsString(channelGroupID, "electrode/description") + if(!cmpstr(headstageDesc, "PLACEHOLDER")) + headstage = NaN + else + headstage = str2num(RemovePrefix(headstageDesc, startStr="Headstage ")) + REQUIRE(headstage >= 0 && headstage < NUM_HEADSTAGES) + endif + + params.electrodeNumber = headstage + + channelName = GetChannelNameFromChannelType(groupID, device, channel, sweep, params) + + WAVE numericalValues = GetLBNumericalValues(device) + WAVE/T textualValues = GetLBTextualValues(device) + + WAVE loadedFromNWB = LoadTimeSeries(groupID, channel, params.channelType) + + // starting_time + starting_time = IPNWB#ReadDataSetAsNumber(channelGroupID, "starting_time") + session_start_time = ParseISO8601Timestamp(IPNWB#ReadTextDataSetAsString(fileID, "/session_start_time")) + actual = ParseISO8601Timestamp(GetLastSettingTextIndep(textualValues, sweep, HIGH_PREC_SWEEP_START_KEY, DATA_ACQUISITION_MODE)) + CHECK_EQUAL_VAR(session_start_time + starting_time, actual) + + // its attributes: unit + unit = IPNWB#ReadTextAttributeAsString(groupID, channel + "/starting_time", "unit") + unit_ref = "Seconds" + CHECK_EQUAL_STR(unit, unit_ref) + + unit = WaveUnits(loadedFromNWB, ROWS) + unit_ref = "ms" + CHECK_EQUAL_STR(unit, unit_ref) + + // and rate + rate = IPNWB#ReadAttributeAsNumber(groupID, channel + "/starting_time", "rate") + rate_ref = 1 / (DimDelta(loadedFromNWB, ROWS)/1000) + CHECK_CLOSE_VAR(rate, rate_ref, tol=1e-7) + + samplingInterval = GetLastSettingIndep(numericalValues, sweep, "Sampling interval", DATA_ACQUISITION_MODE) + samplingInterval_ref = DimDelta(loadedFromNWB, ROWS) + CHECK_CLOSE_VAR(samplingInterval, samplingInterval_ref, tol=1e-7) + + // stimulus_description + stimulus = IPNWB#ReadTextAttributeAsString(channelGroupID, ".", "stimulus_description") + if(params.channelType == ITC_XOP_CHANNEL_TYPE_DAC && IsNaN(params.electrodeNumber)) + stimulus_expected = "PLACEHOLDER" + elseif(params.channelType == ITC_XOP_CHANNEL_TYPE_ADC && IsNaN(params.electrodeNumber)) // unassoc AD + stimulus_expected = "PLACEHOLDER" + elseif(params.channelType == ITC_XOP_CHANNEL_TYPE_TTL) + WAVE/T/Z TTLStimsets = GetTTLStimSets(numericalValues, textualValues, sweep) + CHECK_WAVE(TTLStimsets, TEXT_WAVE) + + if(IsNaN(params.ttlBit)) + stimulus_expected = TTLStimsets[params.channelNumber] + else + stimulus_expected = TTLStimsets[log(params.ttlBit)/log(2)] + endif + else + WAVE/Z/T wvText = GetLastSetting(textualValues, sweep, "Stim Wave Name", DATA_ACQUISITION_MODE) + CHECK_WAVE(wvText, TEXT_WAVE) + stimulus_expected = wvText[params.electrodeNumber] + endif + CHECK_EQUAL_STR(stimulus, stimulus_expected) + + // electrode_name, only present for associated channels + if(IsFinite(params.electrodeNumber)) + electrode_name = IPNWB#ReadElectrodeName(filePath, channel, NWB_VERSION) + electrode_name_ref = num2str(params.electrodeNumber) + CHECK_EQUAL_STR(electrode_name, electrode_name_ref) + endif + + // neurodata_type + WAVE/Z wv = GetLastSetting(numericalValues, sweep, "Clamp Mode", DATA_ACQUISITION_MODE) + CHECK_WAVE(wv, NUMERIC_WAVE) + + clampMode = IsFinite(params.electrodeNumber) ? 
wv[params.electrodeNumber] : NaN + + neurodata_type = IPNWB#ReadNeuroDataType(groupID, channel) + switch(clampMode) + case V_CLAMP_MODE: + if(params.channelType == ITC_XOP_CHANNEL_TYPE_ADC) + str = "VoltageClampSeries" + CHECK_EQUAL_STR(neurodata_type, str) + elseif(params.channelType == ITC_XOP_CHANNEL_TYPE_DAC) + str = "VoltageClampStimulusSeries" + CHECK_EQUAL_STR(neurodata_type, str) + else + FAIL() + endif + break + case I_CLAMP_MODE: + if(params.channelType == ITC_XOP_CHANNEL_TYPE_ADC) + str = "CurrentClampSeries" + CHECK_EQUAL_STR(neurodata_type, str) + elseif(params.channelType == ITC_XOP_CHANNEL_TYPE_DAC) + str = "CurrentClampStimulusSeries" + CHECK_EQUAL_STR(neurodata_type, str) + else + FAIL() + endif + break + case I_EQUAL_ZERO_MODE: + if(params.channelType == ITC_XOP_CHANNEL_TYPE_ADC) + str = "IZeroClampSeries" + CHECK_EQUAL_STR(neurodata_type, str) + else + FAIL() + endif + break + default: + if(IsNaN(clampMode)) + str = "TimeSeries" + CHECK_EQUAL_STR(neurodata_type, str) + else + ASSERT(0, "unknown clamp mode") + endif + break + endswitch + + // gain + if(IsFinite(params.electrodeNumber)) + REQUIRE_NEQ_VAR(params.channelType, NaN) + key = StringFromList(params.channelType, ITC_CHANNEL_NAMES) + " Gain" + WAVE/Z gains = GetLastSetting(numericalValues, sweep, key, DATA_ACQUISITION_MODE) + CHECK_WAVE(gains, NUMERIC_WAVE) + + gain_ref = gains[params.electrodeNumber] + gain = IPNWB#ReadDatasetAsNumber(channelGroupID, "gain") + CHECK_EQUAL_VAR(gain, gain_ref) + endif + + // data.resolution + resolution = IPNWB#ReadDatasetAsNumber(channelGroupID, "resolution") + CHECK_EQUAL_VAR(resolution, NaN) + + // data.conversion + // data.unit + WAVE/Z/SDFR=pxpSweepsDFR pxpWave = $channelName + REQUIRE_WAVE(pxpWave, NUMERIC_WAVE) + unit_ref = WaveUnits(pxpWave, -1) + + if(!cmpstr(unit_ref, "pA")) + conversion = IPNWB#ReadAttributeAsNumber(channelGroupID, "data", "conversion") + CHECK_CLOSE_VAR(conversion, 1e-12) + + unit = IPNWB#ReadTextAttributeAsString(channelGroupID, "data", "unit") + + // translate back to hardcoded units + base_unit_ref = "amperes" + + CHECK_EQUAL_STR(unit, base_unit_ref) + elseif(!cmpstr(unit_ref, "mV")) + conversion = IPNWB#ReadAttributeAsNumber(channelGroupID, "data", "conversion") + CHECK_CLOSE_VAR(conversion, 1e-3, tol = 1e-5) + + unit = IPNWB#ReadTextAttributeAsString(channelGroupID, "data", "unit") + + // translate back to hardcoded units + base_unit_ref = "volts" + + CHECK_EQUAL_STR(unit, base_unit_ref) + elseif(IsEmpty(unit_ref)) // TTL data + conversion = IPNWB#ReadAttributeAsNumber(channelGroupID, "data", "conversion") + CHECK_CLOSE_VAR(conversion, 1) + + unit = IPNWB#ReadTextAttributeAsString(channelGroupID, "data", "unit") + base_unit_ref = "a.u." 
+ CHECK_EQUAL_STR(unit, base_unit_ref) + else + FAIL() + endif +End + +static Function/DF TestSweepData(entry, device, sweep) + WAVE/T entry + string device + variable sweep + + variable ret, i, numEntries, headstage + string nwbSweeps, pxpSweeps, pxpSweepsClean, name, channelTypeStr, channelNumberStr, channelSuffix + + WAVE numericalValues = GetLBNumericalValues(device) + WAVE/T textualValues = GetLBTextualValues(device) + + ret = MIES_AB#AB_LoadSweepFromFile(entry[%DiscLocation], entry[%DataFolder], entry[%FileType], device, sweep) + CHECK_EQUAL_VAR(ret, 0) + + DFREF nwbSweepsDFR = GetAnalysisSweepDataPath(entry[%DataFolder], device, sweep) + + // sweep waves in the PXP + WAVE/Z sweepWave = GetSweepWave(device, sweep) + CHECK_WAVE(sweepWave, NORMAL_WAVE) + + WAVE/Z configWave = GetConfigWave(sweepWave) + CHECK_WAVE(sweepWave, NORMAL_WAVE) + + DFREF pxpSweepsDFR = NewFreeDataFolder() + SplitSweepIntoComponents(numericalValues, sweep, sweepWave, configWave, TTL_RESCALE_OFF, targetDFR=pxpSweepsDFR) + + nwbSweeps = SortList(GetListOfObjects(nwbSweepsDFR, ".*")) + pxpSweeps = SortList(GetListOfObjects(pxpSweepsDFR, ".*")) + + // remove IZero DA channels as we don't save these in NWB + pxpSweepsClean = "" + numEntries = ItemsInList(pxpSweeps) + for(i = 0; i < numEntries; i += 1) + name = StringFromList(i, pxpSweeps) + + SplitString/E="^([[:alpha:]]+)_([[:digit:]]+)(?:_.*)?$" name, channelTypeStr, channelNumberStr, channelSuffix + CHECK_EQUAL_VAR(V_Flag, 2) + + WAVE DAC = GetLastSetting(numericalValues, sweep, "DAC", DATA_ACQUISITION_MODE) + headstage = GetRowIndex(DAC, val=str2num(channelNumberStr)) + if(IsFinite(headstage)) + WAVE clampMode = GetLastSetting(numericalValues, sweep, "Clamp Mode", DATA_ACQUISITION_MODE) + + if(clampMode[headstage] == I_EQUAL_ZERO_MODE \ + && !cmpstr(channelTypeStr, StringFromList(ITC_XOP_CHANNEL_TYPE_DAC, ITC_CHANNEL_NAMES))) + continue + endif + endif + + pxpSweepsClean = AddListItem(name, pxpSweepsClean, ";", inf) + endfor + + CHECK_EQUAL_STR(nwbSweeps, pxpSweepsClean) + + numEntries = ItemsInList(nwbSweeps) + for(i = 0; i < numEntries; i += 1) + WAVE/Z/SDFR=nwbSweepsDFR nwbWave = $StringFromList(i, nwbSweeps) + CHECK_WAVE(nwbWave, NORMAL_WAVE) + WAVE/Z/SDFR=pxpSweepsDFR pxpWave = $StringFromList(i, pxpSweepsClean) + CHECK_WAVE(pxpWave, FREE_WAVE) + CHECK_EQUAL_WAVES(nwbWave, pxpWave, mode = WAVE_DATA | WAVE_DATA_TYPE | WAVE_SCALING | DATA_UNITS | DIMENSION_UNITS | DIMENSION_LABELS | DATA_FULL_SCALE | DIMENSION_SIZES) // all except WAVE_NOTE + endfor + + return pxpSweepsDFR +End + +static Function/S TestFileExport() + + string nwbFile, discLocation, baseFolder + + PathInfo home + REQUIRE(V_flag) + baseFolder = S_path + + nwbFile = UniqueFileOrFolder("home", GetExperimentName(), suffix = "-V2.nwb") + discLocation = baseFolder + nwbFile + + HDF5CloseFile/Z/A 0 + KillOrMoveToTrash(dfr = GetAnalysisFolder()) + + NWB_ExportAllData(NWB_VERSION, compressionMode = IPNWB#GetNoCompression(), writeStoredTestPulses = 1, overrideFilePath=discLocation) + CloseNWBFile() + + GetFileFolderInfo/P=home/Q/Z nwbFile + REQUIRE(V_IsFile) + + CHECK_EQUAL_VAR(MIES_AB#AB_AddFile(baseFolder, discLocation), 0) + + return discLocation +End + +static Function TestListOfGroups(groupList, wv) + string groupList + WAVE/T wv + + variable index + string list + + index = GetNumberFromWaveNote(wv, NOTE_INDEX) + CHECK(index >= 1) + + groupList = SortList(groupList) + + Duplicate/FREE/T/R=[0, index - 1] wv, wvFilled + wvFilled[] = RemoveEnding(wvFilled[p], ";") + list = 
SortList(TextWaveToList(wvFilled, ";")) + CHECK_EQUAL_STR(groupList, list) +End + +Function TestNwbExportV2() + string discLocation, device + string channel + variable fileID, numEntries, i, sweep, numGroups, j, groupID + + discLocation = TestFileExport() + + WAVE/T/Z entry = AB_GetMap(discLocation) + CHECK_WAVE(entry, FREE_WAVE) + + WAVE/T/Z devices = GetAnalysisDeviceWave(entry[%DataFolder]) + CHECK_WAVE(devices, NORMAL_WAVE) + CHECK_EQUAL_VAR(GetNumberFromWaveNote(devices, NOTE_INDEX), ItemsInList(Getalldevices())) + WARN_EQUAL_VAR(GetNumberFromWaveNote(devices, NOTE_INDEX), 1) + + device = devices[0] + + WAVE/Z sweeps = GetAnalysisChannelSweepWave(entry[%DataFolder], device) + CHECK_WAVE(sweeps, NUMERIC_WAVE) + CHECK(GetNumberFromWaveNote(sweeps, NOTE_INDEX) > 0) + + WAVE/Z/T acquisitions = GetAnalysisChannelAcqWave(entry[%DataFolder], device) + CHECK_WAVE(acquisitions, TEXT_WAVE) + + WAVE/Z/T stimuluses = GetAnalysisChannelStimWave(entry[%DataFolder], device) + CHECK_WAVE(stimuluses, TEXT_WAVE) + + fileID = IPNWB#H5_OpenFile(discLocation) + CHECK_EQUAL_VAR(IPNWB#GetNWBMajorVersion(IPNWB#ReadNWBVersion(fileID)), NWB_VERSION) + + // check stored specification versions + TestSpecVersions(fileID) + + // check history + TestHistory(fileID) + + // check LBNs + TestLabnotebooks(fileID, device) + + // check TPStorage + TestTpStorage(fileID, device) + + // check stored test pulses (if available) + TestStoredTestPulses(fileID, device) + + // check stimset parameter waves + TestStimsetParamWaves(fileID, device, sweeps) + + // check all acquisitions + TestListOfGroups(IPNWB#ReadAcquisition(fileID, NWB_VERSION), acquisitions) + + // check all stimulus + TestListOfGroups(IPNWB#ReadStimulus(fileID), stimuluses) + + // check sweep data + numEntries = DimSize(sweeps, ROWS) + for(i = 0; i < numEntries; i += 1) + sweep = sweeps[i] + + if(!IsValidSweepNumber(sweep)) + break + endif + + DFREF pxpSweepsDFR = TestSweepData(entry, device, sweep) + + // check acquisition TimeSeries of NWB + numGroups = ItemsInList(acquisitions[i]) + for(j = 0; j < numGroups; j += 1) + channel = StringFromList(j, acquisitions[i]) + groupID = IPNWB#OpenAcquisition(fileID, NWB_VERSION) + + // TimeSeries properties + TestTimeSeriesProperties(groupID, channel) + + TestTimeSeries(fileID, discLocation, device, groupID, channel, sweep, pxpSweepsDFR) + endfor + + // check presentation/stimulus TimeSeries of NWB + numGroups = ItemsInList(stimuluses[i]) + for(j = 0; j < numGroups; j += 1) + channel = StringFromList(j, stimuluses[i]) + groupID = IPNWB#OpenStimulus(fileID) + + // TimeSeries properties + TestTimeSeriesProperties(groupID, channel) + + TestTimeSeries(fileID, discLocation, device, groupID, channel, sweep, pxpSweepsDFR) + endfor + endfor + + HDF5CloseFile/Z fileID +End + +static Function/WAVE NWBVersionStrings() + variable i, numEntries + string name + + Make/T/FREE data = {"2.0b", "2.0.1", "2.1.0", "2.2.0"} + return data +End + +// UTF_TD_GENERATOR NWBVersionStrings +Function TestNWBVersionStrings([str]) + string str + + variable version0, version1, version2 + + IPNWB#AnalyzeNWBVersion(str, version0, version1, version2) + REQUIRE_NEQ_VAR(version0, NaN) + IPNWB#EnsureValidNWBVersion(version0) + + REQUIRE_NEQ_VAR(version1, NaN) +End + +static Function/WAVE NeuroDataRefTree() + variable i, numEntries + string name + + Make/T/FREE data = {"VoltageClampSeries:TimeSeries;PatchClampSeries;VoltageClampSeries;", \ + "CurrentClampSeries:TimeSeries;PatchClampSeries;CurrentClampSeries;", \ + 
"IZeroClampSeries:TimeSeries;PatchClampSeries;CurrentClampSeries;IZeroClampSeries;" \ + } + return data +End + +// UTF_TD_GENERATOR NeuroDataRefTree +Function TestNeuroDataRefTree([str]) + string str + + string neurodata_type, ancestry + + neurodata_type = StringFromList(0, str, ":") + ancestry = StringFromList(1, str, ":") + + str = IPNWB#DetermineDataTypeRefTree(neurodata_type) + REQUIRE_EQUAL_STR(ancestry, str) + + str = StringFromList(ItemsInList(ancestry) - 1, ancestry) + REQUIRE_EQUAL_STR(neurodata_type, str) +End diff --git a/Packages/doc/Doxyfile b/Packages/doc/Doxyfile index 58cd549a3e..e9896e2308 100644 --- a/Packages/doc/Doxyfile +++ b/Packages/doc/Doxyfile @@ -805,6 +805,7 @@ WARN_LOGFILE = # Note: If this tag is empty the current directory is searched. INPUT = . \ + ../Conversion \ ../MIES \ ../IPNWB \ .. \ diff --git a/Packages/doc/Makefile b/Packages/doc/Makefile new file mode 100644 index 0000000000..1e76990935 --- /dev/null +++ b/Packages/doc/Makefile @@ -0,0 +1,23 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = --keep-going -a -P +SPHINXBUILD = sphinx-build +SPHINXPROJ = MIES +SOURCEDIR = . +BUILDDIR = . + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: Makefile autobuild + +autobuild: + sphinx-autobuild -b html --ignore "*~" --ignore ".git" --ignore "*T0" --ignore "*.un~" --open-browser "$(SOURCEDIR)" "$(BUILDDIR)/html" $(O) + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/Packages/doc/conf.py b/Packages/doc/conf.py index 83d93b3d1d..190fd9768f 100644 --- a/Packages/doc/conf.py +++ b/Packages/doc/conf.py @@ -15,13 +15,12 @@ def get_version(): return "({branch}) {version}".format(branch=branchString.decode('ascii'), version=revString.decode('ascii')) - # sphinx config extensions = ['sphinx.ext.mathjax', 'sphinx.ext.todo', 'breathe', 'sphinxcontrib.fulltoc', 'sphinxcontrib.images'] master_doc = "index" project= "MIES Igor" -exclude_patterns = [ 'releasenotes_template.rst'] +exclude_patterns = [ 'releasenotes_template.rst', 'IPNWB/specifications'] cpp_id_attributes = [ 'threadsafe' ] diff --git a/Packages/doc/index.rst b/Packages/doc/index.rst index f1cc3daa98..373207277b 100644 --- a/Packages/doc/index.rst +++ b/Packages/doc/index.rst @@ -14,7 +14,7 @@ Table of Contents namespacelist CalculateTPLikePropsFromSweep SweepFormula - IPNWB + IPNWB/index TPAnalysis_algorithm asyncframework ZeroMQ-XOP-Readme diff --git a/tools/build-documentation.sh b/tools/build-documentation.sh index c3c67a2731..946c155ee6 100755 --- a/tools/build-documentation.sh +++ b/tools/build-documentation.sh @@ -51,7 +51,16 @@ else Failed fi -cp "$top_level/Packages/IPNWB/Readme.rst" "$top_level/Packages/doc/IPNWB.rst" +ln -s "${top_level}/Packages/IPNWB" "${top_level}/Packages/doc/" +rm -rf "${top_level}/Packages/doc/IPNWB/ndx-MIES" +trap "rm -rf ${top_level}/Packages/doc/IPNWB" EXIT + +# IPNWB script has dependencies. Disabling it for CI. 
+# +# cd "${top_level}/Packages/IPNWB" +# bash "${top_level}/Packages/IPNWB/update_doc.sh" +# cd "${top_level}/Packages/doc" # submodule rev-parse + cp "$top_level/Packages/ZeroMQ/Readme.rst" "$top_level/Packages/doc/ZeroMQ-XOP-Readme.rst" if hash breathe-apidoc 2>/dev/null; then diff --git a/tools/nwb-read-tests/Dockerfile b/tools/nwb-read-tests/Dockerfile new file mode 100644 index 0000000000..fdf047b94d --- /dev/null +++ b/tools/nwb-read-tests/Dockerfile @@ -0,0 +1,26 @@ +FROM debian:buster +MAINTAINER Thomas Braun thomas.braun@byte-physics.de + +RUN DEBIAN_FRONTEND=noninteractive \ + apt-get update && \ + apt-get install -y \ + python3 \ + git \ + python3-pip && \ + apt-get clean + +ARG PACKAGE_WITH_VERSION + +RUN DEBIAN_FRONTEND=noninteractive \ + pip3 install $PACKAGE_WITH_VERSION + +ARG USERID +ARG GROUPID + +# add normal user +RUN groupadd -g $USERID ci +RUN useradd -u $USERID -g $GROUPID -ms /bin/bash ci + +USER ci + +WORKDIR /home/ci diff --git a/tools/nwb-read-tests/nwbv2-read-test.py b/tools/nwb-read-tests/nwbv2-read-test.py new file mode 100755 index 0000000000..9a4bbf1dd8 --- /dev/null +++ b/tools/nwb-read-tests/nwbv2-read-test.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python3 + +from pynwb import NWBHDF5IO +import h5py +import sys +import os +from subprocess import run, PIPE, STDOUT +from argparse import ArgumentParser + +vers = sys.version_info +if vers < (3, 7): + print("Unsupported python version: {}".format(vers), file=sys.stderr) + sys.exit(1) + +def checkFile(path): + + if not os.path.isfile(path): + print(f"The file {path} does not exist.", file=sys.stderr) + return 1 + + # 1.) Validation + comp = run(["python3", "-m", "pynwb.validate", "--cached-namespace", path], + stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=20) + + if comp.returncode != 0: + print(f"Validation output: {comp.stdout}", file=sys.stderr) + return 1 + + print(f"Validation output: {comp.stdout}", file=sys.stdout) + + # 2.) Read test + with NWBHDF5IO(path, mode='r', load_namespaces=True) as io: + nwbfile = io.read() + + print(nwbfile) + print(nwbfile.ic_electrodes) + print(nwbfile.sweep_table) + print(nwbfile.lab_meta_data) + print(nwbfile.devices) + print(nwbfile.acquisition) + print(nwbfile.stimulus) + print(nwbfile.epochs) + + object_ids = nwbfile.objects.keys() + print(object_ids) + + # check that pynwb/hdmf can read our object IDs + with h5py.File(path, 'r') as f: + root_object_id_hdf5 = f["/"].attrs["object_id"] + + if root_object_id_hdf5 not in object_ids: + print(f"object IDs don't match", file=sys.stderr) + return 1 + + return 0 + + +def main(): + + parser = ArgumentParser(description="Validate and read an NWB file") + parser.add_argument("paths", type=str, nargs='+', help="NWB file paths") + args = parser.parse_args() + ret = 0 + + for path in args.paths: + ret = ret or checkFile(path) + + + if ret == 0: + print("Success!") + + return ret + + +if __name__ == '__main__': + + try: + sys.exit(main()) + except Exception as e: + print(e, file=sys.stderr) + sys.exit(1) diff --git a/tools/nwb-read-tests/run.sh b/tools/nwb-read-tests/run.sh new file mode 100755 index 0000000000..827bf8cba8 --- /dev/null +++ b/tools/nwb-read-tests/run.sh @@ -0,0 +1,28 @@ +#/bin/bash + +# checks for correct installation +if [ ! $(docker -v | grep -c -w version) -eq 1 ]; then + echo "docker not found." + exit 1 +fi +if [ ! $(groups | grep -c -w docker) -eq 1 ]; then + echo "add current user $(whoami) to docker group!" 
+ exit 1 +fi + +top_level=$(git rev-parse --show-toplevel) + +list_of_files=$(find $top_level -iname "*-V2.nwb") + +tag="nwb-read-tests" + +# build containter +echo "Start building Docker container \"$tag\"" + +docker build --build-arg USERID=$(id -u) \ + --build-arg GROUPID=$(id -g) \ + --build-arg PACKAGE_WITH_VERSION=git+https://github.com/neurodatawithoutborders/pynwb@dev \ + -t $tag $top_level/tools/nwb-read-tests + +# use 'docker run -it ..' for interactive debugging + docker run --rm -v $HOME/bamboo-agent-home:/home/ci/bamboo-agent-home -v $top_level:/home/ci $tag python3 $top_level/tools/nwb-read-tests/nwbv2-read-test.py $list_of_files
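
Note on the conversion bookkeeping added in Packages/Conversion/MIES_MassExperimentProcessing.ipf: StartMultiExperimentProcessWrapper seeds a conversion.json in the output folder (keys inputFiles, inputFolder, outputFolder, index, processed, errors, skipped, total and a /log array), and ProcessCurrentExperiment fills each log entry with from, to, output and an error flag on failure. A minimal Python sketch for summarizing such a file after a mass-conversion run, assuming exactly that layout; the summarize helper and the default file name are illustrative and not part of the patch:

#!/usr/bin/env python3
# Summarize a conversion.json written by the mass-conversion procedure.
# Key names mirror what StartMultiExperimentProcessWrapper/ProcessCurrentExperiment
# write; treat the exact layout as an assumption and adapt if it changes.
import json
import sys

def summarize(path):
    with open(path, encoding="utf-8") as fh:
        data = json.load(fh)

    print("processed {} of {} ({} skipped, {} errors)".format(
        data.get("processed", 0), data.get("total", 0),
        data.get("skipped", 0), data.get("errors", 0)))

    for entry in data.get("log", []):
        if entry.get("error"):
            # "from"/"to" hold the input pxp and the output nwb path of the failed conversion
            print("FAILED: {} -> {}".format(entry.get("from", "?"), entry.get("to", "?")))

if __name__ == "__main__":
    summarize(sys.argv[1] if len(sys.argv) > 1 else "conversion.json")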
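
Note on the NWBv2 sweep lookup in MIES_AnalysisBrowser.ipf: AB_StoreChannelsBySweep no longer reads the per-channel source attribute but resolves sweep numbers through the sweep table (IPNWB#LoadSweepTable, matching the series name and asserting exactly one hit). A rough pynwb equivalent of that lookup, useful for cross-checking exported files outside Igor; sweep_number_for_series is a made-up helper and the column names ("sweep_number", "series") follow the published SweepTable type, so verify them against your file:

# Rough pynwb equivalent of the SweepTable lookup now used by AB_StoreChannelsBySweep.
from pynwb import NWBHDF5IO

def sweep_number_for_series(path, series_name):
    with NWBHDF5IO(path, mode="r", load_namespaces=True) as io:
        nwbfile = io.read()
        df = nwbfile.sweep_table.to_dataframe()
        for _, row in df.iterrows():
            series = row["series"]
            # depending on the pynwb version the cell is a single TimeSeries or a list of them
            if hasattr(series, "name"):
                names = [series.name]
            else:
                names = [ts.name for ts in series]
            if series_name in names:
                return int(row["sweep_number"])
    raise ValueError("no sweep table entry found for " + series_name)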
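
Note on TestSpecVersions in UTF_TestNWBExportV2.ipf: it expects the cached specifications under /specifications/{core,hdmf-common,ndx-mies}/<version>/namespace, with the core group version matching the file's /nwb_version attribute and each namespace's /namespaces/0/version matching its group name. A small h5py sketch that dumps the same information for a given file; list_cached_specs is an illustrative helper and the group layout is assumed to be exactly what the test asserts:

# Dump the cached specification versions that TestSpecVersions checks, using h5py.
import json
import h5py

def list_cached_specs(path):
    with h5py.File(path, "r") as f:
        print("nwb_version:", f["/"].attrs["nwb_version"])
        for name, group in f["/specifications"].items():
            for version, versionGroup in group.items():
                # the "namespace" dataset holds the JSON namespace specification
                spec = json.loads(versionGroup["namespace"][()])
                print(name, version, spec["namespaces"][0]["version"])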