From aa11293162ebebbc13ac431e6014e276445fcbfa Mon Sep 17 00:00:00 2001 From: Jamie Mac Date: Thu, 13 Jun 2024 16:55:47 +0100 Subject: [PATCH] Working out on exporter --- readme.md | 22 +- src/export/MLExport/MLDetectionsManager.java | 9 +- src/export/MLExport/MLWhistleMoanExport.java | 2 +- src/export/PamExporterManager.java | 8 +- src/export/swing/ExportProcessDialog.java | 219 ++++++------------- src/export/swing/ExportTask.java | 18 +- 6 files changed, 94 insertions(+), 184 deletions(-) diff --git a/readme.md b/readme.md index 721b10db..10d133e1 100644 --- a/readme.md +++ b/readme.md @@ -1,14 +1,15 @@ # PAMGuard + +## Introduction PAMGuard is a bioacoustics analysis program designed for use in real time research contexts and for the processing of large datasets. PAMGuard provides users access to a suite of state-of-the-art automated analysis algorithms alongside displays for visualising data and a comprehensive data management system. -# Why do we need PAMGuard? +## Why do we need PAMGuard? PAMGuard fulfils two main requirements within marine bioacoustics: -1) **Real time operation** - Almost all PAMGuard features and modules work in real time - this allows scientists and industry to detect, classify and loclaise animals in real time on a standard consumer laptop, enabling mitigations and research survey without expensive bespoke software solutions and the transparncy of open source software. +1) **Real time operation**: Almost all PAMGuard features and modules work in real time - this allows scientists and industry to detect, classify and localise animals in real time on a standard consumer laptop, enabling mitigation and research surveys without expensive bespoke software solutions and with the transparency of open source software. -2) **Processing and visuslisation of large datasets** - +2) **Processing and visualisation of large datasets**: Off-the-shelf autonomous PAM devices, large scale surveys involving drifters, towed arrays and bottom mounted devices, and real time continuous monitoring systems all generate huge volumes of data which require automated analysis approaches. PAMGuard allows the processing of very large passive acoustic datasets using automated algorithms and provides visualisation tools for a manual analyst to check the results. - ## Installation PAMGuard is available on Windows and can be downloaded from the [PAMGuard website](www.pamguard.org). Note that we are considering MacOS installers but they are not available at this time. @@ -20,7 +21,6 @@ Upon opening PAMGuard for the first time you are greeted with a blank screen. Yo Make sure to add the database and binary file storage modules (**_File->Add Modules->Utilities->..._**) to save data, then press the run button (red button) and data will process. PAMGuard can handle huge datasets so running might take hours or even days. Progress is shown on the bottom of the screen. ## Features - ### Hardware integration PAMGuard connects with hardware such as various GPS and AIS systems and a multitude of different sound cards (e.g. [National Instruments](www.ni.com) devices, [SAIL DAQ cards](www.smruconsulting.com/contact-us), almost all ASIO sound cards and standard computer sound cards) for real time data collection and processing. PAMGuard also works with some very bespoke hardware such as [DIFAR Sonobuoys](). @@ -36,24 +36,26 @@ PAMGuard is designed to collect/process data from large acoustic datasets.
PAMGu ### Access to detection and classification algorithms PAMGuard allows users to integrate automated detection and classification algorithms directly into their acoustic workflow. There is a multitude of different algorithms to choose from, including a basic click detector, whistle and moan detector, GPL detector, click train detectors and many others. The idea behind PAMGuard is to allow researchers to access open source state-of-the-art algorithms developed within the scientific community - if you want to contribute and get your algorithm into PAMGuard, get in touch. -###Localisation +### Localisation PAMGuard has a multitude of different options for acoustic localisation. There's a comprehensive beam forming module for beam forming arrays, a large aperture localiser for 3D localisation and target motion analysis for towed hydrophone arrays. -###Soundscape analysis +### Soundscape analysis PAMGuard has a noise band module (which supports third octave noise bands) and a long term spectral average module for soundscape analysis. ### GIS Almost all detection data can be visualised on a map. PAMGuard also supports plotting GPS and AIS data. ### Suite of data visualisation tools -An important aspect of PAMGuard is the ability for users to explore porcessed data. This is +An important aspect of PAMGuard is the ability for users to explore processed data. PAMGuard allows users to visualise data at multiple different time scales, from inspecting individual waveforms microseconds long to datagrams showing detector output or soundscape metrics over days, weeks or even years. ### Advanced manual annotation -The displays within PAMGuard support a variety of manual annottion tools. A simple spectrogram +The displays within PAMGuard support a variety of manual annotation tools. ### Deep learning integration +PAMGuard allows users to run their own deep learning models using the deep learning module. AI can therefore be integrated into PAMGuard workflows, allowing for much more efficient analysis of data. -### Meatadata standard and Tethys compatibility +### Metadata standard and Tethys compatibility +PAMGuard integrates with the Tethys metadata standard. ## Feature roadmap There are lots of features we would like to add to PAMGuard. If you want to add a feature you can either code it up yourself in Java and submit a pull request or get in touch with us to discuss how it might be integrated. Some smaller features might be in our roadmap anyway but larger features usually require funding.
Some features we are thinking about (but do not necessarily have time for yet) are: diff --git a/src/export/MLExport/MLDetectionsManager.java b/src/export/MLExport/MLDetectionsManager.java index 3cdb1a9e..eb6a1fbc 100644 --- a/src/export/MLExport/MLDetectionsManager.java +++ b/src/export/MLExport/MLDetectionsManager.java @@ -4,14 +4,11 @@ import java.io.File; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.ArrayList; -import java.util.Comparator; import java.util.Date; import java.util.List; -import java.util.NoSuchElementException; import java.util.zip.Deflater; import PamUtils.PamArrayUtils; -import PamUtils.PamCalendar; import PamguardMVC.PamDataUnit; import export.PamDataUnitExporter; import us.hebi.matlab.mat.format.Mat5; @@ -21,7 +18,6 @@ import us.hebi.matlab.mat.types.Matrix; import us.hebi.matlab.mat.types.Sink; import us.hebi.matlab.mat.types.Sinks; import us.hebi.matlab.mat.types.Struct; -import us.hebi.matlab.mat.util.Casts; /** @@ -106,10 +102,11 @@ public class MLDetectionsManager implements PamDataUnitExporter { matFile.writeTo(sink); - matFile.close(); +// matFile.close(); //CAUSES AN EXCEPTION IF WRITING AGAIN } else { + //write to the mat file without loading all contents into memory. Mat5Writer writer = Mat5.newWriter(sink); @@ -257,7 +254,7 @@ public class MLDetectionsManager implements PamDataUnitExporter { @Override public void close() { - //handled in the mian funtion + //handled in the main function if (sink!=null) { try { sink.close(); diff --git a/src/export/MLExport/MLWhistleMoanExport.java b/src/export/MLExport/MLWhistleMoanExport.java index 124628ca..cdaba823 100644 --- a/src/export/MLExport/MLWhistleMoanExport.java +++ b/src/export/MLExport/MLWhistleMoanExport.java @@ -82,7 +82,7 @@ public class MLWhistleMoanExport extends MLDataUnitExport dataUnit, boolean force) { boolean exportOK = true; + System.out.println("Add data unit " + dataUnit + " to: "+ currentFile); + if (dataUnit==null) { if (force) { -// System.out.println("Write data 1!!" + dataUnitBuffer.size() ); + System.out.println("Write data 1!!" + dataUnitBuffer.size() ); //finish off saving any buffered data exportOK = pamExporters.get(exportParams.exportChoice).exportData(currentFile, dataUnitBuffer, true); dataUnitBuffer.clear(); @@ -87,7 +89,7 @@ if (currentFile == null || isNeedsNewFile(currentFile, pamExporters.get(exportParams.exportChoice))) { Date date = new Date(dataUnit.getTimeMilliseconds()); - String newFileName = "PAM_" + dataFormat.format(date) + dataUnit.getParentDataBlock().getDataName().replace(" ", "_"); + String newFileName = "PAM_" + dataFormat.format(date) + "_" + dataUnit.getParentDataBlock().getDataName().replace(" ", "_"); //create a new file - note each exporter is responsible for closing the file after writing //so previous files should already be closed @@ -99,7 +101,7 @@ public class PamExporterManager { dataUnitBuffer.add(dataUnit); -// System.out.println("Write data unit " + dataUnitBuffer.size() + " to: "+ currentFile); + System.out.println("Write data unit " + dataUnitBuffer.size() + " to: "+ currentFile); if (dataUnitBuffer.size()>=BUFFER_SIZE || force) { // System.out.println("Write data 2!!"
+ dataUnitBuffer.size()); diff --git a/src/export/swing/ExportProcessDialog.java b/src/export/swing/ExportProcessDialog.java index 08dff701..7008b7f8 100644 --- a/src/export/swing/ExportProcessDialog.java +++ b/src/export/swing/ExportProcessDialog.java @@ -37,6 +37,9 @@ import export.PamExporterManager; import export.layoutFX.ExportParams; import offlineProcessing.OLProcessDialog; import offlineProcessing.OfflineTaskGroup; +import offlineProcessing.TaskMonitor; +import offlineProcessing.TaskMonitorData; +import offlineProcessing.TaskStatus; /** * Handles an offline dialog for processing offline data and exporting to bespoke file types. @@ -352,20 +355,69 @@ public class ExportProcessDialog { } + + class ExportTaskMonitor implements TaskMonitor { + + private int taskIndex; + + private ExportTaskGroup exportTaskGroup; + + private boolean started = false; - - class ExportTaskGroup extends OfflineTaskGroup { - - public ExportTaskGroup(String settingsName) { - super(null, settingsName); - + public ExportTaskMonitor(int i, ExportTaskGroup exportTaskGroup) { + this.taskIndex = i; + this.exportTaskGroup = exportTaskGroup; } + + @Override + public void setTaskStatus(TaskMonitorData taskMonitorData) { + if (taskMonitorData.taskStatus== TaskStatus.COMPLETE && !started) { + System.out.println(" TASK COMPLETE:"); + if (taskIndex it = primaryDataBlock.getListIterator(0); -// long procDataStart = Long.MAX_VALUE; -// long procDataEnd = 0; -// int nToProcess = 0; -// while (it.hasNext()) { -// dataUnit = it.next(); -// /** -// * Make sure we only process data units within the current time interval. -// */ -// if (dataUnit.getTimeMilliseconds() < processStartTime) { -// continue; -// } -// if (dataUnit.getTimeMilliseconds() > processEndTime) { -// break; -// } -//// if (shouldProcess(dataUnit) == false) { -//// continue; -//// } -// procDataStart = Math.min(procDataStart, dataUnit.getTimeMilliseconds()); -// procDataEnd = Math.max(procDataEnd, dataUnit.getEndTimeInMilliseconds()); -// // do this one too - just to make sure in case end time returns zero. -// procDataEnd = Math.max(procDataEnd, dataUnit.getTimeMilliseconds()); -// nToProcess++; // increase toprocess counter -// } -// if (nToProcess == 0) { -// return; -// } -// PamDataBlock aDataBlock; -// RequiredDataBlockInfo blockInfo; -// /* -// * if the data interval is < 1 hour, then load it all now -// * otherwise we'll do it on a data unit basis. -// */ -////// long maxSecondaryLoad = 1800L*1000L; -////// if (procDataEnd - procDataStart < maxSecondaryLoad) { -//// loadSecondaryData(procDataStart, procDataEnd); -////// } -// // remember the end time of the data so we can use the "new data" selection flag. -// taskGroupParams.lastDataTime = Math.min(primaryDataBlock.getCurrentViewDataEnd(),processEndTime); -// // synchronized(primaryDataBlock) { -// /* -// * Call newDataLoaded for each task before getting on with processing individual data units. -// */ -// -// /** -// * Now process the data -// */ -// it = primaryDataBlock.getListIterator(0); -// unitChanged = false; -// int totalUnits = 0; -// int unitsChanged = 0; -// boolean doTasks = false; -// while (it.hasNext()) { -// dataUnit = it.next(); -// totalUnits++; -// doTasks = true; -// /** -// * Make sure we only process data units within the current time interval. 
-// */ -// if (dataUnit.getTimeMilliseconds() < processStartTime) { -// continue; -// } -// if (dataUnit.getTimeMilliseconds() > processEndTime) { -// break; -// } -// -// if (shouldProcess(dataUnit) == false) { -// doTasks = false; -// } -// -// if (doTasks) { -// /* -// * load the secondary datablock data. this can be called even if -// * it was called earlier on since it wont' reload if data are already -// * in memory. -// */ -//// loadSecondaryData(dataUnit.getTimeMilliseconds(), dataUnit.getEndTimeInMilliseconds()); -// -// for (int iTask = 0; iTask < nTasks; iTask++) { -// aTask = getTask(iTask); -// if (aTask.isDoRun() == false || !isInTimeChunk(dataUnit, taskGroupParams.timeChunks)) { -// continue; -// } -// cpuMonitor.start(); -// unitChanged |= aTask.processDataUnit(dataUnit); -// cpuMonitor.stop(); -// } -// if (unitChanged) { -// fileInfo = dataUnit.getDataUnitFileInformation(); -// if (fileInfo != null) { -// fileInfo.setNeedsUpdate(true); -// } -// dataUnit.updateDataUnit(System.currentTimeMillis()); -// } -// dataUnit.freeData(); -// } -// if (instantKill) { -// break; -// } -// unitsChanged++; -// if (totalUnits%nSay == 0) { -// publish(new TaskMonitorData(TaskStatus.RUNNING, TaskActivity.PROCESSING, nToProcess, totalUnits, dataName, -// dataUnit.getTimeMilliseconds())); -// } -// } -// for (int iTask = 0; iTask < nTasks; iTask++) { -// aTask = getTask(iTask); -// if (aTask.isDoRun() == false) { -// continue; -// } -// aTask.loadedDataComplete(); -// } -// // } -// publish(new TaskMonitorData(TaskStatus.RUNNING, TaskActivity.SAVING, nToProcess, totalUnits, dataName, -// processEndTime)); -// for (int i = 0; i < affectedDataBlocks.size(); i++) { -// //System.out.println("SAVE VIEWER DATA FOR: " + affectedDataBlocks.get(i) ); -// aDataBlock = affectedDataBlocks.get(i); -// aDataBlock.saveViewerData(); -// } -// Debug.out.printf("Processd %d out of %d data units at " + mapPoint + "\n", unitsChanged, totalUnits); -// commitDatabase(); -// } + } diff --git a/src/export/swing/ExportTask.java b/src/export/swing/ExportTask.java index 1342e729..61858455 100644 --- a/src/export/swing/ExportTask.java +++ b/src/export/swing/ExportTask.java @@ -4,6 +4,7 @@ import PamController.PamController; import PamguardMVC.PamDataBlock; import PamguardMVC.PamDataUnit; import PamguardMVC.dataSelector.DataSelectDialog; +import PamguardMVC.dataSelector.DataSelectParams; import PamguardMVC.dataSelector.DataSelector; import dataMap.OfflineDataMapPoint; import export.PamExporterManager; @@ -45,12 +46,17 @@ public class ExportTask extends OfflineTask>{ @Override public boolean processDataUnit(PamDataUnit dataUnit) { - System.out.println("Huh? " + this.getDataBlock().getDataName() + " " + dataUnit + " " + dataUnit.getParentDataBlock().equals(this.getDataBlock())); +// System.out.println("Huh? 
" + this.getDataBlock().getDataName() + " " + dataUnit + " " + dataUnit.getParentDataBlock().equals(this.getDataBlock())); + if (dataUnit.getParentDataBlock().equals(this.getDataBlock())) { - //this is very important because the way the expoeter works is that it iterates through multiple parent data blocks - System.out.println(this.getDataBlock().getDataName() + " " + dataUnit); - if (dataSelector==null) exporter.exportDataUnit(dataUnit, false); - else if (dataSelector.scoreData(dataUnit)>0) { + //this is very important because the way the exporter works is that it iterates through multiple parent data blocks + System.out.println(this.getDataBlock().getDataName() + " " + dataUnit + " " + dataSelector.scoreData(dataUnit)); + if (dataSelector==null) { +// System.out.println("Data selector null: " + this.getDataBlock().getDataName() + " " + dataUnit); + exporter.exportDataUnit(dataUnit, false); + } + else if (dataSelector.scoreData(dataUnit)>=0 || dataSelector.getParams().getCombinationFlag() == DataSelectParams.DATA_SELECT_DISABLE) { +// System.out.println("Data selector OK: " + this.getDataBlock().getDataName() + " " + dataUnit); exporter.exportDataUnit(dataUnit, false); } } @@ -66,7 +72,7 @@ public class ExportTask extends OfflineTask>{ @Override public void loadedDataComplete() { - // System.out.println("EXPORTER: loaded data complete"); + System.out.println("EXPORTER: loaded data complete"); //force the exporter so save any renaming data units in the buffer exporter.exportDataUnit(null, true); exporter.close();