mirror of
https://github.com/PAMGuard/PAMGuard.git
synced 2024-11-21 22:52:22 +00:00
Working out on exporter
This commit is contained in:
parent
53d78a638a
commit
aa11293162
18
readme.md
18
readme.md
@ -1,13 +1,14 @@
|
||||
# PAMGuard
|
||||
|
||||
## Introduction
|
||||
PAMGuard is a bioacoustics analysis program designed for use in real time research contexts and for the processing of large datasets. PAMGuard provides users access to a suite of state-of-the-art automated analysis algorithms alongside displays for visualising data and a comprehensive data management system.
|
||||
|
||||
# Why do we need PAMGuard?
|
||||
## Why do we need PAMGuard?
|
||||
PAMGuard fulfils two main requirements within marine bioacoustics:
|
||||
|
||||
1) **Real time operation** - Almost all PAMGuard features and modules work in real time - this allows scientists and industry to detect, classify and localise animals in real time on a standard consumer laptop, enabling mitigation and research surveys without expensive bespoke software solutions and with the transparency of open source software.
|
||||
|
||||
2) **Processing and visualisation of large datasets** -
|
||||
1) **Real time operation**: Almost all PAMGuard features and modules work in real time - this allows scientists and industry to detect, classify and localise animals in real time on a standard consumer laptop, enabling mitigation and research surveys without expensive bespoke software solutions and with the transparency of open source software.
|
||||
|
||||
2) **Processing and visualisation of large datasets**: Off-the-shelf autonomous PAM devices, large scale surveys involving drifters, towed arrays and bottom mounted devices and real time continuous monitoring systems all generate huge volumes of data which require automated analysis approaches. PAMGuard allows the processing of very large passive acoustic datasets using automated algorithms and provides visualisation tools for a manual analyst to check the results.
|
||||
|
||||
## Installation
|
||||
PAMGuard is available on Windows and can be downloaded from the [PAMGuard website](https://www.pamguard.org). Note that we are considering MacOS installers but they are not available at this time.
|
||||
@ -20,7 +21,6 @@ Upon opening PAMGuard for the first time you are greeted with a blank screen. Yo
|
||||
Make sure to add the database and binary file storage modules (**_File->Add Modules->Utilities->..._**) to save data then press the run button (red button) and data will process. PAMGuard can handle huge datasets so running might take hours or even days. Progress is shown on the bottom of the screen.
|
||||
|
||||
## Features
|
||||
|
||||
### Hardware integration
|
||||
PAMGuard connects with hardware such as various GPS and AIS systems and a multitude of different sound cards (e.g. [National Instruments](https://www.ni.com) devices, [SAIL DAQ cards](https://www.smruconsulting.com/contact-us), almost all ASIO sound cards and standard computer sound cards) for real time data collection and processing. PAMGuard also works with some very bespoke hardware such as DIFAR Sonobuoys.
|
||||
|
||||
@ -46,14 +46,16 @@ PAMGuard has a noise band (which supports third octave noise bands) and long ter
|
||||
Almost all detection data can be visualised on a map. PAMGuard also supports plotting GPS and AIS data.
|
||||
|
||||
### Suite of data visualisation tools
|
||||
An important aspect of PAMGuard is the ability for users to explore processed data. This is
|
||||
An important aspect of PAMGuard is the ability for users to explore processed data. PAMGuard allows users to visualise data at multiple different time scales, from inspecting individual waveforms microseconds long to datagrams showing detector output or soundscape metrics over days, weeks or even years.
|
||||
|
||||
### Advanced manual annotation
|
||||
The displays within PAMGuard support a variety of manual annotation tools. A simple spectrogram
|
||||
The displays within PAMGuard support a variety of manual annotation tools.
|
||||
|
||||
### Deep learning integration
|
||||
PAMGuard allows users to run their own deep learning models using the deep learning module. AI can therefore be integrated into PAMGuard workflows, allowing for much more efficient analysis of data.
|
||||
|
||||
### Metadata standard and Tethys compatibility
|
||||
### Metadata standard and Tethys compatibility
|
||||
PAMGuard Integrates with Tethys
|
||||
|
||||
## Feature roadmap
|
||||
There are lots of features we would like to add to PAMGuard. If you want to add a feature you can either code it up yourself in Java and submit a pull request or get in touch with us to discuss how it might be integrated. Some smaller features might be in our roadmap anyway but larger features usually require funding. Some features we are thinking about (but do not necessarily have time for yet) are:
|
||||
|
@ -4,14 +4,11 @@ import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.zip.Deflater;
|
||||
|
||||
import PamUtils.PamArrayUtils;
|
||||
import PamUtils.PamCalendar;
|
||||
import PamguardMVC.PamDataUnit;
|
||||
import export.PamDataUnitExporter;
|
||||
import us.hebi.matlab.mat.format.Mat5;
|
||||
@ -21,7 +18,6 @@ import us.hebi.matlab.mat.types.Matrix;
|
||||
import us.hebi.matlab.mat.types.Sink;
|
||||
import us.hebi.matlab.mat.types.Sinks;
|
||||
import us.hebi.matlab.mat.types.Struct;
|
||||
import us.hebi.matlab.mat.util.Casts;
|
||||
|
||||
|
||||
/**
|
||||
@ -106,10 +102,11 @@ public class MLDetectionsManager implements PamDataUnitExporter {
|
||||
|
||||
matFile.writeTo(sink);
|
||||
|
||||
matFile.close();
|
||||
// matFile.close(); //CAUSES AN EXCEPTION IF WRITING AGAIN
|
||||
|
||||
}
|
||||
else {
|
||||
|
||||
//write to the mat file without loading all contents into memory.
|
||||
Mat5Writer writer = Mat5.newWriter(sink);
|
||||
|
||||
@ -257,7 +254,7 @@ public class MLDetectionsManager implements PamDataUnitExporter {
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
//handled in the main function
|
||||
//handled in the main function
|
||||
if (sink!=null) {
|
||||
try {
|
||||
sink.close();
|
||||
|
@ -82,7 +82,7 @@ public class MLWhistleMoanExport extends MLDataUnitExport<ConnectedRegionDataUni
|
||||
|
||||
// Struct mlStructure= new MLStructure("sliceData", new int[]{dataUnit.getConnectedRegion().getSliceData().size(), 1});
|
||||
|
||||
Struct mlStructure= Mat5.newStruct();
|
||||
Struct mlStructure= Mat5.newStruct(dataUnit.getConnectedRegion().getSliceData().size(), 1);
|
||||
|
||||
//the start sample.
|
||||
Matrix sliceNumber;
|
||||
|
@ -73,9 +73,11 @@ public class PamExporterManager {
|
||||
public boolean exportDataUnit(PamDataUnit<?, ?> dataUnit, boolean force) {
|
||||
boolean exportOK = true;
|
||||
|
||||
System.out.println("Add data unit " + dataUnit + " to: "+ currentFile);
|
||||
|
||||
if (dataUnit==null) {
|
||||
if (force) {
|
||||
// System.out.println("Write data 1!!" + dataUnitBuffer.size() );
|
||||
System.out.println("Write data 1!!" + dataUnitBuffer.size() );
|
||||
//finish off saving any buffered data
|
||||
exportOK = pamExporters.get(exportParams.exportChoice).exportData(currentFile, dataUnitBuffer, true);
|
||||
dataUnitBuffer.clear();
|
||||
@ -87,7 +89,7 @@ public class PamExporterManager {
|
||||
if (currentFile == null || isNeedsNewFile(currentFile, pamExporters.get(exportParams.exportChoice))) {
|
||||
Date date = new Date(dataUnit.getTimeMilliseconds());
|
||||
|
||||
String newFileName = "PAM_" + dataFormat.format(date) + dataUnit.getParentDataBlock().getDataName().replace(" ", "_");
|
||||
String newFileName = "PAM_" + dataFormat.format(date) + "_" + dataUnit.getParentDataBlock().getDataName().replace(" ", "_");
|
||||
|
||||
//create a new file - note each exporter is responsible for closing the file after writing
|
||||
//so previous files should already be closed
|
||||
@ -99,7 +101,7 @@ public class PamExporterManager {
|
||||
|
||||
dataUnitBuffer.add(dataUnit);
|
||||
|
||||
// System.out.println("Write data unit " + dataUnitBuffer.size() + " to: "+ currentFile);
|
||||
System.out.println("Write data unit " + dataUnitBuffer.size() + " to: "+ currentFile);
|
||||
|
||||
if (dataUnitBuffer.size()>=BUFFER_SIZE || force) {
|
||||
// System.out.println("Write data 2!!" + dataUnitBuffer.size());
|
||||
|
@ -37,6 +37,9 @@ import export.PamExporterManager;
|
||||
import export.layoutFX.ExportParams;
|
||||
import offlineProcessing.OLProcessDialog;
|
||||
import offlineProcessing.OfflineTaskGroup;
|
||||
import offlineProcessing.TaskMonitor;
|
||||
import offlineProcessing.TaskMonitorData;
|
||||
import offlineProcessing.TaskStatus;
|
||||
|
||||
/**
|
||||
* Handles an offline dialog for processing offline data and exporting to bespoke file types.
|
||||
@ -353,19 +356,68 @@ public class ExportProcessDialog {
|
||||
|
||||
}
|
||||
|
||||
class ExportTaskMonitor implements TaskMonitor {
|
||||
|
||||
private int taskIndex;
|
||||
|
||||
private ExportTaskGroup exportTaskGroup;
|
||||
|
||||
private boolean started = false;
|
||||
|
||||
public ExportTaskMonitor(int i, ExportTaskGroup exportTaskGroup) {
|
||||
this.taskIndex = i;
|
||||
this.exportTaskGroup = exportTaskGroup;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void setTaskStatus(TaskMonitorData taskMonitorData) {
|
||||
if (taskMonitorData.taskStatus== TaskStatus.COMPLETE && !started) {
|
||||
System.out.println(" TASK COMPLETE:");
|
||||
if (taskIndex<exportTaskGroup.getNTasks()) {
|
||||
exportTaskGroup.runTaskFrom(taskIndex+1);
|
||||
started = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Export taks group.
|
||||
*/
|
||||
class ExportTaskGroup extends OfflineTaskGroup {
|
||||
|
||||
|
||||
public ExportTaskGroup(String settingsName) {
|
||||
super(null, settingsName);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getUnitType() {
|
||||
return "Export Data";
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs tasks from a specific task number.
|
||||
* @param i - the index
|
||||
*/
|
||||
public void runTaskFrom(int i) {
|
||||
System.out.println("RUN TASK FROM :" + i);
|
||||
|
||||
this.setPrimaryDataBlock(getTask(i).getDataBlock());
|
||||
if (i<getNTasks()-1) {
|
||||
//will start a new thread after this one has finished
|
||||
this.setTaskMonitor(new ExportTaskMonitor(i, this));
|
||||
}
|
||||
super.runTasks();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Override the tasks o it runs through all tasks for each datablock. Usually
|
||||
@ -376,159 +428,10 @@ public class ExportProcessDialog {
|
||||
*/
|
||||
@Override
|
||||
public boolean runTasks() {
|
||||
boolean OK = true;
|
||||
for (int i=0; i<getNTasks(); i++) {
|
||||
this.setPrimaryDataBlock(getTask(i).getDataBlock());
|
||||
super.runTasks();
|
||||
}
|
||||
return OK;
|
||||
runTaskFrom(0) ;
|
||||
return true;
|
||||
}
|
||||
|
||||
//
|
||||
//
|
||||
// int nDatas = primaryDataBlock.getUnitsCount();
|
||||
// int nSay = Math.max(1, nDatas / 100);
|
||||
//// int nDone = 0;
|
||||
// int nTasks = getNTasks();
|
||||
// PamDataUnit dataUnit;
|
||||
// OfflineTask aTask;
|
||||
// boolean unitChanged;
|
||||
// DataUnitFileInformation fileInfo;
|
||||
// String dataName;
|
||||
// if (mapPoint != null) {
|
||||
// dataName = mapPoint.getName();
|
||||
// }
|
||||
// else {
|
||||
// dataName = "Loaded Data";
|
||||
// }
|
||||
// /**
|
||||
// * Make sure that any data from required data blocks is loaded. First check the
|
||||
// * start and end times of the primary data units we actually WANT to process
|
||||
// * Also get a count of found data - may be able to leave without having to do anything at all
|
||||
// */
|
||||
// ListIterator<PamDataUnit> it = primaryDataBlock.getListIterator(0);
|
||||
// long procDataStart = Long.MAX_VALUE;
|
||||
// long procDataEnd = 0;
|
||||
// int nToProcess = 0;
|
||||
// while (it.hasNext()) {
|
||||
// dataUnit = it.next();
|
||||
// /**
|
||||
// * Make sure we only process data units within the current time interval.
|
||||
// */
|
||||
// if (dataUnit.getTimeMilliseconds() < processStartTime) {
|
||||
// continue;
|
||||
// }
|
||||
// if (dataUnit.getTimeMilliseconds() > processEndTime) {
|
||||
// break;
|
||||
// }
|
||||
//// if (shouldProcess(dataUnit) == false) {
|
||||
//// continue;
|
||||
//// }
|
||||
// procDataStart = Math.min(procDataStart, dataUnit.getTimeMilliseconds());
|
||||
// procDataEnd = Math.max(procDataEnd, dataUnit.getEndTimeInMilliseconds());
|
||||
// // do this one too - just to make sure in case end time returns zero.
|
||||
// procDataEnd = Math.max(procDataEnd, dataUnit.getTimeMilliseconds());
|
||||
// nToProcess++; // increase toprocess counter
|
||||
// }
|
||||
// if (nToProcess == 0) {
|
||||
// return;
|
||||
// }
|
||||
// PamDataBlock aDataBlock;
|
||||
// RequiredDataBlockInfo blockInfo;
|
||||
// /*
|
||||
// * if the data interval is < 1 hour, then load it all now
|
||||
// * otherwise we'll do it on a data unit basis.
|
||||
// */
|
||||
////// long maxSecondaryLoad = 1800L*1000L;
|
||||
////// if (procDataEnd - procDataStart < maxSecondaryLoad) {
|
||||
//// loadSecondaryData(procDataStart, procDataEnd);
|
||||
////// }
|
||||
// // remember the end time of the data so we can use the "new data" selection flag.
|
||||
// taskGroupParams.lastDataTime = Math.min(primaryDataBlock.getCurrentViewDataEnd(),processEndTime);
|
||||
// // synchronized(primaryDataBlock) {
|
||||
// /*
|
||||
// * Call newDataLoaded for each task before getting on with processing individual data units.
|
||||
// */
|
||||
//
|
||||
// /**
|
||||
// * Now process the data
|
||||
// */
|
||||
// it = primaryDataBlock.getListIterator(0);
|
||||
// unitChanged = false;
|
||||
// int totalUnits = 0;
|
||||
// int unitsChanged = 0;
|
||||
// boolean doTasks = false;
|
||||
// while (it.hasNext()) {
|
||||
// dataUnit = it.next();
|
||||
// totalUnits++;
|
||||
// doTasks = true;
|
||||
// /**
|
||||
// * Make sure we only process data units within the current time interval.
|
||||
// */
|
||||
// if (dataUnit.getTimeMilliseconds() < processStartTime) {
|
||||
// continue;
|
||||
// }
|
||||
// if (dataUnit.getTimeMilliseconds() > processEndTime) {
|
||||
// break;
|
||||
// }
|
||||
//
|
||||
// if (shouldProcess(dataUnit) == false) {
|
||||
// doTasks = false;
|
||||
// }
|
||||
//
|
||||
// if (doTasks) {
|
||||
// /*
|
||||
// * load the secondary datablock data. this can be called even if
|
||||
// * it was called earlier on since it wont' reload if data are already
|
||||
// * in memory.
|
||||
// */
|
||||
//// loadSecondaryData(dataUnit.getTimeMilliseconds(), dataUnit.getEndTimeInMilliseconds());
|
||||
//
|
||||
// for (int iTask = 0; iTask < nTasks; iTask++) {
|
||||
// aTask = getTask(iTask);
|
||||
// if (aTask.isDoRun() == false || !isInTimeChunk(dataUnit, taskGroupParams.timeChunks)) {
|
||||
// continue;
|
||||
// }
|
||||
// cpuMonitor.start();
|
||||
// unitChanged |= aTask.processDataUnit(dataUnit);
|
||||
// cpuMonitor.stop();
|
||||
// }
|
||||
// if (unitChanged) {
|
||||
// fileInfo = dataUnit.getDataUnitFileInformation();
|
||||
// if (fileInfo != null) {
|
||||
// fileInfo.setNeedsUpdate(true);
|
||||
// }
|
||||
// dataUnit.updateDataUnit(System.currentTimeMillis());
|
||||
// }
|
||||
// dataUnit.freeData();
|
||||
// }
|
||||
// if (instantKill) {
|
||||
// break;
|
||||
// }
|
||||
// unitsChanged++;
|
||||
// if (totalUnits%nSay == 0) {
|
||||
// publish(new TaskMonitorData(TaskStatus.RUNNING, TaskActivity.PROCESSING, nToProcess, totalUnits, dataName,
|
||||
// dataUnit.getTimeMilliseconds()));
|
||||
// }
|
||||
// }
|
||||
// for (int iTask = 0; iTask < nTasks; iTask++) {
|
||||
// aTask = getTask(iTask);
|
||||
// if (aTask.isDoRun() == false) {
|
||||
// continue;
|
||||
// }
|
||||
// aTask.loadedDataComplete();
|
||||
// }
|
||||
// // }
|
||||
// publish(new TaskMonitorData(TaskStatus.RUNNING, TaskActivity.SAVING, nToProcess, totalUnits, dataName,
|
||||
// processEndTime));
|
||||
// for (int i = 0; i < affectedDataBlocks.size(); i++) {
|
||||
// //System.out.println("SAVE VIEWER DATA FOR: " + affectedDataBlocks.get(i) );
|
||||
// aDataBlock = affectedDataBlocks.get(i);
|
||||
// aDataBlock.saveViewerData();
|
||||
// }
|
||||
// Debug.out.printf("Processd %d out of %d data units at " + mapPoint + "\n", unitsChanged, totalUnits);
|
||||
// commitDatabase();
|
||||
// }
|
||||
}
|
||||
|
||||
|
||||
|
@ -4,6 +4,7 @@ import PamController.PamController;
|
||||
import PamguardMVC.PamDataBlock;
|
||||
import PamguardMVC.PamDataUnit;
|
||||
import PamguardMVC.dataSelector.DataSelectDialog;
|
||||
import PamguardMVC.dataSelector.DataSelectParams;
|
||||
import PamguardMVC.dataSelector.DataSelector;
|
||||
import dataMap.OfflineDataMapPoint;
|
||||
import export.PamExporterManager;
|
||||
@ -45,12 +46,17 @@ public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
|
||||
@Override
|
||||
public boolean processDataUnit(PamDataUnit<?, ?> dataUnit) {
|
||||
|
||||
System.out.println("Huh? " + this.getDataBlock().getDataName() + " " + dataUnit + " " + dataUnit.getParentDataBlock().equals(this.getDataBlock()));
|
||||
// System.out.println("Huh? " + this.getDataBlock().getDataName() + " " + dataUnit + " " + dataUnit.getParentDataBlock().equals(this.getDataBlock()));
|
||||
|
||||
if (dataUnit.getParentDataBlock().equals(this.getDataBlock())) {
|
||||
//this is very important because the way the exporter works is that it iterates through multiple parent data blocks
|
||||
System.out.println(this.getDataBlock().getDataName() + " " + dataUnit);
|
||||
if (dataSelector==null) exporter.exportDataUnit(dataUnit, false);
|
||||
else if (dataSelector.scoreData(dataUnit)>0) {
|
||||
//this is very important because the way the exporter works is that it iterates through multiple parent data blocks
|
||||
System.out.println(this.getDataBlock().getDataName() + " " + dataUnit + " " + dataSelector.scoreData(dataUnit));
|
||||
if (dataSelector==null) {
|
||||
// System.out.println("Data selector null: " + this.getDataBlock().getDataName() + " " + dataUnit);
|
||||
exporter.exportDataUnit(dataUnit, false);
|
||||
}
|
||||
else if (dataSelector.scoreData(dataUnit)>=0 || dataSelector.getParams().getCombinationFlag() == DataSelectParams.DATA_SELECT_DISABLE) {
|
||||
// System.out.println("Data selector OK: " + this.getDataBlock().getDataName() + " " + dataUnit);
|
||||
exporter.exportDataUnit(dataUnit, false);
|
||||
}
|
||||
}
|
||||
@ -66,7 +72,7 @@ public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
|
||||
|
||||
@Override
|
||||
public void loadedDataComplete() {
|
||||
// System.out.println("EXPORTER: loaded data complete");
|
||||
System.out.println("EXPORTER: loaded data complete");
|
||||
//force the exporter to save any remaining data units in the buffer
|
||||
exporter.exportDataUnit(null, true);
|
||||
exporter.close();
|
||||
|
Loading…
Reference in New Issue
Block a user