Working on the exporter

This commit is contained in:
Jamie Mac 2024-06-13 08:56:25 +01:00
parent f7be084e91
commit 53d78a638a
11 changed files with 271 additions and 47 deletions

View File

@ -44,7 +44,7 @@ public class RExportOverlayMenu extends ExportOverlayMenu {
* Create the export overlay
*/
public RExportOverlayMenu(){
rExportManger= new RExportManager();
rExportManger= new RExportManager(null);
buttonNode = createButton();

View File

@ -6,6 +6,10 @@ import java.util.List;
import PamguardMVC.PamDataUnit;
import export.PamDataUnitExporter;
/**
* Export to CSV files which are RAVEN compatible.
*/
public class CSVExportManager implements PamDataUnitExporter{
@Override
@ -41,4 +45,9 @@ public class CSVExportManager implements PamDataUnitExporter{
}
@Override
public boolean isNeedsNewFile() {
return false;
}
}

View File

@ -269,6 +269,12 @@ public class MLDetectionsManager implements PamDataUnitExporter {
}
@Override
public boolean isNeedsNewFile() {
// TODO Auto-generated method stub
return false;
}

View File

@ -41,15 +41,21 @@ public interface PamDataUnitExporter {
public String getIconString();
/**
* Get the name of the exporter
* @return
* Get the name of the exporter.
* @return the name of the exporter.
*/
public String getName();
/**
* Clsoe the exporter
* Close the exporter.
*/
public void close();
/**
* Check whether an exporter needs a new file
* @return true if we need a new file.
*/
public boolean isNeedsNewFile();
}

View File

@ -56,11 +56,12 @@ public class PamExporterManager {
"yyyy_MM_dd_HHmmss");
public PamExporterManager() {
pamExporters = new ArrayList<PamDataUnitExporter>();
//add the MATLAB export
pamExporters.add(new MLDetectionsManager());
pamExporters.add(new RExportManager());
pamExporters.add(new RExportManager(this));
pamExporters.add(new WavFileExportManager());
pamExporters.add(new CSVExportManager());
}
@ -74,7 +75,7 @@ public class PamExporterManager {
if (dataUnit==null) {
if (force) {
System.out.println("Write data 1!!" + dataUnitBuffer.size() );
// System.out.println("Write data 1!!" + dataUnitBuffer.size() );
//finish off saving any buffered data
exportOK = pamExporters.get(exportParams.exportChoice).exportData(currentFile, dataUnitBuffer, true);
dataUnitBuffer.clear();
@ -83,10 +84,10 @@ public class PamExporterManager {
}
//if file is null or too large create another a file for saving.
if (currentFile == null || isFileSizeMax(currentFile)) {
if (currentFile == null || isNeedsNewFile(currentFile, pamExporters.get(exportParams.exportChoice))) {
Date date = new Date(dataUnit.getTimeMilliseconds());
String newFileName = "PAM_" + dataFormat.format(date);
String newFileName = "PAM_" + dataFormat.format(date) + dataUnit.getParentDataBlock().getDataName().replace(" ", "_");
//create a new file - note each exporter is responsible for closing the file after writing
//so previous files should already be closed
@ -98,10 +99,10 @@ public class PamExporterManager {
dataUnitBuffer.add(dataUnit);
System.out.println("Write data unit " + dataUnitBuffer.size() + " to: "+ currentFile);
// System.out.println("Write data unit " + dataUnitBuffer.size() + " to: "+ currentFile);
if (dataUnitBuffer.size()>=BUFFER_SIZE || force) {
System.out.println("Write data 2!!" + dataUnitBuffer.size());
// System.out.println("Write data 2!!" + dataUnitBuffer.size());
exportOK = pamExporters.get(exportParams.exportChoice).exportData(currentFile, dataUnitBuffer, true);
dataUnitBuffer.clear();
}
@ -118,10 +119,14 @@ public class PamExporterManager {
/**
* Check whether the current file is greater than the maximum allowed file size.
* @param currentFile2 - the current file
* @param pamDataUnitExporter
* @return true of greater than or equal to the maximum file size.
*/
private boolean isFileSizeMax(File currentFile2) {
return getFileSizeMegaBytes(currentFile2) >= MAX_FILE_SIZE_MB;
private boolean isNeedsNewFile(File currentFile2, PamDataUnitExporter pamDataUnitExporter) {
if( getFileSizeMegaBytes(currentFile2) >= exportParams.maximumFileSize) {
return true;
};
return pamDataUnitExporter.isNeedsNewFile();
}
/**

View File

@ -18,6 +18,7 @@ import org.renjin.sexp.PairList.Builder;
import PamUtils.PamArrayUtils;
import PamguardMVC.PamDataUnit;
import export.PamDataUnitExporter;
import export.PamExporterManager;
import export.MLExport.MLDetectionsManager;
/**
@ -29,7 +30,6 @@ import export.MLExport.MLDetectionsManager;
public class RExportManager implements PamDataUnitExporter {
/**
*
* All the possible RDataUnit export classes.
*/
ArrayList<RDataUnitExport> rDataExport = new ArrayList<RDataUnitExport>();
@ -37,10 +37,13 @@ public class RExportManager implements PamDataUnitExporter {
private File currentFileName ;
private Builder allData;
private Builder allData;
private PamExporterManager pamExporterManager;
public RExportManager(){
public RExportManager(PamExporterManager pamExporterManager){
this.pamExporterManager=pamExporterManager;
/***Add more options here to export data units****/
rDataExport.add(new RClickExport());
rDataExport.add(new RWhistleExport());
@ -55,17 +58,14 @@ public class RExportManager implements PamDataUnitExporter {
* Note - there is no way to save data units to R files without loading the file into memory.
* So everything is stored in memory until saved.
*/
// then
PamDataUnit minByTime = PamArrayUtils.getMinTimeMillis(dataUnits);
//matlab struct must start with a letter.
//MATLAB struct must start with a letter.
Date date = new Date(minByTime.getTimeMilliseconds());
String entryName = "det_" + MLDetectionsManager.dataFormat.format( date);
// System.out.println("Save R data! "+ dataUnits.size());
// System.out.println("Export R file!!" + dataUnits.size());
//is there an existing writer? Is that writer writing to the correct file?
if (allData==null || !fileName.equals(currentFileName)) {
@ -259,5 +259,19 @@ public class RExportManager implements PamDataUnitExporter {
}
@Override
public boolean isNeedsNewFile() {
//Rdata can't be appended to a file so we cannot check file sizes.
// pamExporterManager.getExportParams().maximumFileSize;
//TODO
//check file size against the export params.
System.out.println("RData length: " + allData.length());
return false;
}
}

View File

@ -9,7 +9,6 @@ import java.awt.Window;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.ArrayList;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.JFileChooser;
@ -38,7 +37,6 @@ import export.PamExporterManager;
import export.layoutFX.ExportParams;
import offlineProcessing.OLProcessDialog;
import offlineProcessing.OfflineTaskGroup;
import offlineProcessing.TaskStatus;
/**
* Handles an offline dialog for processing offline data and exporting to bespoke file types.
@ -87,7 +85,7 @@ public class ExportProcessDialog {
dlOfflineGroup.addTask(new ExportTask(dataBlocks.get(i), exportManager));
}
}
}
////---Swing stuff----/// should not be here but this is how PG works.
@ -129,6 +127,9 @@ public class ExportProcessDialog {
*/
private JFileChooser fc;
/**
* Shows the folder that data will be exported to.
*/
private JTextField exportTo;
/**
@ -268,7 +269,7 @@ public class ExportProcessDialog {
Ikon icon = null;
/**
* This is nasty but we won't have many exporters and this is the only
* This is NASTY but we won't have many exporters and this is the only
* good way to get this to work in Swing.
*/
switch (iconString) {
@ -353,11 +354,10 @@ public class ExportProcessDialog {
}
class ExportTaskGroup extends OfflineTaskGroup{
class ExportTaskGroup extends OfflineTaskGroup {
public ExportTaskGroup(String settingsName) {
super(null, settingsName);
// TODO Auto-generated constructor stub
}
@ -365,6 +365,170 @@ public class ExportProcessDialog {
public String getUnitType() {
return "Export Data";
}
/**
* Override the task so it runs through all tasks for each datablock. Usually
* task groups deal with just one parent datablock but exporters export from
* different data blocks. The only way to deal with this is to let the task run
* again and again through all tasks and letting tasks themselves check the
* correct data units are being exported.
*/
@Override
public boolean runTasks() {
boolean OK = true;
for (int i=0; i<getNTasks(); i++) {
this.setPrimaryDataBlock(getTask(i).getDataBlock());
super.runTasks();
}
return OK;
}
//
//
// int nDatas = primaryDataBlock.getUnitsCount();
// int nSay = Math.max(1, nDatas / 100);
//// int nDone = 0;
// int nTasks = getNTasks();
// PamDataUnit dataUnit;
// OfflineTask aTask;
// boolean unitChanged;
// DataUnitFileInformation fileInfo;
// String dataName;
// if (mapPoint != null) {
// dataName = mapPoint.getName();
// }
// else {
// dataName = "Loaded Data";
// }
// /**
// * Make sure that any data from required data blocks is loaded. First check the
// * start and end times of the primary data units we actually WANT to process
// * Also get a count of found data - may be able to leave without having to do anything at all
// */
// ListIterator<PamDataUnit> it = primaryDataBlock.getListIterator(0);
// long procDataStart = Long.MAX_VALUE;
// long procDataEnd = 0;
// int nToProcess = 0;
// while (it.hasNext()) {
// dataUnit = it.next();
// /**
// * Make sure we only process data units within the current time interval.
// */
// if (dataUnit.getTimeMilliseconds() < processStartTime) {
// continue;
// }
// if (dataUnit.getTimeMilliseconds() > processEndTime) {
// break;
// }
//// if (shouldProcess(dataUnit) == false) {
//// continue;
//// }
// procDataStart = Math.min(procDataStart, dataUnit.getTimeMilliseconds());
// procDataEnd = Math.max(procDataEnd, dataUnit.getEndTimeInMilliseconds());
// // do this one too - just to make sure in case end time returns zero.
// procDataEnd = Math.max(procDataEnd, dataUnit.getTimeMilliseconds());
// nToProcess++; // increase toprocess counter
// }
// if (nToProcess == 0) {
// return;
// }
// PamDataBlock aDataBlock;
// RequiredDataBlockInfo blockInfo;
// /*
// * if the data interval is < 1 hour, then load it all now
// * otherwise we'll do it on a data unit basis.
// */
////// long maxSecondaryLoad = 1800L*1000L;
////// if (procDataEnd - procDataStart < maxSecondaryLoad) {
//// loadSecondaryData(procDataStart, procDataEnd);
////// }
// // remember the end time of the data so we can use the "new data" selection flag.
// taskGroupParams.lastDataTime = Math.min(primaryDataBlock.getCurrentViewDataEnd(),processEndTime);
// // synchronized(primaryDataBlock) {
// /*
// * Call newDataLoaded for each task before getting on with processing individual data units.
// */
//
// /**
// * Now process the data
// */
// it = primaryDataBlock.getListIterator(0);
// unitChanged = false;
// int totalUnits = 0;
// int unitsChanged = 0;
// boolean doTasks = false;
// while (it.hasNext()) {
// dataUnit = it.next();
// totalUnits++;
// doTasks = true;
// /**
// * Make sure we only process data units within the current time interval.
// */
// if (dataUnit.getTimeMilliseconds() < processStartTime) {
// continue;
// }
// if (dataUnit.getTimeMilliseconds() > processEndTime) {
// break;
// }
//
// if (shouldProcess(dataUnit) == false) {
// doTasks = false;
// }
//
// if (doTasks) {
// /*
// * load the secondary datablock data. this can be called even if
// * it was called earlier on since it wont' reload if data are already
// * in memory.
// */
//// loadSecondaryData(dataUnit.getTimeMilliseconds(), dataUnit.getEndTimeInMilliseconds());
//
// for (int iTask = 0; iTask < nTasks; iTask++) {
// aTask = getTask(iTask);
// if (aTask.isDoRun() == false || !isInTimeChunk(dataUnit, taskGroupParams.timeChunks)) {
// continue;
// }
// cpuMonitor.start();
// unitChanged |= aTask.processDataUnit(dataUnit);
// cpuMonitor.stop();
// }
// if (unitChanged) {
// fileInfo = dataUnit.getDataUnitFileInformation();
// if (fileInfo != null) {
// fileInfo.setNeedsUpdate(true);
// }
// dataUnit.updateDataUnit(System.currentTimeMillis());
// }
// dataUnit.freeData();
// }
// if (instantKill) {
// break;
// }
// unitsChanged++;
// if (totalUnits%nSay == 0) {
// publish(new TaskMonitorData(TaskStatus.RUNNING, TaskActivity.PROCESSING, nToProcess, totalUnits, dataName,
// dataUnit.getTimeMilliseconds()));
// }
// }
// for (int iTask = 0; iTask < nTasks; iTask++) {
// aTask = getTask(iTask);
// if (aTask.isDoRun() == false) {
// continue;
// }
// aTask.loadedDataComplete();
// }
// // }
// publish(new TaskMonitorData(TaskStatus.RUNNING, TaskActivity.SAVING, nToProcess, totalUnits, dataName,
// processEndTime));
// for (int i = 0; i < affectedDataBlocks.size(); i++) {
// //System.out.println("SAVE VIEWER DATA FOR: " + affectedDataBlocks.get(i) );
// aDataBlock = affectedDataBlocks.get(i);
// aDataBlock.saveViewerData();
// }
// Debug.out.printf("Processd %d out of %d data units at " + mapPoint + "\n", unitsChanged, totalUnits);
// commitDatabase();
// }
}

View File

@ -16,12 +16,12 @@ import offlineProcessing.OfflineTask;
*
*/
public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
/**
* Reference to the data exporter which manages exporting of data.
*/
private PamExporterManager exporter;
/**
* The data selector for the data block
*/
@ -32,8 +32,8 @@ public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
public ExportTask(PamDataBlock<PamDataUnit<?, ?>> parentDataBlock, PamExporterManager exporter) {
super(parentDataBlock);
this.exporter = exporter;
dataSelector=parentDataBlock.getDataSelectCreator().getDataSelector(this.getUnitName() +"_clicks", false, null);
dataSelector=parentDataBlock.getDataSelectCreator().getDataSelector(this.getUnitName() +"_export", false, null);
}
@ -44,9 +44,15 @@ public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
@Override
public boolean processDataUnit(PamDataUnit<?, ?> dataUnit) {
if (dataSelector==null) exporter.exportDataUnit(dataUnit, false);
else if (dataSelector.scoreData(dataUnit)>0) {
exporter.exportDataUnit(dataUnit, false);
System.out.println("Huh? " + this.getDataBlock().getDataName() + " " + dataUnit + " " + dataUnit.getParentDataBlock().equals(this.getDataBlock()));
if (dataUnit.getParentDataBlock().equals(this.getDataBlock())) {
//this is very important because the way the exporter works is that it iterates through multiple parent data blocks
System.out.println(this.getDataBlock().getDataName() + " " + dataUnit);
if (dataSelector==null) exporter.exportDataUnit(dataUnit, false);
else if (dataSelector.scoreData(dataUnit)>0) {
exporter.exportDataUnit(dataUnit, false);
}
}
return false; //we don't need to indicate that anything has changed - we are just exporting.
}
@ -54,18 +60,18 @@ public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
@Override
public void newDataLoad(long startTime, long endTime, OfflineDataMapPoint mapPoint) {
// TODO Auto-generated method stub
// System.out.println("EXPORTER: new data load");
// System.out.println("EXPORTER: new data load");
}
@Override
public void loadedDataComplete() {
System.out.println("EXPORTER: loaded data complete");
// System.out.println("EXPORTER: loaded data complete");
//force the exporter to save any remaining data units in the buffer
exporter.exportDataUnit(null, true);
exporter.close();
exporter.setCurrentFile(null);
}
/**
* task has settings which can be called
@ -80,13 +86,13 @@ public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
* @return true if settings may have changed.
*/
public boolean callSettings() {
dataSelector.getDialogPanel().setParams();
DataSelectDialog dataSelectDialog = new DataSelectDialog(PamController.getMainFrame(),
this.getDataBlock(), dataSelector, null);
return dataSelectDialog.showDialog();
}
/**
@ -96,17 +102,17 @@ public class ExportTask extends OfflineTask<PamDataUnit<?,?>>{
public boolean canExport(int exportSelection) {
return exporter.getExporter(exportSelection).hasCompatibleUnits(getDataBlock().getUnitClass());
}
@Override
public boolean canRun() {
boolean can = getDataBlock() != null;
if (can) {
//check whether we can export based on the export selection
can = canExport(exporter.getExportParams().exportChoice);
}
return can;
}

View File

@ -465,6 +465,13 @@ public class WavFileExportManager implements PamDataUnitExporter {
@Override
public boolean isNeedsNewFile() {
return false;
}
// hello(){

View File

@ -142,7 +142,7 @@ public class OfflineTaskGroup implements PamSettings {
private ArrayList<OfflineTask> offlineTasks = new ArrayList<OfflineTask>();
private TaskGroupWorker worker;
protected TaskGroupWorker worker;
private OfflineSuperDetFilter superDetectionFilter;
@ -377,7 +377,7 @@ public class OfflineTaskGroup implements PamSettings {
* @author Doug Gillespie
*
*/
class TaskGroupWorker extends SwingWorker<Integer, TaskMonitorData> implements ViewLoadObserver {
public class TaskGroupWorker extends SwingWorker<Integer, TaskMonitorData> implements ViewLoadObserver {
volatile boolean instantKill = false;
@ -437,6 +437,7 @@ public class OfflineTaskGroup implements PamSettings {
/**
* Process all data for a list of time chunks. This is robust to the list
* not being in chronological order.
*
* @param timeChunks - the time chunks.
*/
private void processAllData(ArrayList<long[]> timeChunks){
@ -976,4 +977,6 @@ public class OfflineTaskGroup implements PamSettings {
offlineTasks.clear();
}
}

View File

@ -151,11 +151,15 @@ public class WhistleBinaryDataSource extends BinaryDataSource {
/**
* Bit of mess sorted out on 15/5/2020. Was working because module version went from 1 to 2 at same time
* as file version went from 3 to 4. May have been some middly stuff where file version and module
* as file version went from 3 to 4. May have been some middle stuff where file version and module
* There is some FV 3 with MV 1, in which case data were probably duplicated.
*/
if (fileVersion > 3) { // basic data now in standard format.
firstSliceSample = startSample = binaryObjectData.getDataUnitBaseData().getStartSample();
if (binaryObjectData.getDataUnitBaseData().getStartSample()==null) {
//some very rare circumstances
firstSliceSample =0;
}
else firstSliceSample = startSample = binaryObjectData.getDataUnitBaseData().getStartSample();
// if the DataUnitBaseData contains a sequence map, use it in place of the channel map
if (binaryObjectData.getDataUnitBaseData().getSequenceBitmap()!=null) {