Merge branch 'main' into main

Douglas Gillespie 2023-04-05 19:33:05 +01:00 committed by GitHub
commit 42d560f91b
38 changed files with 2317 additions and 37 deletions

View File

@@ -62,6 +62,8 @@ import Acquisition.sud.SUDNotificationManager;
import Array.ArrayManager;
import Array.PamArray;
import Array.Preamplifier;
import PamController.DataInputStore;
import PamController.InputStoreInfo;
import PamController.OfflineFileDataStore;
import PamController.PamControlledUnit;
import PamController.PamControlledUnitGUI;
@@ -71,6 +73,8 @@ import PamController.PamControllerInterface;
import PamController.PamGUIManager;
import PamController.PamSettingManager;
import PamController.PamSettings;
import PamController.RawInputControlledUnit;
import PamController.fileprocessing.StoreStatus;
import PamModel.PamModel;
import PamModel.SMRUEnable;
import PamUtils.FrequencyFormat;
@@ -100,7 +104,7 @@ import PamguardMVC.dataOffline.OfflineDataLoadInfo;
* @see Acquisition.DaqSystem
*
*/
public class AcquisitionControl extends RawInputControlledUnit implements PamSettings, OfflineFileDataStore, DataInputStore {
protected ArrayList<DaqSystem> systemList;
@@ -849,4 +853,24 @@ public class AcquisitionControl extends PamControlledUnit implements PamSettings
return sudNotificationManager;
}
@Override
public int getRawInputType() {
DaqSystem system = acquisitionProcess.getRunningSystem();
if (system == null) {
return RAW_INPUT_UNKNOWN;
}
else {
return system.isRealTime() ? RAW_INPUT_REALTIME : RAW_INPUT_FILEARCHIVE;
}
}
@Override
public InputStoreInfo getStoreInfo(boolean detail) {
return getDaqProcess().getStoreInfo(detail);
}
@Override
public boolean setAnalysisStartTime(long startTime) {
return getDaqProcess().setAnalysisStartTime(startTime);
}
}

View File

@@ -1,7 +1,9 @@
package Acquisition;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
@@ -19,7 +21,13 @@ import Filters.FilterBand;
import Filters.FilterParams;
import Filters.FilterType;
import Filters.IirfFilter;
import PamController.DataInputStore;
import PamController.InputStoreInfo;
import PamController.OfflineDataStore;
import PamController.PamControlledUnit;
import PamController.PamController;
import PamController.fileprocessing.ReprocessManager;
import PamController.fileprocessing.StoreStatus;
import PamController.status.BaseProcessCheck;
import PamController.status.ProcessCheck;
import PamDetection.RawDataUnit;
@@ -36,7 +44,10 @@ import PamguardMVC.PamProcess;
import PamguardMVC.PamRawDataBlock;
import PamguardMVC.RequestCancellationObject;
import PamguardMVC.dataOffline.OfflineDataLoadInfo;
import dataGram.DatagramManager;
import dataMap.OfflineDataMapPoint;
import pamScrollSystem.AbstractScrollManager;
import pamScrollSystem.ViewLoadObserver;
/**
* Data acquisition process for all types of input device.
@@ -54,7 +65,7 @@ import pamScrollSystem.AbstractScrollManager;
* @see PamguardMVC.PamDataUnit
*
*/
public class AcquisitionProcess extends PamProcess implements DataInputStore {
public static final int LASTDATA = 2; // don't use zero since need to see if no notification has been received.
@@ -523,12 +534,12 @@ public class AcquisitionProcess extends PamProcess {
System.out.printf("Unable to find daq system %s\n", acquisitionControl.acquisitionParameters.daqSystemType);
return;
}
systemPrepared = runningSystem.prepareSystem(acquisitionControl);
}
@Override
public void setSampleRate(float sampleRate, boolean notify) {
acquisitionControl.acquisitionParameters.sampleRate = sampleRate;
@@ -1223,6 +1234,28 @@ public class AcquisitionProcess extends PamProcess {
public PamDataBlock<DaqStatusDataUnit> getDaqStatusDataBlock() {
return daqStatusDataBlock;
}
@Override
public InputStoreInfo getStoreInfo(boolean detail) {
if (runningSystem instanceof DataInputStore) {
return ((DataInputStore) runningSystem).getStoreInfo(detail);
}
else {
return null;
}
}
@Override
public boolean setAnalysisStartTime(long startTime) {
if (runningSystem instanceof DataInputStore) {
return ((DataInputStore) runningSystem).setAnalysisStartTime(startTime);
}
else {
return false;
}
}
}

View File

@@ -558,7 +558,10 @@ public class FileInputSystem extends DaqSystem implements ActionListener, PamSe
// if (fileInputParameters.recentFiles == null) return false;
// if (fileInputParameters.recentFiles.size() < 1) return false;
// String fileName = fileInputParameters.recentFiles.get(0);
if (runFileAnalysis() == false) {
return false;
}
return true;
}
public File getCurrentFile() {
@@ -737,6 +740,8 @@ public class FileInputSystem extends DaqSystem implements ActionListener, PamSe
@Override
public boolean startSystem(AcquisitionControl daqControl) {
if (audioStream == null) return false;
dontStop = true;
@@ -1146,6 +1151,7 @@ public class FileInputSystem extends DaqSystem implements ActionListener, PamSe
protected void fileListComplete() {
if (GlobalArguments.getParam(PamController.AUTOEXIT) != null) {
System.out.println("All sound files processed, PAMGuard can close on " + PamController.AUTOEXIT);
PamController.getInstance().setPamStatus(PamController.PAM_COMPLETE);
PamController.getInstance().batchProcessingComplete();
}
}

View File

@@ -33,6 +33,8 @@ import pamguard.GlobalArguments;
import Acquisition.pamAudio.PamAudioFileManager;
import Acquisition.pamAudio.PamAudioFileFilter;
import Acquisition.pamAudio.PamAudioSystem;
import PamController.DataInputStore;
import PamController.InputStoreInfo;
import PamController.PamControlledUnitSettings;
import PamController.PamController;
import PamController.PamSettings;
@@ -58,7 +60,7 @@ import PamguardMVC.debug.Debug;
* @author Doug Gillespie
*
*/
public class FolderInputSystem extends FileInputSystem implements PamSettings, DataInputStore {
// Timer timer;
public static final String daqType = "File Folder Acquisition System";
@@ -141,6 +143,9 @@ public class FolderInputSystem extends FileInputSystem implements PamSettings{
}
String[] selList = {globalFolder};
// folderInputParameters.setSelectedFiles(selList);
// need to immediately make the allfiles list since it's about to get used by the reprocess manager
// need to worry about how to wait for this since it's starting in a different thread.
//makeSelFileList();
return selList;
}
@@ -840,5 +845,53 @@ public class FolderInputSystem extends FileInputSystem implements PamSettings{
folderInputPane.setParams(folderInputParameters);
}
@Override
public InputStoreInfo getStoreInfo(boolean detail) {
if (allFiles == null || allFiles.size() == 0) {
return null;
}
WavFileType firstFile = allFiles.get(0);
long firstFileStart = getFileStartTime(firstFile.getAbsoluteFile());
WavFileType lastFile = allFiles.get(allFiles.size()-1);
long lastFileStart = getFileStartTime(lastFile.getAbsoluteFile());
lastFile.getAudioInfo();
long lastFileEnd = (long) (lastFileStart + lastFile.getDurationInSeconds()*1000.);
InputStoreInfo storeInfo = new InputStoreInfo(acquisitionControl, allFiles.size(), firstFileStart, lastFileStart, lastFileEnd);
if (detail) {
long[] allFileStarts = new long[allFiles.size()];
for (int i = 0; i < allFiles.size(); i++) {
allFileStarts[i] = getFileStartTime(allFiles.get(i).getAbsoluteFile());
}
storeInfo.setFileStartTimes(allFileStarts);
}
return storeInfo;
}
@Override
public boolean setAnalysisStartTime(long startTime) {
/**
* Called from the reprocess manager just before PAMGuard starts with a time
* we want to process from. This should be equal to the start of one of the files
* so all we have to do (in principle) is to set the currentfile to that index and
* processing will continue from there.
*/
if (allFiles == null || allFiles.size() == 0) {
return false;
}
for (int i = 0; i < allFiles.size(); i++) {
long fileStart = getFileStartTime(allFiles.get(i).getAbsoluteFile());
if (fileStart >= startTime) {
currentFile = i;
PamCalendar.setSoundFile(true);
PamCalendar.setSessionStartTime(startTime);
System.out.printf("Sound Acquisition start processing at file %s time %s\n", allFiles.get(i).getName(),
PamCalendar.formatDBDateTime(fileStart));
return true;
}
}
return false;
}
}

View File

@@ -17,6 +17,7 @@ import Acquisition.pamAudio.PamAudioFileManager;
import Acquisition.pamAudio.PamAudioFileLoader;
import Acquisition.pamAudio.PamAudioFileFilter;
import PamController.OfflineFileDataStore;
import PamController.fileprocessing.StoreStatus;
import PamguardMVC.PamDataBlock;
import PamguardMVC.dataOffline.OfflineDataLoadInfo;
import dataMap.OfflineDataMap;
@@ -188,5 +189,4 @@ public class OfflineWavFileServer extends OfflineFileServer<FileDataMapPoint> {
}
}

View File

@@ -0,0 +1,31 @@
package PamController;
/**
* Functions for a data input store. There is a fair bit of overlap for this and
* OfflineDataStore, but the OfflineDataStore is really about stuff that can provide
* data offline which needs mapping. This is specifically about data which will be input
* during 'normal operation', i.e. sound acquisition and Tritech sonar data
* (a plugin, but coming down the tracks at us all).
* @author dg50
* @see OfflineDataStore
* @see DataOutputStore
*
*/
public interface DataInputStore {
/**
* Get information about the input store (e.g. start times of all files).
* @param detail
* @return information about data input.
*/
public InputStoreInfo getStoreInfo(boolean detail);
/**
* Set an analysis start time. This might get called just before
* processing starts, in which case processing should continue from that time rather than the start of the data.
* @param startTime
* @return ok if no problems.
*/
public boolean setAnalysisStartTime(long startTime);
}
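
A minimal sketch of an implementing class, assuming the module keeps a sorted list of file start times in milliseconds; the class and field names below are hypothetical and not part of this commit:

package PamController;

/**
 * Hypothetical example of a DataInputStore implementation for a file based input.
 */
public class ExampleFileInput implements DataInputStore {

	private long[] fileStartMillis; // assumed sorted start times of each input file
	private long lastFileEndMillis; // assumed end time of the final file
	private int firstFileIndex = 0; // index processing will resume from

	@Override
	public InputStoreInfo getStoreInfo(boolean detail) {
		if (fileStartMillis == null || fileStartMillis.length == 0) {
			return null;
		}
		InputStoreInfo info = new InputStoreInfo(this, fileStartMillis.length,
				fileStartMillis[0], fileStartMillis[fileStartMillis.length - 1], lastFileEndMillis);
		if (detail) {
			info.setFileStartTimes(fileStartMillis);
		}
		return info;
	}

	@Override
	public boolean setAnalysisStartTime(long startTime) {
		// resume from the first file that starts at or after the requested time
		for (int i = 0; i < fileStartMillis.length; i++) {
			if (fileStartMillis[i] >= startTime) {
				firstFileIndex = i;
				return true;
			}
		}
		return false;
	}
}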

View File

@@ -0,0 +1,32 @@
package PamController;
import PamController.fileprocessing.StoreStatus;
/**
* Functions for a data output store. There is a fair bit of overlap for this and
* OfflineDataStore, but the OfflineDataStore is really about stuff that can provide
* data offline which needs mapping. This is specifically about data which will be stored
* during 'normal operation', i.e. binary and database modules.
* @author dg50
* @see OfflineDataStore
* @see DataInputStore
*
*/
public interface DataOutputStore extends OfflineDataStore {
/**
* Get the store status, i.e. does it exist, does it contain data, if so over what date range,
* etc.
* @param getDetail
* @return
*/
public StoreStatus getStoreStatus(boolean getDetail);
/**
* Delete all data from a given time, in all data streams.
* @param timeMillis time to delete from (anything >= this time)
* @return true if it seems to have worked OK. False if any errors (e.g. database or file system error).
*/
public boolean deleteDataFrom(long timeMillis);
}
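
A short usage sketch, assuming a caller such as the reprocess manager already holds a DataOutputStore reference; the helper class and method names here are hypothetical:

package PamController;

import PamController.fileprocessing.StoreStatus;

public class OutputStoreExample {

	/**
	 * Hypothetical helper showing the intended call sequence: check whether the
	 * store already holds data and, if it does, delete everything from a given time.
	 */
	public static boolean clearFrom(DataOutputStore outputStore, long deleteFromMillis) {
		StoreStatus status = outputStore.getStoreStatus(false);
		if (status == null || status.getStoreStatus() != StoreStatus.STATUS_HASDATA) {
			return true; // nothing stored, nothing to delete
		}
		return outputStore.deleteDataFrom(deleteFromMillis);
	}
}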

View File

@@ -0,0 +1,81 @@
package PamController;
import PamUtils.PamCalendar;
public class InputStoreInfo {
private DataInputStore dataInputStore;
private int nFiles;
private long firstFileStart, lastFileStart, lastFileEnd;
private long[] fileStartTimes;
public InputStoreInfo(DataInputStore dataInputStore, int nFiles, long firstFileStart, long lastFileStart, long lastFileEnd) {
super();
this.dataInputStore = dataInputStore;
this.nFiles = nFiles;
this.firstFileStart = firstFileStart;
this.lastFileStart = lastFileStart;
this.lastFileEnd = lastFileEnd;
}
/**
* @return the nFiles
*/
public int getnFiles() {
return nFiles;
}
/**
* @return the firstFileStart
*/
public long getFirstFileStart() {
return firstFileStart;
}
/**
* @return the lastFileStart
*/
public long getLastFileStart() {
return lastFileStart;
}
/**
* @return the lastFileEnd
*/
public long getLastFileEnd() {
return lastFileEnd;
}
@Override
public String toString() {
return String.format("%s: %d files. First start %s, last start %s, last end %s", dataInputStore.getClass().getName(), nFiles,
PamCalendar.formatDBDateTime(firstFileStart), PamCalendar.formatDBDateTime(lastFileStart),
PamCalendar.formatDBDateTime(lastFileEnd));
}
/**
* @return the dataInputStore
*/
public DataInputStore getDataInputStore() {
return dataInputStore;
}
/**
* Set the start times of all files in data set.
* @param allFileStarts
*/
public void setFileStartTimes(long[] allFileStarts) {
this.fileStartTimes = allFileStarts;
}
/**
* @return the fileStartTimes
*/
public long[] getFileStartTimes() {
return fileStartTimes;
}
}

View File

@@ -2,6 +2,7 @@ package PamController;
import java.awt.Window;
import PamController.fileprocessing.StoreStatus;
import dataGram.DatagramManager;
import dataMap.OfflineDataMapPoint;
import pamScrollSystem.ViewLoadObserver;
@@ -52,7 +53,7 @@ public interface OfflineDataStore {
/**
* Moved this function over from binary data store.
* Many storage systems may not be able to do this, but some might !
* @param dataBlock
* @param dmp
* @return
@@ -63,4 +64,5 @@ public interface OfflineDataStore {
* @return the datagramManager
*/
public DatagramManager getDatagramManager();
}

View File

@@ -59,6 +59,7 @@ import PamController.command.MultiportController;
import PamController.command.NetworkController;
import PamController.command.TerminalController;
import PamController.command.WatchdogComms;
import PamController.fileprocessing.ReprocessManager;
import PamController.masterReference.MasterReferencePoint;
import PamController.settings.output.xml.PamguardXMLWriter;
import PamController.settings.output.xml.XMLWriterDialog;
@@ -119,6 +120,7 @@ public class PamController implements PamControllerInterface, PamSettings {
public static final int PAM_STALLED = 3;
public static final int PAM_INITIALISING = 4;
public static final int PAM_STOPPING = 5;
public static final int PAM_COMPLETE = 6;
// status' for RunMode = RUN_PAMVIEW
public static final int PAM_LOADINGDATA = 2;
@@ -705,7 +707,7 @@ public class PamController implements PamControllerInterface, PamSettings {
Platform.exit();
// terminate the JVM
System.exit(getPamStatus());
}
/**
@@ -1021,6 +1023,24 @@ public class PamController implements PamControllerInterface, PamSettings {
}
return foundUnits;
}
/**
* Get an ArrayList of PamControlledUnits of a particular class, optionally including subclasses.
* @param unitClass PamControlledUnit class
* @param includeSubClasses true to also match subclasses of unitClass
* @return List of current instances of this class.
public ArrayList<PamControlledUnit> findControlledUnits(Class unitClass, boolean includeSubClasses) {
if (includeSubClasses == false) {
return findControlledUnits(unitClass);
}
ArrayList<PamControlledUnit> foundUnits = new ArrayList<>();
for (int i = 0; i < getNumControlledUnits(); i++) {
if (unitClass.isAssignableFrom(pamControlledUnits.get(i).getClass())) {
foundUnits.add(pamControlledUnits.get(i));
}
}
return foundUnits;
}
/**
* Check whether a controlled unit exists based on its name.
@@ -1191,6 +1211,23 @@ public class PamController implements PamControllerInterface, PamSettings {
return false;
}
/*
*
* This needs to be called after prepareProcess.
* Now we do some extra checks on the stores to see if we want to overwrite data,
* carry on from where we left off, etc.
*/
if (saveSettings && getRunMode() == RUN_NORMAL) { // only true on a button press or network start.
ReprocessManager reprocessManager = new ReprocessManager();
boolean goonthen = reprocessManager.checkOutputDataStatus();
if (goonthen == false) {
System.out.println("Data processing will not start since you've chosen not to overwrite existing output data");
pamStop();
setPamStatus(PAM_IDLE);
return false;
}
}
if (saveSettings) {
saveSettings(PamCalendar.getSessionStartTime());
}

View File

@@ -0,0 +1,21 @@
package PamController;
public abstract class RawInputControlledUnit extends PamControlledUnit {
public static final int RAW_INPUT_UNKNOWN = 0;
public static final int RAW_INPUT_FILEARCHIVE = 1;
public static final int RAW_INPUT_REALTIME = 2;
public RawInputControlledUnit(String unitType, String unitName) {
super(unitType, unitName);
}
/**
* Type of data input, which can be one of RAW_INPUT_UNKNOWN (0),
* RAW_INPUT_FILEARCHIVE (1), or RAW_INPUT_REALTIME (2)
* @return
*/
public abstract int getRawInputType();
}
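
As a sketch of the intended contract, a hypothetical module that always reads from archived files (not part of this commit) would simply report RAW_INPUT_FILEARCHIVE so that the reprocess manager knows to run its storage checks before a restart:

package PamController;

public class ExampleArchiveInput extends RawInputControlledUnit {

	public ExampleArchiveInput(String unitName) {
		// the unit type string here is hypothetical
		super("Example Archive Input", unitName);
	}

	@Override
	public int getRawInputType() {
		// archived file input, so existing output stores may need checking on restart
		return RAW_INPUT_FILEARCHIVE;
	}
}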

View File

@@ -0,0 +1,116 @@
package PamController.fileprocessing;
import java.awt.BorderLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Window;
import java.util.List;
import javax.swing.ButtonGroup;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.border.TitledBorder;
import PamUtils.PamCalendar;
import PamView.dialog.PamDialog;
import PamView.dialog.PamGridBagContraints;
import PamView.dialog.warn.WarnOnce;
import PamView.panel.PamAlignmentPanel;
public class ReprocessChoiceDialog extends PamDialog {
private static final long serialVersionUID = 1L;
private static ReprocessChoiceDialog singleInstance;
private JRadioButton[] choiceButtons;
private ReprocessStoreChoice chosenChoice = null;
private StoreChoiceSummary choiceSummary;
private ReprocessChoiceDialog(Window parentFrame, StoreChoiceSummary choiceSummary) {
super(parentFrame, "Existing Output Data", false);
this.choiceSummary = choiceSummary;
JPanel mainPanel = new JPanel(new BorderLayout());
JPanel infoPanel = new JPanel(new GridBagLayout());
infoPanel.setBorder(new TitledBorder("Data Summary"));
mainPanel.add(infoPanel, BorderLayout.NORTH);
GridBagConstraints c = new PamGridBagContraints();
c.gridx = c.gridy = 0;
String inStr = String.format("Input data dates: %s to %s", PamCalendar.formatDBDateTime(choiceSummary.getInputStartTime()),
PamCalendar.formatDBDateTime(choiceSummary.getInputEndTime()));
infoPanel.add(new JLabel(inStr), c);
c.gridy++;
String outStr = String.format("Output data dates: %s to %s", PamCalendar.formatDBDateTime(choiceSummary.getOutputStartTime()),
PamCalendar.formatDBDateTime(choiceSummary.getOutputEndTime()));
infoPanel.add(new JLabel(outStr), c);
String stateStr;
if (choiceSummary.isProcessingComplete()) {
stateStr = "Processing appears to be complete";
}
else {
stateStr = "Processing appears to be partially complete";
}
c.gridy++;
infoPanel.add(new JLabel(stateStr), c);
JPanel choicePanel = new PamAlignmentPanel(new GridBagLayout(), BorderLayout.WEST);
choicePanel.setBorder(new TitledBorder("Chose what to do"));
c = new PamGridBagContraints();
mainPanel.add(BorderLayout.SOUTH, choicePanel);
List<ReprocessStoreChoice> userChoices = choiceSummary.getChoices();
choiceButtons = new JRadioButton[userChoices.size()];
ButtonGroup bg = new ButtonGroup();
for (int i = 0; i < userChoices.size(); i++) {
ReprocessStoreChoice aChoice = userChoices.get(i);
choiceButtons[i] = new JRadioButton(aChoice.toString());
choiceButtons[i].setToolTipText(aChoice.getToolTip());
choicePanel.add(choiceButtons[i], c);
c.gridy++;
}
setDialogComponent(mainPanel);
getCancelButton().setVisible(false);
}
public static ReprocessStoreChoice showDialog(Window parentFrame, StoreChoiceSummary choices) {
// if (singleInstance == null || singleInstance.getOwner() != parentFrame) {
singleInstance = new ReprocessChoiceDialog(parentFrame, choices);
// }
singleInstance.setVisible(true);
return singleInstance.chosenChoice;
}
@Override
public boolean getParams() {
List<ReprocessStoreChoice> userChoices = choiceSummary.getChoices();
for (int i = 0; i < choiceButtons.length; i++) {
if (choiceButtons[i].isSelected()) {
chosenChoice = userChoices.get(i);
break;
}
}
if (chosenChoice == ReprocessStoreChoice.OVERWRITEALL) {
String w = "Are you sure you want to delete / overwrite all existing output data ?";
int ans = WarnOnce.showWarning("Overwrite existing data", w, WarnOnce.OK_CANCEL_OPTION);
if (ans == WarnOnce.CANCEL_OPTION) {
return false;
}
}
return chosenChoice != null;
}
@Override
public void cancelButtonPressed() {
chosenChoice = null;
}
@Override
public void restoreDefaultSettings() {
// TODO Auto-generated method stub
}
}

View File

@@ -0,0 +1,277 @@
package PamController.fileprocessing;
import java.util.ArrayList;
import PamController.DataInputStore;
import PamController.DataOutputStore;
import PamController.InputStoreInfo;
import PamController.OfflineDataStore;
import PamController.PamControlledUnit;
import PamController.PamController;
import PamController.PamGUIManager;
import PamController.RawInputControlledUnit;
import PamUtils.PamCalendar;
import PamView.dialog.warn.WarnOnce;
import pamguard.GlobalArguments;
/**
* Set of functions to help decide what to do when reprocessing.
* These are probably all called from AcquisitionProcess, but it's easier to have them in their own class.
* @author dg50
*
*/
public class ReprocessManager {
public ReprocessManager() {
// TODO Auto-generated constructor stub
}
/**
* run checks on the output data storage system. If data already exist in the output
* we may not want to start again.
*/
public boolean checkOutputDataStatus() {
StoreChoiceSummary choiceSummary = null;
if (isOfflineFiles()) {
choiceSummary = checkIOFilesStatus();
}
else {
/*
* don't really need to do anything for real time processing since adding
* more data to existing stores is normal behaviour.
*/
return true;
}
if (choiceSummary == null) {
return true;
}
if (choiceSummary.getInputStoreInfo() == null) {
return true;
}
// need to decide what to do based on the list of possible choices.
ReprocessStoreChoice choice = chosePartStoreAction(choiceSummary);
if (choice == ReprocessStoreChoice.DONTSSTART) {
return false;
}
boolean deleteOK = deleteOldData(choiceSummary, choice);
boolean setupOK = setupInputStream(choiceSummary, choice);
return true;
}
private boolean setupInputStream(StoreChoiceSummary choiceSummary, ReprocessStoreChoice choice) {
// work out the first file index and send it to the appropriate input module.
long deleteFrom = getDeleteFromTime(choiceSummary, choice);
ArrayList<PamControlledUnit> inputStores = PamController.getInstance().findControlledUnits(DataInputStore.class, true);
if (inputStores == null || inputStores.size() == 0) {
return false;
}
InputStoreInfo inputInfo = null;
boolean OK = true;
for (PamControlledUnit aPCU : inputStores) {
DataInputStore inputStore = (DataInputStore) aPCU;
OK &= inputStore.setAnalysisStartTime(deleteFrom);
// System.out.println("Input store info: " + inputInfo);
}
return OK;
}
/**
* Just gets on and does it. The user should already have been asked what they
* want to do, so don't ask again.
* @param choiceSummary
* @param choice
*/
private boolean deleteOldData(StoreChoiceSummary choiceSummary, ReprocessStoreChoice choice) {
long deleteFrom = getDeleteFromTime(choiceSummary, choice);
// go through the data stores and tell them to delete from that time.
if (deleteFrom == Long.MAX_VALUE) {
return false;
}
ArrayList<PamControlledUnit> outputStores = PamController.getInstance().findControlledUnits(DataOutputStore.class, true);
boolean partStores = false;
boolean ok = true;
for (PamControlledUnit aPCU : outputStores) {
DataOutputStore offlineStore = (DataOutputStore) aPCU;
ok &= offlineStore.deleteDataFrom(deleteFrom);
}
return ok;
}
private long getDeleteFromTime(StoreChoiceSummary choiceSummary, ReprocessStoreChoice choice) {
if (choice == null) {
return Long.MAX_VALUE; // I don't think this can happen, but you never know.
}
Long t = null;
switch (choice) {
case CONTINUECURRENTFILE:
t = choiceSummary.getInputTimeForIndex(choiceSummary.getFileIndexBefore(choiceSummary.getOutputEndTime()));
break;
case CONTINUENEXTFILE:
t = choiceSummary.getInputTimeForIndex(choiceSummary.getFileIndexAfter(choiceSummary.getOutputEndTime()));
break;
case DONTSSTART: // we shouldn't get here with this option.
return Long.MAX_VALUE;
case OVERWRITEALL:
return 0; // delete from start.
case STARTNORMAL: // we shouldn't get here with this option.
return Long.MAX_VALUE;
default:
break;
}
if (t == null) {
// shouldn't happen, don't do any deleting
return Long.MAX_VALUE;
}
else {
return t;
}
}
/**
* Check the output of current files and databases and return a flag to PamController saying whether or
* not processing should actually start, possibly overwriting, or if we need to not start to avoid overwriting.
* @return true if processing should start.
*/
private StoreChoiceSummary checkIOFilesStatus() {
/**
* Get information about the input.
*
*/
ArrayList<PamControlledUnit> inputStores = PamController.getInstance().findControlledUnits(DataInputStore.class, true);
if (inputStores == null || inputStores.size() == 0) {
return new StoreChoiceSummary(null, ReprocessStoreChoice.STARTNORMAL);
}
InputStoreInfo inputInfo = null;
for (PamControlledUnit aPCU : inputStores) {
DataInputStore inputStore = (DataInputStore) aPCU;
inputInfo = inputStore.getStoreInfo(true);
// System.out.println("Input store info: " + inputInfo);
}
StoreChoiceSummary choiceSummary = new StoreChoiceSummary(inputInfo);
if (inputInfo == null || inputInfo.getFileStartTimes() == null) {
choiceSummary.addChoice(ReprocessStoreChoice.STARTNORMAL);
return choiceSummary;
}
ArrayList<PamControlledUnit> outputStores = PamController.getInstance().findControlledUnits(DataOutputStore.class, true);
boolean partStores = false;
for (PamControlledUnit aPCU : outputStores) {
DataOutputStore offlineStore = (DataOutputStore) aPCU;
StoreStatus status = offlineStore.getStoreStatus(false);
if (status == null) {
continue;
}
if (status.getStoreStatus() == StoreStatus.STATUS_HASDATA) {
status = offlineStore.getStoreStatus(true); // get more detail.
partStores = true;
System.out.printf("Storage %s already contains some data\n", offlineStore.getDataSourceName());
choiceSummary.testOutputEndTime(status.getLastDataTime());
choiceSummary.testOutputStartTime(status.getFirstDataTime());
}
}
if (partStores == false) {
choiceSummary.addChoice(ReprocessStoreChoice.STARTNORMAL);
return choiceSummary;
}
if (choiceSummary.getInputStartTime() >= choiceSummary.getOutputEndTime()) {
/*
* looks like it's new data that starts after the end of the current store,
* so there is no need to do anything.
*/
choiceSummary.addChoice(ReprocessStoreChoice.STARTNORMAL);
return choiceSummary;
}
/*
* If we land here, it looks like we have overlapping data. so need to make a decision
* First, check to see if processing has actually completed which will be the case if
* the data time and the end of the files are the same.
*/
choiceSummary.addChoice(ReprocessStoreChoice.DONTSSTART);
choiceSummary.addChoice(ReprocessStoreChoice.OVERWRITEALL);
if (choiceSummary.isProcessingComplete() == false) {
choiceSummary.addChoice(ReprocessStoreChoice.CONTINUECURRENTFILE);
choiceSummary.addChoice(ReprocessStoreChoice.CONTINUENEXTFILE);
}
return choiceSummary;
}
/**
* Either opens a dialog to ask the user, or uses a choice entered into the command line for nogui mode.
* Decide what to do with stores that already have data. Can return continue from end or overwrite
* in which case stores will be deleted and we'll start again. The chosen action will need to be
* communicated to the various inputs.
* @param choices
*/
private ReprocessStoreChoice chosePartStoreAction(StoreChoiceSummary choices) {
/**
* Do we really have to deal with multiple inputs ? Can I envisage a situation where there is
* ever more than one input going at any one time ? not really, but should I add code
* to make sure that there really can be only one ? i.e. two daq's would be allowed for real
* time processing, but only one for offline ? could do all I guess by looking at sources of
* all output data blocks and doing it on a case by case basis. All we have to do here though
* is to get an answer about what to do.
*/
// see if we've got a global parameter passed in as an argument
String arg = GlobalArguments.getParam(ReprocessStoreChoice.paramName);
if (arg != null) {
ReprocessStoreChoice choice = ReprocessStoreChoice.valueOf(arg);
if (choice == null) {
String warn = String.format("Reprocessing storage input parameter %s value \"%s\" is not a recognised value", ReprocessStoreChoice.paramName, arg);
WarnOnce.showWarning("Invalid input parameter", warn, WarnOnce.WARNING_MESSAGE);
}
if (choice == ReprocessStoreChoice.CONTINUECURRENTFILE || choice == ReprocessStoreChoice.CONTINUENEXTFILE) {
if (choices.isProcessingComplete()) {
return ReprocessStoreChoice.DONTSSTART;
}
}
return choice;
}
if (PamGUIManager.getGUIType() == PamGUIManager.NOGUI) {
System.out.println("In Nogui mode you should set a choice as to how to handle existing storage overwrites. Using default of overwriting everything");
return ReprocessStoreChoice.OVERWRITEALL;
}
// otherwise we'll need to show a dialog to let the user decide what to do
ReprocessStoreChoice choice = ReprocessChoiceDialog.showDialog(PamController.getMainFrame(), choices);
return choice;
}
/**
* Return true if we seem to be reprocessing offline files.
* Note that this might be the Tritech data as well as the sound acquisition so
* have added an abstract intermediate class on the controlled units so we can check them all.
* @return
*/
public boolean isOfflineFiles() {
ArrayList<PamControlledUnit> sources = PamController.getInstance().findControlledUnits(RawInputControlledUnit.class, true);
if (sources == null) {
return false;
}
for (PamControlledUnit pcu : sources) {
RawInputControlledUnit rawPCU = (RawInputControlledUnit) pcu;
if (rawPCU.getRawInputType() == RawInputControlledUnit.RAW_INPUT_FILEARCHIVE) {
return true;
}
}
return false;
}
}
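
Note that chosePartStoreAction passes the -reprocessoption value straight to ReprocessStoreChoice.valueOf, which throws IllegalArgumentException for unrecognised names rather than returning null. A more tolerant parse, sketched here as a suggestion rather than code from this commit, would be:

	/**
	 * Hypothetical tolerant parse of the -reprocessoption value; Enum.valueOf
	 * throws IllegalArgumentException for unknown names instead of returning null.
	 */
	static ReprocessStoreChoice parseChoice(String arg) {
		if (arg == null) {
			return null;
		}
		try {
			return ReprocessStoreChoice.valueOf(arg);
		}
		catch (IllegalArgumentException e) {
			return null;
		}
	}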

View File

@@ -0,0 +1,53 @@
package PamController.fileprocessing;
/**
* Choices on what to do when re-processing data and finding that output data already exist.
* @author dg50
*
*/
public enum ReprocessStoreChoice {
STARTNORMAL, CONTINUECURRENTFILE, CONTINUENEXTFILE, OVERWRITEALL, DONTSSTART;
public static final String paramName = "-reprocessoption";
@Override
public String toString() {
switch (this) {
case STARTNORMAL:
return "Start normally. No risk of overwriting";
case CONTINUECURRENTFILE:
return "Continue from start of last input file processed";
case CONTINUENEXTFILE:
return "Continue from start of next input file to process";
case DONTSSTART:
return "Don't start processing";
case OVERWRITEALL:
return "Overwrite existing output data";
default:
break;
}
return null;
}
public String getToolTip() {
switch (this) {
case STARTNORMAL:
return "No risk of data overlap, so system will start normally";
case CONTINUECURRENTFILE:
return "System will work out how far data processing has got and continue from the start of the file it stopped in";
case CONTINUENEXTFILE:
return "System will work out how far data processing has got and continue from the start of the file AFTER the one it stopped in";
case DONTSSTART:
return "Processing will not start. Select alternative storage locations / databases and try again";
case OVERWRITEALL:
return "Overwrite existing output data. Existing data will be deleted";
default:
break;
}
return null;
}
}

View File

@@ -0,0 +1,234 @@
package PamController.fileprocessing;
import java.util.ArrayList;
import java.util.List;
import PamController.InputStoreInfo;
/**
* Summary information about the data stores.
* @author dg50
*
*/
public class StoreChoiceSummary {
private long outputEndTime;
private long outputStartTime;
private List<ReprocessStoreChoice> choices = new ArrayList<>();
private InputStoreInfo inputStoreInfo;
public StoreChoiceSummary(InputStoreInfo info, ReprocessStoreChoice singleChoice) {
this.inputStoreInfo = info;
addChoice(singleChoice);
}
public StoreChoiceSummary(long outputEndTime, InputStoreInfo inputStoreInfo) {
super();
this.outputEndTime = outputEndTime;
this.inputStoreInfo = inputStoreInfo;
}
public StoreChoiceSummary(InputStoreInfo inputInfo) {
this.inputStoreInfo = inputInfo;
}
/**
* Get the number of choices. If it's only one, then there
* isn't a lot to do. If it's >1, then need a decision in the
* form of a command line instruction or a user dialog.
* @return number of choices.
*/
public int getNumChoices() {
return choices.size();
}
/**
* Is processing complete, i.e. last time in output matches last time
* in input data.
* @return true if processing appears to be complete.
*/
public boolean isProcessingComplete() {
if (inputStoreInfo == null) {
return false;
}
long inputEnd = getInputEndTime();
long outputEnd = getOutputEndTime();
long diff = inputEnd-outputEnd;
return (diff < 1000);
}
/**
* Add a reasonable choice to what the user can select to do.
* @param choice
*/
public void addChoice(ReprocessStoreChoice choice) {
choices.add(choice);
}
/**
* @return the start time of the first input file
*/
public Long getInputStartTime() {
if (inputStoreInfo == null) {
return null;
}
return inputStoreInfo.getFirstFileStart();
}
/**
* @return the end time of the last input file
*/
public Long getInputEndTime() {
if (inputStoreInfo == null) {
return null;
}
return inputStoreInfo.getLastFileEnd();
}
/**
* @return the outputEndTime
*/
public long getOutputEndTime() {
return outputEndTime;
}
/**
* Set the last data time, but only if the passed value
* is not null and is bigger than the current value.
* @param lastDataTime
* @return largest of current and passed value.
*/
public long testOutputEndTime(Long lastDataTime) {
if (lastDataTime == null) {
return this.getOutputEndTime();
}
setOutputEndTime(Math.max(outputEndTime, lastDataTime));
return getOutputEndTime();
}
/**
* Set the first data time, but only if the passed value
* is not null and is earlier than the current value.
* @param firstDataTime
* @return earliest of current and passed value.
*/
public long testOutputStartTime(Long firstDataTime) {
if (firstDataTime == null) {
return this.getOutputStartTime();
}
if (outputStartTime == 0 || firstDataTime < outputStartTime) {
outputStartTime = firstDataTime;
}
return getOutputStartTime();
}
/**
* @param outputEndTime the outputEndTime to set
*/
public void setOutputEndTime(long outputEndTime) {
this.outputEndTime = outputEndTime;
}
/**
* @return the inputStoreInfo
*/
public InputStoreInfo getInputStoreInfo() {
return inputStoreInfo;
}
/**
* @param inputStoreInfo the inputStoreInfo to set
*/
public void setInputStoreInfo(InputStoreInfo inputStoreInfo) {
this.inputStoreInfo = inputStoreInfo;
}
/**
* @return the choices
*/
public List<ReprocessStoreChoice> getChoices() {
return choices;
}
/**
* @return the outputStartTime
*/
public long getOutputStartTime() {
return outputStartTime;
}
/**
* @param outputStartTime the outputStartTime to set
*/
public void setOutputStartTime(long outputStartTime) {
this.outputStartTime = outputStartTime;
}
/**
* Get the index of the file that starts before or exactly at the given time.
* @param inputEndTime
* @return index of file, or -1 if none found.
*/
public int getFileIndexBefore(Long inputEndTime) {
if (inputStoreInfo == null) {
return -1;
}
long[] fileStarts = inputStoreInfo.getFileStartTimes();
if (fileStarts == null) {
return -1;
}
for (int i = fileStarts.length-1; i>= 0; i--) {
if (fileStarts[i] <= inputEndTime) {
return i;
}
}
return -1;
}
/**
* Get the start time in millis of a file for the given index.
* @param fileIndex
* @return file time, or null if no file available.
*/
public Long getInputTimeForIndex(int fileIndex) {
if (inputStoreInfo == null) {
return null;
}
long[] fileStarts = inputStoreInfo.getFileStartTimes();
if (fileStarts == null) {
return null;
}
if (fileIndex < 0 || fileIndex >= fileStarts.length) {
return null;
}
return fileStarts[fileIndex];
}
/**
* Get the index of the file that starts after the given time.
* @param inputEndTime
* @return index of file, or -1 if none found.
*/
public int getFileIndexAfter(Long inputEndTime) {
if (inputStoreInfo == null) {
return -1;
}
long[] fileStarts = inputStoreInfo.getFileStartTimes();
if (fileStarts == null) {
return -1;
}
for (int i = 0; i < fileStarts.length; i++) {
if (fileStarts[i] > inputEndTime) {
return i;
}
}
return -1;
}
}
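
A worked example with hypothetical times, showing how the two index helpers translate an output end time into the restart times used by the continue options:

package PamController.fileprocessing;

import PamController.InputStoreInfo;

public class ChoiceSummaryExample {

	public static void main(String[] args) {
		// three input files starting at 0 s, 60 s and 120 s; the last file ends at 180 s
		InputStoreInfo info = new InputStoreInfo(null, 3, 0L, 120000L, 180000L);
		info.setFileStartTimes(new long[] {0L, 60000L, 120000L});
		StoreChoiceSummary summary = new StoreChoiceSummary(info);
		summary.setOutputEndTime(90000L); // output store stopped part way through the second file
		// CONTINUECURRENTFILE: restart at the file processing stopped in -> index 1, 60000 ms
		System.out.println(summary.getInputTimeForIndex(summary.getFileIndexBefore(summary.getOutputEndTime())));
		// CONTINUENEXTFILE: restart at the following file -> index 2, 120000 ms
		System.out.println(summary.getInputTimeForIndex(summary.getFileIndexAfter(summary.getOutputEndTime())));
	}
}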

View File

@@ -0,0 +1,115 @@
package PamController.fileprocessing;
import java.io.File;
import PamController.OfflineDataStore;
/**
* Class to carry information about an OfflineDataStore. Used when restarting offline
* processing to help work out if we should overwrite, start again, etc.
* @author dg50
*
*/
abstract public class StoreStatus {
public static final int STATUS_MISSING = 1;
public static final int STATUS_EMPTY = 2;
public static final int STATUS_HASDATA = 3;
private OfflineDataStore offlineDataStore;
/**
* Time of first data, may be null if detail not asked for or if
* hasData is false.
*/
private Long firstDataTime;
/**
* Time of last data, may be null if detail not asked for or if
* hasData is false.
*/
private Long lastDataTime;
/**
* General status flag.
*/
private int storeStatus;
public StoreStatus(OfflineDataStore offlineDataStore) {
this.offlineDataStore = offlineDataStore;
}
/**
* Get the amount of free space for this storage.
* @return free space in bytes.
*/
public abstract long getFreeSpace();
public long getFreeSpace(String currDir) {
if (currDir == null) {
return 0;
}
File dirFile = new File(currDir);
long space = 0;
try {
space = dirFile.getUsableSpace();
}
catch (SecurityException e) {
System.out.printf("Security exception getting space for %s: \n%s\n", currDir, e.getMessage());
}
return space;
}
/**
* @return the firstDataTime
*/
public Long getFirstDataTime() {
return firstDataTime;
}
/**
* @param firstDataTime the firstDataTime to set
*/
public void setFirstDataTime(Long firstDataTime) {
this.firstDataTime = firstDataTime;
}
/**
* @return the lastDataTime
*/
public Long getLastDataTime() {
return lastDataTime;
}
/**
* @param lastDataTime the lastDataTime to set
*/
public void setLastDataTime(Long lastDataTime) {
this.lastDataTime = lastDataTime;
}
/**
* @return the storeStatus
*/
public int getStoreStatus() {
return storeStatus;
}
/**
* @param storeStatus the storeStatus to set
*/
public void setStoreStatus(int storeStatus) {
this.storeStatus = storeStatus;
}
/**
* @return the offlineDataStore
*/
public OfflineDataStore getOfflineDataStore() {
return offlineDataStore;
}
}
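
A minimal sketch of a concrete status object for a file based store, assuming the store exposes its output folder; the class and field here are hypothetical:

package PamController.fileprocessing;

import PamController.OfflineDataStore;

public class ExampleStoreStatus extends StoreStatus {

	private String storeFolder; // assumed folder that the store writes into

	public ExampleStoreStatus(OfflineDataStore offlineDataStore, String storeFolder) {
		super(offlineDataStore);
		this.storeFolder = storeFolder;
	}

	@Override
	public long getFreeSpace() {
		// delegate to the base class helper, which handles null paths and security exceptions
		return getFreeSpace(storeFolder);
	}
}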

View File

@@ -460,8 +460,6 @@ final public class PamModel implements PamModelInterface, PamSettings {
mi.setModulesMenuGroup(utilitiesGroup);
mi.setMaxNumber(1);
/*
* ************* End Utilities Group *******************
*/

View File

@@ -55,9 +55,19 @@ public class WavFileType extends File {
* @return the audioInfo
*/
public AudioFormat getAudioInfo() {
if (audioInfo == null) {
audioInfo = getAudioFormat();
}
return audioInfo;
}
/**
* Get the audio format.
* @return the audio format.
*/
private AudioFormat getAudioFormat() {
return getAudioFormat(this);
}
/**
* Get the audio format.

View File

@@ -5,6 +5,7 @@ import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import javax.swing.JComponent;
import javax.swing.JMenuItem;
@@ -49,6 +50,14 @@ public abstract class DataBlockTableView<T extends PamDataUnit> {
private SwingTableColumnWidths columnWidths;
/**
* Most work will run off a copy of the data.
* Makes it easier to include data selectors, etc.
*/
private ArrayList<T> dataUnitCopy;
private Object copySynch = new Object();
public DataBlockTableView(PamDataBlock<T> pamDataBlock, String displayName) {
this.pamDataBlock = pamDataBlock;
this.displayName = displayName;
@@ -141,6 +150,9 @@ public abstract class DataBlockTableView<T extends PamDataUnit> {
String tip = null;
java.awt.Point p = e.getPoint();
int rowIndex = rowAtPoint(p);
if (rowIndex < 0) {
return null;
}
int colIndex = columnAtPoint(p);
int realColumnIndex = convertColumnIndexToModel(colIndex);
T dataUnit = getDataUnit(rowIndex);
@@ -201,10 +213,14 @@ public abstract class DataBlockTableView<T extends PamDataUnit> {
* @return data unit for the table row.
*/
private final T getDataUnit(int tableRow) {
synchronized (copySynch) {
int rowIndex = getDataIndexForRow(tableRow);
if (rowIndex < 0) return null;
if (dataUnitCopy == null) {
return null;
}
return dataUnitCopy.get(tableRow);
// return pamDataBlock.getDataUnit(rowIndex, PamDataBlock.REFERENCE_CURRENT);
}
}
@@ -212,10 +228,13 @@ public abstract class DataBlockTableView<T extends PamDataUnit> {
* Get the number of rows in the table - default behaviour is the
* number of rows in the datablock, but this may be overridden if
* data are being selected in a different way.
* @return number of table rows to show.
*/
public int getRowCount() {
if (dataUnitCopy == null) {
return 0;
}
return dataUnitCopy.size();
}
/**
@@ -227,7 +246,10 @@ public abstract class DataBlockTableView<T extends PamDataUnit> {
* @return
*/
public int getDataIndexForRow(int tableRow) {
if (dataUnitCopy == null) {
return tableRow;
}
int nRow = dataUnitCopy.size();
if (!isViewer) {
tableRow = nRow-tableRow-1;
}
@@ -244,12 +266,14 @@
@Override
public void addData(PamObservable o, PamDataUnit arg) {
DataBlockTableView.this.updatePamData();
// blockTableModel.fireTableDataChanged();
}
@Override
public void updateData(PamObservable observable, PamDataUnit pamDataUnit) {
DataBlockTableView.this.updatePamData();
// blockTableModel.fireTableDataChanged();
}
@Override
@@ -258,6 +282,15 @@
}
}
private void updatePamData() {
synchronized (copySynch) {
dataUnitCopy = pamDataBlock.getDataCopy();
}
blockTableModel.fireTableDataChanged();
}
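
The snapshot approach introduced here is a general pattern: the Swing table model only ever reads from a copy taken under a small private lock, so cell renderers never race the thread that owns the live data. A generic illustration, not PAMGuard code:

import java.util.ArrayList;
import java.util.List;
import javax.swing.table.AbstractTableModel;

class SnapshotTableModel<T> extends AbstractTableModel {

	private final Object copyLock = new Object();
	private List<T> snapshot = new ArrayList<>();

	/** Call when the live data change (ideally on the Swing thread): copy, then notify the table. */
	public void updateSnapshot(List<T> liveData) {
		synchronized (copyLock) {
			snapshot = new ArrayList<>(liveData);
		}
		fireTableDataChanged();
	}

	@Override
	public int getRowCount() {
		synchronized (copyLock) {
			return snapshot.size();
		}
	}

	@Override
	public int getColumnCount() {
		return 1; // single column for the illustration
	}

	@Override
	public Object getValueAt(int rowIndex, int columnIndex) {
		synchronized (copyLock) {
			return rowIndex < snapshot.size() ? snapshot.get(rowIndex) : null;
		}
	}
}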
private class MouseAction extends MouseAdapter {
/* (non-Javadoc)
@@ -304,7 +337,11 @@
* @return Array of multiple rows selected.
*/
public T[] getMultipleSelectedRows() {
if (dataUnitCopy == null) {
return null;
}
// synchronized(pamDataBlock.getSynchLock()) { // synch not needed with data copy.
synchronized (copySynch) {
int[] selRows = testTable.getSelectedRows();
if (selRows == null) {
return null;
@@ -337,12 +374,14 @@
@Override
public void scrollValueChanged(AbstractPamScroller abstractPamScroller) {
// blockTableModel.fireTableDataChanged();
updatePamData();
}
@Override
public void scrollRangeChanged(AbstractPamScroller pamScroller) {
// blockTableModel.fireTableDataChanged();
updatePamData();
}
}

View File

@@ -46,6 +46,7 @@ import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.io.comparator.NameFileComparator;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
@@ -391,7 +392,7 @@ public class binaryUIDFunctions {
// System.out.println("Warning - no " + filePrefix + " binary files found in " + binStore.getBinaryStoreSettings().getStoreLocation());
return maxUID;
}
Collections.sort(binFiles, NameFileComparator.NAME_COMPARATOR);
// loop through the binary files from the last one to the first, and stop as
// soon as we find
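
The sort above now uses Apache Commons IO's name comparator, which orders File objects by file name alone rather than by their full paths. A small self-contained illustration (the file names are made up):

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.io.comparator.NameFileComparator;

public class NameSortExample {

	public static void main(String[] args) {
		List<File> files = new ArrayList<>();
		files.add(new File("binary/20230102/Clicks_20230102_000000.pgdf"));
		files.add(new File("binary/20230101/Clicks_20230101_120000.pgdf"));
		// NAME_COMPARATOR compares by file name only, ignoring the parent directories
		Collections.sort(files, NameFileComparator.NAME_COMPARATOR);
		System.out.println(files.get(0).getName()); // Clicks_20230101_120000.pgdf
	}
}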

View File

@@ -177,6 +177,7 @@ public class RWEProcess extends PamProcess {
minSoundType = rweControl.rweParameters.minSoundType;
classifier.setSoundData(getSampleRate(), sourceDataBlock.getFftLength(),
sourceDataBlock.getFftHop());
System.out.println("Create right whale channel process " + iChannel);
}

View File

@@ -117,6 +117,8 @@ public class BinaryOutputStream {
File outputFile = new File(mainFileName);
boolean open = openPGDFFile(outputFile);
// System.out.println("Open outout file " + outputFile.getAbsolutePath());
if (open) {
addToDataMap(outputFile);
@@ -260,6 +262,7 @@ public class BinaryOutputStream {
public synchronized boolean closeFile() {
boolean ok = true;
// System.out.println("Close output file " + mainFileName);
if (dataOutputStream != null) {
if (currentDataMapPoint != null) {
currentDataMapPoint.setBinaryFooter(footer);

View File

@@ -36,6 +36,8 @@ import dataGram.DatagramManager;
import dataMap.OfflineDataMap;
import dataMap.OfflineDataMapPoint;
import PamController.AWTScheduler;
import PamController.DataInputStore;
import PamController.DataOutputStore;
import PamController.OfflineDataStore;
import PamController.PamControlledUnit;
import PamController.PamControlledUnitGUI;
@@ -49,6 +51,7 @@ import PamController.PamSettingsGroup;
import PamController.PamSettingsSource;
import PamController.StorageOptions;
import PamController.StorageParameters;
import PamController.fileprocessing.StoreStatus;
import PamController.status.ModuleStatus;
import PamController.status.QuickRemedialAction;
import PamModel.SMRUEnable;
@@ -89,7 +92,7 @@ import binaryFileStorage.layoutFX.BinaryStoreGUIFX;
*
*/
public class BinaryStore extends PamControlledUnit implements PamSettings,
PamSettingsSource, DataOutputStore {
public static final String fileType = "pgdf";
@@ -1113,6 +1116,8 @@ PamSettingsSource, OfflineDataStore {
String lastFailedStream = null;
private BinaryStoreStatusFuncs binaryStoreStatusFuncs;
public boolean removeMapPoint(File aFile, ArrayList<PamDataBlock> streams) {
BinaryHeaderAndFooter bhf = getFileHeaderAndFooter(aFile);
if (bhf == null || bhf.binaryHeader == null) {
@@ -1354,6 +1359,26 @@ PamSettingsSource, OfflineDataStore {
}
return newFile;
}
/**
* Find the noise file to match a given data file.
* @param dataFile data file.
* @param checkExists check the file exists and if it doesn't return null
* @return index file to go with the data file.
*/
public File findNoiseFile(File dataFile, boolean checkExists) {
// String filePath = dataFile.getAbsolutePath();
// // check that the last 4 characters are "pgdf"
// int pathLen = filePath.length();
// String newPath = filePath.substring(0, pathLen-4) + indexFileType;
File newFile = swapFileType(dataFile, noiseFileType);
if (checkExists) {
if (newFile.exists() == false) {
return null;
}
}
return newFile;
}
/**
* Create an index file (pgdx) name from a data file (pgdf) file name
@@ -1488,7 +1513,7 @@ PamSettingsSource, OfflineDataStore {
* @param folder folder to search
* @param filter file filter
*/
public void listDataFiles(ArrayList<File> fileList, File folder, PamFileFilter filter) {
File[] newFiles = folder.listFiles(filter);
if (newFiles == null) {
return;
@@ -1684,7 +1709,7 @@ PamSettingsSource, OfflineDataStore {
* @param binaryObjectData
* @param dataSink
*/
protected void unpackAnnotationData(int fileVersion, PamDataUnit createdUnit, BinaryObjectData binaryObjectData, BinaryDataSink dataSink) {
//System.out.println("Hello annotation " + binaryObjectData.getAnnotationDataLength());
if (binaryObjectData.getAnnotationDataLength() == 0) {
@@ -2211,7 +2236,7 @@ PamSettingsSource, OfflineDataStore {
boolean reportError(String string) {
System.out.println(string);
return false;
}
@@ -2379,6 +2404,7 @@ PamSettingsSource, OfflineDataStore {
}
else {
reportError("No valid header in file " + file.getAbsolutePath());
inputStream.closeFile();
return null;
}
@@ -2509,4 +2535,17 @@ PamSettingsSource, OfflineDataStore {
return binaryStoreSettings.getNoiseStoreType();
}
@Override
public StoreStatus getStoreStatus(boolean getDetail) {
if (binaryStoreStatusFuncs == null) {
binaryStoreStatusFuncs = new BinaryStoreStatusFuncs(this);
}
return binaryStoreStatusFuncs.getStoreStatus(getDetail);
}
@Override
public boolean deleteDataFrom(long timeMillis) {
BinaryStoreDeleter storeDeleter = new BinaryStoreDeleter(this);
return storeDeleter.deleteDataFrom(timeMillis);
}
}

View File

@@ -0,0 +1,369 @@
package binaryFileStorage;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.io.comparator.NameFileComparator;
import PamUtils.PamCalendar;
import PamUtils.PamFileFilter;
import PamguardMVC.DataUnitBaseData;
import PamguardMVC.PamDataBlock;
import PamguardMVC.PamDataUnit;
public class BinaryStoreDeleter {
private static final int FILE_DELETE_ERROR = 1;
private static final int FILE_TOO_EARLY = 2;
private static final int FILE_DELETED = 3;
private static final int FILE_PARTIAL_DELETE = 4;
private BinaryStore binaryStore;
private FileFilter directoryFilter;
private BinaryStoreStatusFuncs binaryStoreStatusFuncs;
public BinaryStoreDeleter(BinaryStore binaryStore) {
this.binaryStore = binaryStore;
directoryFilter = new DirectoryFilter();
binaryStoreStatusFuncs = new BinaryStoreStatusFuncs(binaryStore);
}
public boolean deleteDataFrom(long timeMillis) {
if (timeMillis == 0) {
return deleteEverything();
}
else {
return deleteFrom(timeMillis);
}
}
private class DirectoryFilter implements java.io.FileFilter {
@Override
public boolean accept(File pathname) {
return pathname.isDirectory();
}
}
private boolean deleteEverything() {
ArrayList<File> fileList = new ArrayList<File>();
String root = binaryStore.binaryStoreSettings.getStoreLocation();
if (root == null) {
return false;
}
File rootFolder = new File(root);
PamFileFilter binaryDataFilter = new PamFileFilter("Binary Data Files", BinaryStore.fileType);
binaryDataFilter.addFileType(BinaryStore.indexFileType);
binaryDataFilter.addFileType(BinaryStore.noiseFileType);
binaryDataFilter.setAcceptFolders(true);
binaryStore.listDataFiles(fileList, rootFolder, binaryDataFilter);
int errors = 0;
for (File aFile : fileList) {
try {
if (aFile.delete() == false) {
errors++;
}
}
catch (Exception e) {
errors++;
}
}
deleteEmptyFolders();
return errors == 0;
}
private boolean deleteFrom(long timeMillis) {
/*
* need to go through the data one stream at a time so that
* we can pick files off from the end of the list.
*/
ArrayList<PamDataBlock> streams = BinaryStore.getStreamingDataBlocks(true);
int errors = 0;
for (PamDataBlock aBlock : streams) {
boolean ok = deleteFrom(aBlock, timeMillis);
if (!ok) {
errors++;
}
}
deleteEmptyFolders();
return errors == 0;
}
private boolean deleteFrom(PamDataBlock aBlock, long timeMillis) {
System.out.printf("Deleting binary data for %s from %s\n", aBlock.getDataName(), PamCalendar.formatDBDateTime(timeMillis));
BinaryDataSource dataSource = aBlock.getBinaryDataSource();
if (dataSource == null) {
return true; // don't see how this can happen.
}
// first deal with pgdf and pgdx files, then noise.
String filePrefix = dataSource.createFilenamePrefix();
List<File> binFiles = binaryStore.listAllFilesWithPrefix(filePrefix);
if (binFiles == null || binFiles.isEmpty()) {
return true; // nothing to delete.
}
Collections.sort(binFiles, NameFileComparator.NAME_COMPARATOR);
for (int i = binFiles.size()-1; i >= 0; i--) {
int ans = deleteFileFrom(aBlock, binFiles.get(i), timeMillis);
if (ans == FILE_TOO_EARLY) {
break;
}
}
return true;
}
/**
* Delete a specific file from a specific time. If the start of the file
* is after timeMillis, delete the entire file, otherwise it will have
* to be a partial delete.
* @param aBlock
* @param file
* @param timeMillis
* @return one of FILE_DELETED, FILE_DELETE_ERROR, FILE_TOO_EARLY or FILE_PARTIAL_DELETE
*/
private int deleteFileFrom(PamDataBlock aBlock, File dataFile, long timeMillis) {
File indexFile = binaryStore.findIndexFile(dataFile, true);
if (indexFile == null) {
indexFile = dataFile;
}
File noiseFile = binaryStore.findNoiseFile(dataFile, true);
// get the header.
boolean headOk = false;
BinaryHeader binaryHead = new BinaryHeader();
try {
FileInputStream fis = new FileInputStream(indexFile);
DataInputStream dis = new DataInputStream(new BufferedInputStream(fis));
headOk = binaryHead.readHeader(dis);
fis.close();
}
catch (IOException e) {
headOk = false;
}
if (headOk == false || binaryHead.getDataDate() >= timeMillis) {
boolean deleteOk = deleteFileSet(dataFile);
return deleteOk ? FILE_DELETED : FILE_DELETE_ERROR;
}
/**
* Now need to see if the file is earlier than we want, in which case we return
* immediately and won't look at any more files.
*/
BinaryFooter fileEnd = binaryStoreStatusFuncs.findLastData(dataFile);
if (fileEnd == null) {
// the file has no footer and no data, so must be corrupt, so delete it.
boolean deleteOk = deleteFileSet(dataFile);
return deleteOk ? FILE_DELETED : FILE_DELETE_ERROR;
}
if (fileEnd.getDataDate() <= timeMillis) {
/*
* this file is earlier than our delete time, so we don't want to delete it
* and need to send a message saying not to delete anything else either.
*/
return FILE_TOO_EARLY;
}
/**
* If we land here, we need to partially delete a file / set of data and noise
* files, which means rewriting the data and updating the index file. How to handle a
* serialized datamap is still an open question.
*/
partialCopyFile(aBlock, dataFile, timeMillis);
if (indexFile != null) {
partialCopyFile(aBlock, indexFile, timeMillis);
}
if (noiseFile != null) {
partialCopyFile(aBlock, noiseFile, timeMillis);
}
return FILE_PARTIAL_DELETE;
}
private boolean partialCopyFile(PamDataBlock aBlock, File dataFile, long timeMillis) {
System.out.printf("Partial delete of file %s from %s\n", dataFile.getAbsoluteFile(), PamCalendar.formatDBDateTime(timeMillis));
try {
BinaryInputStream inputStream = new BinaryInputStream(binaryStore, aBlock);
if (inputStream.openFile(dataFile) == false) {
return false;
}
BinaryDataSource dataSource = aBlock.getBinaryDataSource();
File tempFile = new File(dataFile.getAbsolutePath() + ".tmp");
BinaryOutputStream outputStream = new BinaryOutputStream(binaryStore, aBlock);
dataSource.setBinaryStorageStream(outputStream);
BinaryObjectData binaryObjectData;
BinaryHeader bh = inputStream.readHeader();
if (bh==null) {
return false;
}
outputStream.writeHeader(bh.getDataDate(), bh.getAnalysisDate());
ModuleHeader mh = null;
BinaryFooter bf = null;
int inputFormat = bh.getHeaderFormat();
while ((binaryObjectData = inputStream.readNextObject(inputFormat)) != null) {
switch (binaryObjectData.getObjectType()) {
case BinaryTypes.FILE_FOOTER:
// this is unlikely to happen, since we'll probably already have found an index file.
bf = new BinaryFooter();
bf.readFooterData(binaryObjectData.getDataInputStream(), inputFormat);
bf.setDataDate(timeMillis);
outputStream.writeFileFooter(bf);
break;
case BinaryTypes.MODULE_HEADER:
mh = dataSource.sinkModuleHeader(binaryObjectData, bh);
outputStream.writeModuleHeader();
break;
case BinaryTypes.MODULE_FOOTER:
ModuleFooter mf = dataSource.sinkModuleFooter(binaryObjectData, bh, mh);
outputStream.writeModuleFooter();
break;
case BinaryTypes.DATAGRAM:
// dataSource.
break;
default: // should be data.
DataUnitBaseData baseData = binaryObjectData.getDataUnitBaseData();
if (baseData == null) {
continue;
}
if (baseData.getTimeMilliseconds() > timeMillis) {
continue;
}
/*
* otherwise we need to keep this data unit. We could probably copy the existing
* binary data straight into the new file, though that might upset the datagram slightly
* (the datagram only lives in the index file and can sort itself out).
* It is safer to rebuild the data unit and then rewrite it.
*/
PamDataUnit dataUnit = dataSource.sinkData(binaryObjectData, bh, inputFormat);
if (dataUnit != null) {
dataUnit.getBasicData().mergeBaseData(binaryObjectData.getDataUnitBaseData());
binaryStore.unpackAnnotationData(bh.getHeaderFormat(), dataUnit, binaryObjectData, null);
dataSource.saveData(dataUnit);
}
}
}
outputStream.closeFile();
inputStream.closeFile();
/*
* Now the final stage - move the temp file into the place of the
* original file.
*/
boolean deletedOld = false;
try {
deletedOld = dataFile.delete();
}
catch (SecurityException e) {
System.out.println("Error deleting old pgdf file: " + dataFile.getAbsolutePath());
e.printStackTrace();
}
boolean renamedNew = false;
try {
renamedNew = tempFile.renameTo(dataFile);
}
catch (SecurityException e) {
System.out.println("Error renaming new pgdf file: " + tempFile.getAbsolutePath() +
" to " + dataFile.getAbsolutePath());
e.printStackTrace();
}
if (renamedNew == false) {
if (deletedOld == false) {
binaryStore.reportError("Unable to delete " + dataFile.getAbsolutePath());
}
return binaryStore.reportError(String.format("Unable to rename %s to %s",
tempFile.getAbsolutePath(), dataFile.getAbsolutePath()));
}
return true;
}
catch (Exception ex) {
return false;
}
}
/**
* Delete a set of files, including main data file, index file and noise file.
* @param dataFile
* @return true if the data file and any existing index and noise files were all deleted
*/
private boolean deleteFileSet(File dataFile) {
System.out.printf("Deleting full file set for %s\n", dataFile.getAbsoluteFile());
boolean deleteOk = true;
try {
File indexFile = binaryStore.findIndexFile(dataFile, true);
File noiseFile = binaryStore.findNoiseFile(dataFile, true);
deleteOk &= dataFile.delete();
if (indexFile != null) {
deleteOk &= indexFile.delete();
}
if (noiseFile != null) {
deleteOk &= noiseFile.delete();
}
}
catch (Exception e) {
deleteOk = false;
}
System.out.printf("Deleting full file set %s for %s\n", deleteOk?"OK":"Error", dataFile.getAbsoluteFile());
return deleteOk;
}
private void deleteEmptyFolders() {
String root = binaryStore.binaryStoreSettings.getStoreLocation();
if (root == null) {
return;
}
/**
* Iterate through the root folder first and then call a
* recursive function to delete sub folders. this will stop the
* root folder from being deleted, but sub folders will get deleted if
* they have no files (of any type) in them.
*/
File rootFolder = new File(root);
File[] subFolders = rootFolder.listFiles(directoryFilter);
if (subFolders == null) {
return;
}
for (int i = 0; i < subFolders.length; i++) {
deleteEmptyFolders(subFolders[i]);
}
}
private void deleteEmptyFolders(File file) {
File[] subFolders = file.listFiles(directoryFilter);
if (subFolders != null) {
for (int i = 0; i < subFolders.length; i++) {
deleteEmptyFolders(subFolders[i]);
}
}
// see if there is anything at all in this folder
File[] remaining = file.listFiles();
if (remaining != null && remaining.length == 0) {
try {
file.delete();
}
catch (Exception e) {
System.out.printf("Binary folder %s cannot be deleted: %s\n", file.getAbsolutePath(), e.getMessage());
}
}
}
}

View File

@ -0,0 +1,166 @@
package binaryFileStorage;
import PamController.fileprocessing.StoreStatus;
public class BinaryStoreStatus extends StoreStatus {
private BinaryStore binaryStore;
private BinaryHeader firstHeader;
private BinaryFooter lastFooter;
private BinaryFooter lastData;
public BinaryStoreStatus(BinaryStore binaryStore) {
super(binaryStore);
this.binaryStore = binaryStore;
}
public BinaryStoreStatus(BinaryStore binaryStore, BinaryHeader firstHead, BinaryFooter lastFoot,
BinaryFooter lastData) {
super(binaryStore);
this.binaryStore = binaryStore;
this.firstHeader = firstHead;
this.lastFooter = lastFoot;
this.lastData = lastData;
}
@Override
public Long getFirstDataTime() {
if (firstHeader != null) {
return firstHeader.getDataDate();
}
return null;
}
@Override
public Long getLastDataTime() {
if (lastData != null) {
return lastData.getDataDate();
}
if (lastFooter != null) {
return lastFooter.getDataDate();
}
return null;
}
/**
* @return the firstHeader
*/
public BinaryHeader getFirstHeader() {
return firstHeader;
}
/**
* @param firstHeader the firstHeader to set
*/
public void setFirstHeader(BinaryHeader firstHeader) {
this.firstHeader = firstHeader;
if (firstHeader != null) {
setFirstDataTime(firstHeader.getDataDate());
}
else {
setFirstDataTime(null);
}
}
/**
* @return the lastFooter
*/
public BinaryFooter getLastFooter() {
return lastFooter;
}
/**
* @param lastFooter the lastFooter to set
*/
public void setLastFooter(BinaryFooter lastFooter) {
this.lastFooter = lastFooter;
}
/**
* @return the lastData
*/
public BinaryFooter getLastData() {
return lastData;
}
/**
* @param lastData the lastData to set
*/
public void setLastData(BinaryFooter lastData) {
this.lastData = lastData;
if (lastData != null) {
setLastDataTime(lastData.getDataDate());
}
else {
setLastDataTime(null);
}
}
@Override
public long getFreeSpace() {
return getFreeSpace(binaryStore.getBinaryStoreSettings().getStoreLocation());
}
/**
* Looking overall for first header, last footers, etc.
* @param blockStatus
*/
public void considerBlockStatus(BinaryStoreStatus blockStatus) {
considerFirstHeader(blockStatus.firstHeader);
considerLastFooter(blockStatus.lastFooter);
considerLastData(blockStatus.lastData);
}
/**
* Take a footer for last data with the later date
* @param footer
*/
private void considerLastData(BinaryFooter footer) {
if (footer == null || footer.getDataDate() == 0) {
return;
}
if (lastData == null || lastData.getDataDate() == 0) {
lastData = footer;
}
if (footer.getDataDate() > lastData.getDataDate()) {
lastData = footer;
}
}
/**
* Take a footer for last footer with the later date
* @param footer
*/
private void considerLastFooter(BinaryFooter footer) {
if (footer == null || footer.getDataDate() == 0) {
return;
}
if (lastFooter == null || lastFooter.getDataDate() == 0) {
lastFooter = footer;
}
if (footer.getDataDate() > lastFooter.getDataDate()) {
lastFooter = footer;
}
}
/**
* Take a header for the first header with the earliest date.
* @param header
*/
private void considerFirstHeader(BinaryHeader header) {
if (header == null || header.getDataDate() == 0) {
return;
}
if (this.firstHeader == null || firstHeader.getDataDate() == 0) {
this.firstHeader = header;
}
if (header.getDataDate() < firstHeader.getDataDate()) {
firstHeader = header;
}
}
}
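/*
 * Illustrative sketch, not part of the class above: once per-stream statuses have been
 * merged with considerBlockStatus, the overall span of the store can be read back.
 * Times may be null if no stream had a usable header, footer or data unit.
 */
static void printStoreSpan(BinaryStoreStatus overallStatus) {
	Long first = overallStatus.getFirstDataTime();
	Long last = overallStatus.getLastDataTime();
	if (first == null || last == null) {
		System.out.println("Binary store contains no usable data times");
		return;
	}
	System.out.printf("Binary store spans %s to %s\n",
			PamUtils.PamCalendar.formatDBDateTime(first),
			PamUtils.PamCalendar.formatDBDateTime(last));
}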

View File

@ -0,0 +1,284 @@
package binaryFileStorage;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.io.comparator.NameFileComparator;
import PamController.fileprocessing.StoreStatus;
import PamUtils.PamFileFilter;
import PamguardMVC.DataUnitBaseData;
import PamguardMVC.PamDataBlock;
/**
* Set of functions used at restarts to determine the status of the binary store.
* @author dg50
*
*/
public class BinaryStoreStatusFuncs {
private BinaryStore binaryStore;
public BinaryStoreStatusFuncs(BinaryStore binaryStore) {
this.binaryStore = binaryStore;
}
public StoreStatus getStoreStatus(boolean getDetail) {
BinaryStoreStatus binStoreStatus = new BinaryStoreStatus(binaryStore);
binStoreStatus.setStoreStatus(checkStoreStatus());
if (getDetail && binStoreStatus.getStoreStatus() == StoreStatus.STATUS_HASDATA) {
binStoreStatus = getStoreDetail(binStoreStatus);
}
return binStoreStatus;
}
private BinaryStoreStatus getStoreDetail(BinaryStoreStatus binStoreStatus) {
// go through every stream and find its first and last data times.
long lastTime = Long.MIN_VALUE;
long firstTime = Long.MAX_VALUE;
ArrayList<PamDataBlock> streams = BinaryStore.getStreamingDataBlocks(true);
for (PamDataBlock aBlock : streams) {
BinaryDataSource dataSource = aBlock.getBinaryDataSource();
if (dataSource == null) {
continue;
}
BinaryStoreStatus blockStatus = getStreamStartEnd(dataSource);
binStoreStatus.considerBlockStatus(blockStatus);
}
return binStoreStatus;
}
private BinaryStoreStatus getStreamStartEnd(BinaryDataSource dataSource) {
String filePrefix = dataSource.createFilenamePrefix();
List<File> binFiles = binaryStore.listAllFilesWithPrefix(filePrefix);
if (binFiles == null || binFiles.isEmpty()) {
return null;
}
Collections.sort(binFiles, NameFileComparator.NAME_COMPARATOR);
BinaryHeader firstHead = findFirstHeader(binFiles);
BinaryFooter lastFoot = findLastFooter(binFiles);
BinaryFooter lastData = findLastData(binFiles);
BinaryStoreStatus storeStatus = new BinaryStoreStatus(binaryStore, firstHead, lastFoot, lastData);
return storeStatus;
}
/**
* Get the last footer. This may be in the last file, but may not be if things
* crashed and the last file didn't get completed, in which case it will be in
* the file before.
* @param binFiles
* @return
*/
private BinaryFooter findLastFooter(List<File> binFiles) {
for (int i = binFiles.size()-1; i>=0; i--) {
File aFile = binFiles.get(i);
/*
* if the last file was completed correctly, it will have an index file. If there isn't
* an index file it's very unlikely there will be a footer in the main file
*/
File indexFile = binaryStore.findIndexFile(aFile, true);
if (indexFile == null) {
continue;
}
BinaryHeaderAndFooter headAndFoot = binaryStore.readHeaderAndFooter(indexFile);
if (headAndFoot != null && headAndFoot.binaryFooter != null) {
return headAndFoot.binaryFooter;
}
}
return null;
}
/**
* Get the last time of any data, whether it's from a header, footer, or actual data.
* @param binFiles
* @return
*/
private BinaryFooter findLastData(List<File> binFiles) {
for (int i = binFiles.size()-1; i>=0; i--) {
File aFile = binFiles.get(i);
BinaryFooter bf = findLastData(aFile);
if (bf != null) {
return bf;
}
}
return null;
}
/**
* Get the last data in a file. Hopefully this comes
* from the footer, but it might have to look at all data if
* the footer is absent or the index file missing.
* @param aFile
* @return
*/
public BinaryFooter findLastData(File aFile) {
Long lastUID = null;
Long lastTime = null;
Long firstUID = null;
File indexFile = binaryStore.findIndexFile(aFile, true);
if (indexFile != null) {
BinaryHeaderAndFooter headAndFoot = binaryStore.readHeaderAndFooter(indexFile);
if (headAndFoot != null && headAndFoot.binaryFooter != null) {
return headAndFoot.binaryFooter;
}
}
/*
* otherwise it would seem that we've a file without a valid end, so unpack it and
* get the UID and time of the last item in the file. Can return these in the form of
* a BinaryFooter since it's pretty much the same information needed.
*/
BinaryInputStream inputStream = new BinaryInputStream(binaryStore, null);
try {
// need to work through the file now.
if (inputStream.openFile(aFile) == false) {
return null;
};
BinaryObjectData binaryObjectData;
BinaryHeader bh = inputStream.readHeader();
if (bh==null) {
inputStream.closeFile();
return null;
}
int inputFormat = bh.getHeaderFormat();
while ((binaryObjectData = inputStream.readNextObject(inputFormat)) != null) {
if (binaryObjectData.getTimeMilliseconds() != 0) {
lastTime = binaryObjectData.getTimeMilliseconds();
}
BinaryFooter bf;
switch (binaryObjectData.getObjectType()) {
case BinaryTypes.FILE_FOOTER:
// this is unlikely to happen, since we'll probably already have found an index file.
bf = new BinaryFooter();
if (bf.readFooterData(binaryObjectData.getDataInputStream(), inputFormat)) {
if (bf.getDataDate() != 0) {
inputStream.closeFile();
return bf;
}
}
break;
case BinaryTypes.MODULE_HEADER:
break;
case BinaryTypes.MODULE_FOOTER:
break;
case BinaryTypes.DATAGRAM:
break;
default: // should be data.
DataUnitBaseData baseData = binaryObjectData.getDataUnitBaseData();
if (baseData != null) {
if (baseData.getTimeMilliseconds() != 0) {
lastTime = baseData.getTimeMilliseconds();
}
if (baseData.getUID() != 0) {
lastUID = baseData.getUID();
if (firstUID == null) {
firstUID = lastUID;
}
}
}
}
}
}
catch (Exception e) {
System.out.printf("Corrupt data file %s: %s\n", aFile, e.getMessage());
// return null;
}
try {
if (inputStream != null) {
inputStream.closeFile();
}
}
catch (Exception e) {
}
if (lastTime != null && lastUID != null) {
BinaryFooter bf = new BinaryFooter();
bf.setHighestUID(lastUID);
bf.setLowestUID(firstUID);
bf.setDataDate(lastTime);
bf.setFileEndReason(BinaryFooter.END_CRASHED);
return bf;
}
else {
return null;
}
}
/**
* Get the first header. This can be read from a data file whether or not there was a
* valid index file created.
* @param binFiles
* @return
*/
private BinaryHeader findFirstHeader(List<File> binFiles) {
for (File aFile : binFiles) {
BinaryHeader binaryHead = new BinaryHeader();
DataInputStream dis = null;
boolean headOk = false;
try {
dis = new DataInputStream(new BufferedInputStream(new FileInputStream(aFile)));
headOk = binaryHead.readHeader(dis);
}
catch (IOException e) {
headOk = false;
}
try {
if (dis != null) {
dis.close();
}
}
catch (IOException e) {
}
if (headOk) {
// files are sorted by name, so the first readable header gives the earliest time.
return binaryHead;
}
}
return null;
}
/**
* first simple status check to see if there are any files there at all.
*/
private int checkStoreStatus() {
String currDir = binaryStore.binaryStoreSettings.getStoreLocation();
if (currDir == null) {
return StoreStatus.STATUS_MISSING;
}
File currfolder = new File(currDir);
if (currfolder.exists() == false) {
return StoreStatus.STATUS_MISSING;
}
// look for files in the folder.
boolean hasFiles = hasAnyFiles(currfolder);
if (hasFiles) {
return StoreStatus.STATUS_HASDATA;
}
else {
return StoreStatus.STATUS_EMPTY;
}
}
private boolean hasAnyFiles(File currFolder) {
PamFileFilter filefilter = new PamFileFilter("data files", ".pgdf");
File[] list = currFolder.listFiles(filefilter);
if (list == null) {
return false;
}
for (int i = 0; i < list.length; i++) {
if (list[i].isDirectory()) {
if (hasAnyFiles(list[i])) {
return true;
}
}
if (list[i].getAbsolutePath().endsWith(".pgdf")) {
return true;
}
}
return false;
}
}
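/*
 * Illustrative sketch, not part of the class above: findLastData can also be used on a
 * single pgdf file, e.g. to check when the most recent file really ended even when it
 * has no footer. Method and variable names here are assumptions.
 */
static void reportLastData(BinaryStore binaryStore, File lastFile) {
	BinaryStoreStatusFuncs statusFuncs = new BinaryStoreStatusFuncs(binaryStore);
	BinaryFooter lastData = statusFuncs.findLastData(lastFile);
	if (lastData == null) {
		System.out.println("No usable data found in " + lastFile.getAbsolutePath());
		return;
	}
	System.out.println("Last data at " + PamUtils.PamCalendar.formatDBDateTime(lastData.getDataDate()));
}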

View File

@ -19,6 +19,7 @@ import PamController.PamControlledUnitSettings;
import PamController.PamController; import PamController.PamController;
import PamController.PamSettingManager; import PamController.PamSettingManager;
import PamController.PamSettings; import PamController.PamSettings;
import PamController.fileprocessing.StoreStatus;
import PamView.PamDetectionOverlayGraphics; import PamView.PamDetectionOverlayGraphics;
import PamView.PamSymbol; import PamView.PamSymbol;
import PamguardMVC.PamDataBlock; import PamguardMVC.PamDataBlock;
@ -219,7 +220,7 @@ public class CPODControl extends OfflineFileControl implements PamSettings {
int n = cpodLoader.loadData(dataBlock, usedMapPoints, offlineDataLoadInfo, loadObserver); int n = cpodLoader.loadData(dataBlock, usedMapPoints, offlineDataLoadInfo, loadObserver);
return n >= 0; return n >= 0;
} }
} }

View File

@ -10,6 +10,7 @@ import java.util.List;
import com.opencsv.CSVReader; import com.opencsv.CSVReader;
import com.opencsv.exceptions.CsvException; import com.opencsv.exceptions.CsvException;
import PamController.fileprocessing.StoreStatus;
import d3.calibration.CalFileReader; import d3.calibration.CalFileReader;
import d3.calibration.CalibrationInfo; import d3.calibration.CalibrationInfo;
import d3.calibration.CalibrationSet; import d3.calibration.CalibrationSet;
@ -487,4 +488,5 @@ public class D3Control extends OfflineFileControl {
public D3DataPlotProvider getD3DataPlotProvider() { public D3DataPlotProvider getD3DataPlotProvider() {
return d3DataPlotProvider; return d3DataPlotProvider;
} }
} }

View File

@ -45,6 +45,7 @@ import PamController.PamController;
import PamController.PamControllerInterface; import PamController.PamControllerInterface;
import PamController.PamSettingManager; import PamController.PamSettingManager;
import PamController.PamSettings; import PamController.PamSettings;
import PamController.fileprocessing.StoreStatus;
import PamguardMVC.PamDataBlock; import PamguardMVC.PamDataBlock;
import PamguardMVC.PamDataUnit; import PamguardMVC.PamDataUnit;
import PamguardMVC.PamProcess; import PamguardMVC.PamProcess;
@ -261,6 +262,6 @@ public class DecimatorControl extends PamControlledUnit implements PamSettings,
double m = fbig % fsmall; double m = fbig % fsmall;
return m == 0; return m == 0;
} }
} }

View File

@ -17,6 +17,7 @@ import PamController.PamControlledUnitSettings;
import PamController.PamController; import PamController.PamController;
import PamController.PamSettingManager; import PamController.PamSettingManager;
import PamController.PamSettings; import PamController.PamSettings;
import PamController.fileprocessing.StoreStatus;
import PamUtils.PamUtils; import PamUtils.PamUtils;
import PamguardMVC.PamDataBlock; import PamguardMVC.PamDataBlock;
import PamguardMVC.PamProcess; import PamguardMVC.PamProcess;
@ -197,5 +198,6 @@ public class BeamformControl extends PamControlledUnit implements PamSettings, O
// TODO Auto-generated method stub // TODO Auto-generated method stub
return beamformProcess; return beamformProcess;
} }
} }

View File

@ -18,12 +18,14 @@ import generalDatabase.backup.DatabaseBackupStream;
import pamScrollSystem.ViewLoadObserver; import pamScrollSystem.ViewLoadObserver;
import pamViewFX.pamTask.PamTaskUpdate; import pamViewFX.pamTask.PamTaskUpdate;
import PamController.AWTScheduler; import PamController.AWTScheduler;
import PamController.DataOutputStore;
import PamController.OfflineDataStore; import PamController.OfflineDataStore;
import PamController.PamControlledUnit; import PamController.PamControlledUnit;
import PamController.PamController; import PamController.PamController;
import PamController.PamControllerInterface; import PamController.PamControllerInterface;
import PamController.PamGUIManager; import PamController.PamGUIManager;
import PamController.PamSettingManager; import PamController.PamSettingManager;
import PamController.fileprocessing.StoreStatus;
import PamController.status.ModuleStatus; import PamController.status.ModuleStatus;
import PamController.status.QuickRemedialAction; import PamController.status.QuickRemedialAction;
import PamguardMVC.PamDataBlock; import PamguardMVC.PamDataBlock;
@ -39,7 +41,7 @@ import backupmanager.BackupInformation;
* @see DBControl * @see DBControl
* *
*/ */
public class DBControlUnit extends DBControl implements OfflineDataStore { public class DBControlUnit extends DBControl implements DataOutputStore {
@ -492,5 +494,15 @@ public class DBControlUnit extends DBControl implements OfflineDataStore {
return backupInformation; return backupInformation;
} }
@Override
public StoreStatus getStoreStatus(boolean getDetail) {
return getDbProcess().getStoreStatus(this, getDetail);
}
@Override
public boolean deleteDataFrom(long timeMillis) {
return getDbProcess().deleteDataFrom(timeMillis);
}
} }
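/*
 * Illustrative sketch, not part of the diff above: since the database and the binary
 * store both expose getStoreStatus and deleteDataFrom (assumed here to be declared by
 * the DataOutputStore interface), pre-start checks can treat all output stores
 * uniformly when working out how far existing data extends. Assumes java.util.List
 * is imported; the method name is an assumption.
 */
static Long latestStoredDataTime(List<DataOutputStore> outputStores) {
	Long latest = null;
	for (DataOutputStore store : outputStores) {
		StoreStatus status = store.getStoreStatus(true);
		Long last = (status == null) ? null : status.getLastDataTime();
		if (last != null && (latest == null || last > latest)) {
			latest = last;
		}
	}
	return latest;
}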

View File

@ -1,6 +1,9 @@
package generalDatabase; package generalDatabase;
import generalDatabase.ColumnMetaData.METACOLNAMES; import generalDatabase.ColumnMetaData.METACOLNAMES;
import generalDatabase.clauses.FixedClause;
import generalDatabase.clauses.FromClause;
import generalDatabase.clauses.PAMSelectClause;
import generalDatabase.pamCursor.PamCursor; import generalDatabase.pamCursor.PamCursor;
import generalDatabase.ucanAccess.UCanAccessSystem; import generalDatabase.ucanAccess.UCanAccessSystem;
@ -14,6 +17,7 @@ import java.io.InputStreamReader;
import java.io.PrintWriter; import java.io.PrintWriter;
import java.sql.Connection; import java.sql.Connection;
import java.sql.DatabaseMetaData; import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.ResultSetMetaData; import java.sql.ResultSetMetaData;
import java.sql.SQLException; import java.sql.SQLException;
@ -21,6 +25,7 @@ import java.sql.SQLWarning;
import java.sql.Statement; import java.sql.Statement;
import java.sql.Types; import java.sql.Types;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List;
import javax.swing.JFileChooser; import javax.swing.JFileChooser;
import javax.swing.JFrame; import javax.swing.JFrame;
@ -39,6 +44,7 @@ import loggerForms.formdesign.FormEditor;
import PamController.PamControlledUnit; import PamController.PamControlledUnit;
import PamController.PamController; import PamController.PamController;
import PamController.PamFolders; import PamController.PamFolders;
import PamController.fileprocessing.StoreStatus;
import PamUtils.PamCalendar; import PamUtils.PamCalendar;
import PamUtils.PamFileChooser; import PamUtils.PamFileChooser;
import PamView.dialog.warn.WarnOnce; import PamView.dialog.warn.WarnOnce;
@ -1435,4 +1441,141 @@ public class DBProcess extends PamProcess {
} }
} }
/**
* Get the store status for use with pre-process checks.
* @param getDetail get full details of start and end times.
* @return database store status.
*/
public StoreStatus getStoreStatus(DBControlUnit dbControlUnit, boolean getDetail) {
DatabaseStoreStatus dbStoreStatus = new DatabaseStoreStatus(dbControlUnit);
// and work out if any tables have anything in them already ...
int status = 0;
if (dbControlUnit.getConnection() == null) {
status = StoreStatus.STATUS_MISSING;
}
else {
boolean anyData = hasAnyOutputData();
if (anyData) {
status = StoreStatus.STATUS_HASDATA;
}
else {
status = StoreStatus.STATUS_EMPTY;
}
}
if (status == StoreStatus.STATUS_HASDATA && getDetail) {
getStoreLimits(dbStoreStatus);
}
dbStoreStatus.setStoreStatus(status);
return dbStoreStatus;
}
private void getStoreLimits(DatabaseStoreStatus dbStoreStatus) {
ArrayList<PamDataBlock> allDataBlocks = PamController.getInstance().getDataBlocks();
PamTableDefinition tableDefinition;
SQLLogging logging;
// for each datablock, check that the process can log (ignoring GPS process)
for (int i = 0; i < allDataBlocks.size(); i++) {
PamDataBlock aBlock = allDataBlocks.get(i);
logging = aBlock.getLogging();
if (logging == null) {
continue;
}
if (aBlock.getMixedDirection() != PamDataBlock.MIX_INTODATABASE) {
continue; // don't want things like GPS data.
}
getStoreLimits(aBlock, dbStoreStatus);
}
}
/**
* Get first and last records for a table.
* @param aBlock
* @param dbStoreStatus
*/
private void getStoreLimits(PamDataBlock aBlock, DatabaseStoreStatus dbStoreStatus) {
SQLLogging logging = aBlock.getLogging();
PamConnection con = databaseControll.getConnection();
SQLTypes sqlTypes = con.getSqlTypes();
String q1 = String.format("SELECT MIN(UTC) FROM %s",sqlTypes.formatTableName(logging.getTableDefinition().getTableName()));
Long t = getUTC(con, q1);
dbStoreStatus.testFirstDataTime(t);
String q2 = String.format("SELECT MAX(UTC) FROM %s",sqlTypes.formatTableName(logging.getTableDefinition().getTableName()));
Long t2 = getUTC(con, q2);
dbStoreStatus.testLastDataTime(t2);
}
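/*
 * Illustrative example (table name assumed): for each loggable data block this produces
 * a pair of simple aggregate queries, e.g.
 *
 *   SELECT MIN(UTC) FROM Click_Detector_Clicks
 *   SELECT MAX(UTC) FROM Click_Detector_Clicks
 *
 * and the earliest / latest values over all tables become the first and last data
 * times held in the DatabaseStoreStatus.
 */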
private Long getUTC(PamConnection con, String qStr) {
Object utcObject = null;
try {
PreparedStatement stmt = con.getConnection().prepareStatement(qStr);
ResultSet result = stmt.executeQuery();
if (result.next()) {
utcObject = result.getObject(1);
}
result.close();
stmt.close();
} catch (SQLException e) {
e.printStackTrace();
return null;
}
if (utcObject == null) {
return null;
}
Long millis = SQLTypes.millisFromTimeStamp(utcObject);
return millis;
}
/**
* Is there any data in any output tables ?
* @return true if any output table contains at least one record
*/
private boolean hasAnyOutputData() {
ArrayList<PamDataBlock> allDataBlocks = PamController.getInstance().getDataBlocks();
PamTableDefinition tableDefinition;
SQLLogging logging;
// for each datablock, check that the process can log (ignoring GPS process)
for (int i = 0; i < allDataBlocks.size(); i++) {
PamDataBlock aBlock = allDataBlocks.get(i);
logging = aBlock.getLogging();
if (logging == null) {
continue;
}
if (aBlock.getMixedDirection() != PamDataBlock.MIX_INTODATABASE) {
continue; // don't want things like GPS data.
}
// get a record count.
Integer count = logging.countTableItems(null);
if (count != null && count > 0) {
return true;
}
}
return false;
}
public boolean deleteDataFrom(long timeMillis) {
ArrayList<PamDataBlock> allDataBlocks = PamController.getInstance().getDataBlocks();
PamTableDefinition tableDefinition;
SQLLogging logging;
// for each datablock, check that the process can log (ignoring GPS process)
boolean ok = true;
for (int i = 0; i < allDataBlocks.size(); i++) {
PamDataBlock aBlock = allDataBlocks.get(i);
logging = aBlock.getLogging();
if (logging == null) {
continue;
}
PAMSelectClause clause = new FromClause(timeMillis);
ok &= logging.deleteData(clause);
}
return ok;
}
} }

View File

@ -0,0 +1,43 @@
package generalDatabase;
import PamController.fileprocessing.StoreStatus;
public class DatabaseStoreStatus extends StoreStatus {
private DBControlUnit dbControl;
public DatabaseStoreStatus(DBControlUnit dbControl) {
super(dbControl);
this.dbControl = dbControl;
}
@Override
public long getFreeSpace() {
String name = dbControl.getDatabaseName(); // may not have the path, which is what we need.
return getFreeSpace(name); // this may not work, particularly for server based systems.
}
public void testFirstDataTime(Long t) {
if (t == null) {
return;
}
if (getFirstDataTime() == null) {
setFirstDataTime(t);
}
if (t < getFirstDataTime()) {
setFirstDataTime(t);
}
}
public void testLastDataTime(Long t) {
if (t == null) {
return;
}
if (getLastDataTime() == null) {
setLastDataTime(t);
}
if (t > getLastDataTime()) {
setLastDataTime(t);
}
}
}

View File

@ -13,7 +13,7 @@ import PamUtils.PamCalendar;
* tables. Also used to prepare Sql statements for writing and * tables. Also used to prepare Sql statements for writing and
* reading back data. * reading back data.
* *
* I did a bit of redifining what columns are used for on 4 Oct, 2012. * I did a bit of redefining what columns are used for on 4 Oct, 2012.
* PCLocalTime was a UTC time from the PC of the time analysis took place. * PCLocalTime was a UTC time from the PC of the time analysis took place.
* When running in real time, this would be the same as the data in the UTC column * When running in real time, this would be the same as the data in the UTC column
* (give or take the odd second for data to get through the system). I've now defined * (give or take the odd second for data to get through the system). I've now defined

View File

@ -225,7 +225,10 @@ public class PamTableItem implements Cloneable {
// } // }
public String getDeblankedStringValue() { public String getDeblankedStringValue() {
if (sqlType != Types.CHAR || value == null) { // if (sqlType != Types.CHAR || value == null) {
// return null;
// }
if (value instanceof String == false) {
return null; return null;
} }
return ((String) value).trim(); return ((String) value).trim();

View File

@ -635,11 +635,20 @@ public abstract class SQLLogging {
if (pamConn == null) { if (pamConn == null) {
return null; return null;
} }
SQLTypes sqlTypes = pamConn.getSqlTypes(); SQLTypes sqlTypes = pamConn.getSqlTypes();
//the clause contains 'WHERE' so it's possible to make a null one. //the clause contains 'WHERE' so it's possible to make a null one.
String qStr = String.format("SELECT COUNT(%s.Id) FROM %s %s", String qStr;
pamTableDefinition.getTableName(), if (pamViewParameters == null) {
pamViewParameters.getSelectClause(sqlTypes)); qStr = String.format("SELECT COUNT(Id) FROM %s",
pamTableDefinition.getTableName());
}
else {
qStr = String.format("SELECT COUNT(%s.Id) FROM %s %s",
pamTableDefinition.getTableName(),
pamTableDefinition.getTableName(),
pamViewParameters.getSelectClause(sqlTypes));
}
int count = 0; int count = 0;
try { try {
PreparedStatement stmt = pamConn.getConnection().prepareStatement(qStr); PreparedStatement stmt = pamConn.getConnection().prepareStatement(qStr);

View File

@ -103,7 +103,10 @@ public class SqliteSystem extends DBSystem implements PamSettings {
if (commandName == null) { if (commandName == null) {
return; return;
} }
setDatabaseName(commandName); File commandFile = new File(commandName);
// check the file end is of the right type. Some batch systems may not get this right.
commandFile = PamFileFilter.checkFileEnd(commandFile, "sqlite3", true);
setDatabaseName(commandFile.getAbsolutePath());
} }
/** /**
@ -303,6 +306,30 @@ public class SqliteSystem extends DBSystem implements PamSettings {
return true; return true;
} }
@Override
public boolean checkDatabaseExists(String dbName) {
String commandName = GlobalArguments.getParam(DBControl.GlobalDatabaseNameArg);
if (commandName != null) {
return checkCommandLineDatabase();
}
return super.checkDatabaseExists(dbName);
}
private boolean checkCommandLineDatabase() {
String commandName = GlobalArguments.getParam(DBControl.GlobalDatabaseNameArg);
if (commandName == null) {
return false;
}
File dbFile = new File(commandName);
dbFile = PamFileFilter.checkFileEnd(dbFile, ".sqlite3", true);
commandName = dbFile.getAbsolutePath();
if (dbFile.exists() == false) {
// create a new database without asking.
createNewDatabase(commandName);
}
return dbFile.exists();
}
@Override @Override
public String getDatabaseName() { public String getDatabaseName() {
/* /*
@ -311,6 +338,9 @@ public class SqliteSystem extends DBSystem implements PamSettings {
*/ */
String commandName = GlobalArguments.getParam(DBControl.GlobalDatabaseNameArg); String commandName = GlobalArguments.getParam(DBControl.GlobalDatabaseNameArg);
if (commandName != null) { if (commandName != null) {
File dbFile = new File(commandName);
dbFile = PamFileFilter.checkFileEnd(dbFile, ".sqlite3", true);
commandName = dbFile.getAbsolutePath();
return commandName; return commandName;
} }

View File

@ -29,6 +29,7 @@ import PamController.PamGUIManager;
import PamController.PamSettingManager; import PamController.PamSettingManager;
import PamController.PamguardVersionInfo; import PamController.PamguardVersionInfo;
import PamController.pamBuoyGlobals; import PamController.pamBuoyGlobals;
import PamController.fileprocessing.ReprocessStoreChoice;
import PamModel.SMRUEnable; import PamModel.SMRUEnable;
import PamUtils.FileFunctions; import PamUtils.FileFunctions;
import PamUtils.PamExceptionHandler; import PamUtils.PamExceptionHandler;
@ -288,7 +289,15 @@ public class Pamguard {
// auto exit at end of processing. // auto exit at end of processing.
GlobalArguments.setParam(NetworkSender.PORT, args[iArg++]); GlobalArguments.setParam(NetworkSender.PORT, args[iArg++]);
} }
else if (anArg.equalsIgnoreCase(ReprocessStoreChoice.paramName)) {
String arg = args[iArg++];
ReprocessStoreChoice choice = null;
try {
choice = ReprocessStoreChoice.valueOf(arg);
}
catch (IllegalArgumentException e) {
// valueOf throws rather than returning null if the name is not recognised.
}
if (choice == null) {
String warn = String.format("Reprocessing storage input parameter %s value \"%s\" is not a recognised value", ReprocessStoreChoice.paramName, arg);
WarnOnce.showWarning("Invalid input parameter", warn, WarnOnce.WARNING_MESSAGE);
}
GlobalArguments.setParam(ReprocessStoreChoice.paramName, arg);
}
else if (anArg.equalsIgnoreCase("-help")) { else if (anArg.equalsIgnoreCase("-help")) {
System.out.println("--PamGuard Help"); System.out.println("--PamGuard Help");
System.out.println("\n--For standard GUI deployment run without any options.\n"); System.out.println("\n--For standard GUI deployment run without any options.\n");