Merge branch 'main' of https://github.com/PAMGuard/PAMGuard into main

Douglas Gillespie 2023-11-15 15:02:50 +00:00
commit 2dd90f6a32
21 changed files with 573 additions and 136 deletions

View File

@@ -8,6 +8,7 @@
 </classpathentry>
 <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11">
     <attributes>
+        <attribute name="module" value="true"/>
         <attribute name="maven.pomderived" value="true"/>
     </attributes>
 </classpathentry>

View File

@@ -4,7 +4,7 @@
 <groupId>org.pamguard</groupId>
 <artifactId>Pamguard</artifactId>
 <name>Pamguard Java12+</name>
-<version>2.02.09b</version>
+<version>2.02.09c</version>
 <description>Pamguard for Java 12+, using Maven to control dependcies</description>
 <url>www.pamguard.org</url>
 <organization>

View File

@@ -786,7 +786,7 @@ C:\Users\*yourusername*\.m2\repository\pamguard\org\x3\2.2.2-->
 <dependency>
     <groupId>pamguard.org</groupId>
     <artifactId>x3</artifactId>
-    <version>2.2.3</version>
+    <version>2.2.6</version>
 </dependency>

View File

@@ -0,0 +1,4 @@
#NOTE: This is a Maven Resolver internal implementation file, its format can be changed without prior notice.
#Wed Nov 15 12:43:42 GMT 2023
x3-2.2.6.jar>=
x3-2.2.6.pom>=

Binary file not shown.

View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>pamguard.org</groupId>
<artifactId>x3</artifactId>
<version>2.2.6</version>
<description>POM was created from install:install-file</description>
</project>

View File

@@ -7,57 +7,214 @@ import java.util.Arrays;
 import javax.sound.sampled.AudioInputStream;
 import javax.sound.sampled.UnsupportedAudioFileException;
+import javax.swing.SwingUtilities;
+import org.pamguard.x3.sud.ChunkHeader;
+import org.pamguard.x3.sud.SudMapListener;
+import PamController.PamController;
+import PamUtils.worker.PamWorkProgressMessage;
+import PamUtils.worker.PamWorkWrapper;
+import PamUtils.worker.PamWorker;
 /**
  * Opens a .sud audio file.
  * <p>
- * Sud files contain X3 compressed audio data. The sud
- * file reader opens files, creating a map of the file and saving
- * the map as a.sudx file so it can be read more rapidly when the file
- * is next accessed.
+ * Sud files contain X3 compressed audio data. The sud file reader opens files,
+ * creating a map of the file and saving the map as a.sudx file so it can be
+ * read more rapidly when the file is next accessed.
  * <p>
- * The SudioAudioInput stream fully implements AudioInputStream and so
- * sud files can be accessed using much of the same code as .wav files.
+ * The SudioAudioInput stream fully implements AudioInputStream and so sud files
+ * can be accessed using much of the same code as .wav files.
  *
  * @author Jamie Macaulay
  *
  */
 public class SudAudioFile extends WavAudioFile {
+
+    private Object conditionSync = new Object();
+
+    private volatile PamWorker<AudioInputStream> worker;
+
+    private volatile SudMapWorker sudMapWorker;
 
     public SudAudioFile() {
         super();
-        fileExtensions = new ArrayList<String>(Arrays.asList(new String[]{".sud"}));
+        fileExtensions = new ArrayList<String>(Arrays.asList(new String[] { ".sud" }));
     }
 
     @Override
     public String getName() {
         return "SUD";
     }
 
     @Override
     public AudioInputStream getAudioStream(File soundFile) {
-        if (soundFile.exists() == false) {
-            System.err.println("The sud file does not exist: " + soundFile);
-            return null;
-        }
-        if (soundFile != null) {
-            try {
-                return new SudAudioFileReader().getAudioInputStream(soundFile);
-            }
-            // don't do anything and it will try the built in Audiosystem
-            catch (UnsupportedAudioFileException e) {
-                System.err.println("UnsupportedAudioFileException: Could not open sud file: not a supported file " + soundFile.getName());
-                System.err.println(e.getMessage());
-                // e.printStackTrace();
-            } catch (IOException e) {
-                System.err.println("Could not open sud file: IO Exception: " + soundFile.getName());
-                e.printStackTrace();
+        synchronized (conditionSync) {
+            // System.out.println("Get SUD getAudioStream : " + soundFile.getName());
+            if (soundFile.exists() == false) {
+                System.err.println("The sud file does not exist: " + soundFile);
+                return null;
+            }
+            if (soundFile != null) {
+                if (new File(soundFile.getAbsolutePath() + "x").exists()) {
+                    // System.out.println("----NO NEED TO MAP SUD FILE-----" + soundFile);
+                    try {
+                        return new SudAudioFileReader().getAudioInputStream(soundFile);
+                    } catch (UnsupportedAudioFileException | IOException e) {
+                        // TODO Auto-generated catch block
+                        e.printStackTrace();
+                    }
+                } else {
+                    // System.out.println("----MAP SUD FILE ON OTHER THREAD-----" + soundFile);
+                    /**
+                     * We need to map the sud file. But we don't want this o just freeze the current
+                     * GUI thread. Therefore add a listener to the mapping process and show a
+                     * blocking dialog to indicate that something is happening. The mapping is put
+                     * on a separate thread and blocks stuff from happening until the mapping
+                     * process has completed.
+                     */
+                    if (sudMapWorker == null || !sudMapWorker.getSudFile().equals(soundFile)) {
+                        sudMapWorker = new SudMapWorker(soundFile);
+                        worker = new PamWorker<AudioInputStream>(sudMapWorker,
+                                PamController.getInstance().getMainFrame(), 1,
+                                "Mapping sud file: " + soundFile.getName());
+                        // System.out.println("Sud Audio Stream STARTED: " + soundFile.getName());
+                        SwingUtilities.invokeLater(() -> {
+                            worker.start();
+                        });
+                        // this should block AWT thread but won't block if called on another thread..
+                    }
+                    // this is only ever called if this function is called on another thread other
+                    // than the event dispatch thread.
+                    while (sudMapWorker == null || !sudMapWorker.isDone()) {
+                        // do nothing
+                        // System.out.println("Waiting for the SUD file map: " + soundFile.getName() + " worker: " + worker);
+                        try {
+                            // Thread.sleep(100);
+                            Thread.sleep(100);
+                        } catch (InterruptedException e) {
+                            // TODO Auto-generated catch block
+                            e.printStackTrace();
+                        }
+                    }
+                    AudioInputStream stream = sudMapWorker.getSudAudioStream();
+                    // sudMapWorker= null;
+                    // worker = null;
+                    // System.out.println("----RETURN SUD FILE ON OTHER THREAD-----" + stream);
+                    return stream;
+                }
             }
         }
         return null;
     }
-}
+
+    public class SudMapProgress implements SudMapListener {
+
+        PamWorker<AudioInputStream> sudMapWorker;
+
+        public SudMapProgress(PamWorker<AudioInputStream> sudMapWorker) {
+            this.sudMapWorker = sudMapWorker;
+        }
+
+        @Override
+        public void chunkProcessed(ChunkHeader chunkHeader, int count) {
+            // System.out.println("Sud Map Progress: " + count);
+            if (count % 500 == 0) {
+                // don't update too often or everything just freezes
+                sudMapWorker.update(new PamWorkProgressMessage(-1, ("Mapped " + count + " sud file chunks")));
+            }
+            if (count == -1) {
+                sudMapWorker.update(new PamWorkProgressMessage(-1, ("Mapping sud file finished")));
+            }
+        }
+    }
+
+    /**
+     * Opens an sud file on a different thread and adds a listener for a mapping.
+     * This allows a callback to show map progress.
+     *
+     * @author Jamie Macaulay
+     *
+     */
+    public class SudMapWorker implements PamWorkWrapper<AudioInputStream> {
+
+        private File soundFile;
+
+        private SudMapProgress sudMapListener;
+
+        private volatile boolean done = false;
+
+        private AudioInputStream result;
+
+        public SudMapWorker(File soundFile) {
+            this.soundFile = soundFile;
+        }
+
+        public File getSudFile() {
+            return soundFile;
+        }
+
+        public AudioInputStream getSudAudioStream() {
+            return result;
+        }
+
+        @Override
+        public AudioInputStream runBackgroundTask(PamWorker<AudioInputStream> pamWorker) {
+            AudioInputStream stream;
+            try {
+                // System.out.println("START OPEN SUD FILE:");
+                this.sudMapListener = new SudMapProgress(pamWorker);
+                stream = new SudAudioFileReader().getAudioInputStream(soundFile, sudMapListener);
+                // System.out.println("END SUD FILE:");
+                // for some reason - task finished may not be called on other
+                // thread so put this here.
+                this.result = stream;
+                this.done = true;
+                return stream;
+            } catch (UnsupportedAudioFileException e) {
+                System.err.println("UnsupportedAudioFileException: Could not open sud file: not a supported file "
+                        + soundFile.getName());
+                System.err.println(e.getMessage());
+                // e.printStackTrace();
+            } catch (IOException e) {
+                System.err.println("Could not open sud file: IO Exception: " + soundFile.getName());
+                e.printStackTrace();
+            }
+            return null;
+        }
+
+        @Override
+        public void taskFinished(AudioInputStream result) {
+            // System.out.println("TASK FINSIHED:");
+            this.result = result;
+            this.done = true;
+        }
+
+        public boolean isDone() {
+            return done;
+        }
+    }
+}
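Illustrative sketch (not in the commit): the decision the new getAudioStream() makes, assuming the .sudx map sits beside the .sud file as in the code above; the file name and method name are made up for illustration.

    import java.io.File;

    public class SudMapCheckSketch {
        // True if a ready-made map exists, so the .sud file can be opened directly
        // rather than mapped on the background PamWorker shown in SudAudioFile.
        static boolean hasSudMap(File sudFile) {
            // "recording.sud" -> "recording.sudx"
            return new File(sudFile.getAbsolutePath() + "x").exists();
        }

        public static void main(String[] args) {
            File sud = new File("recording.sud"); // hypothetical file
            System.out.println(hasSudMap(sud) ? "open directly" : "map on a worker thread first");
        }
    }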

View File

@@ -27,13 +27,14 @@ public class SudAudioFileReader {
     public SudAudioFileReader() {
         sudParams = new SudParams();
         //set up the sud params for default. i.e. just read files and
         //don't save any decompressed or meta data.
-        sudParams.saveWav = false;
-        sudParams.saveMeta = false;
+        // sudParams.saveWav = false;
+        // sudParams.saveMeta = false;
+        sudParams.setFileSave(false, false, false, false);
         sudParams.zeroPad = true;
     }
 
     /**
      * Get the audio input streamn.
@@ -51,5 +52,25 @@
         }
         return sudAudioInputStream;
     }
+
+    /**
+     * Get the audio input stream for a sud file.
+     * @param file - the .sud file to open.
+     * @param mapListener- a listener for the sud file maps - can be null.
+     * @return the sud AudioStream.
+     * @throws UnsupportedAudioFileException
+     * @throws IOException
+     */
+    public AudioInputStream getAudioInputStream(File file, SudMapListener mapListener) throws UnsupportedAudioFileException, IOException {
+        // System.out.println("Get SUD getAudioInputStream");
+        try {
+            sudAudioInputStream = SudAudioInputStream.openInputStream(file, sudParams, mapListener, false);
+        } catch (Exception e) {
+            String msg = String.format("Corrupt sud file %s: %s", file.getName(), e.getMessage());
+            throw new UnsupportedAudioFileException(msg);
+        }
+        return sudAudioInputStream;
+    }
 }
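Illustrative sketch (not in the commit): how the new two-argument getAudioInputStream might be called. It assumes the sketch sits in the same package as SudAudioFileReader and that SudMapListener declares only the chunkProcessed(ChunkHeader, int) callback that SudMapProgress overrides above; the file path is hypothetical.

    import java.io.File;
    import javax.sound.sampled.AudioInputStream;
    import org.pamguard.x3.sud.ChunkHeader;
    import org.pamguard.x3.sud.SudMapListener;

    public class SudReaderUsageSketch {
        public static void main(String[] args) throws Exception {
            File sudFile = new File("recording.sud"); // hypothetical path

            // Progress callback, matching the chunkProcessed signature used by SudMapProgress.
            SudMapListener listener = new SudMapListener() {
                @Override
                public void chunkProcessed(ChunkHeader chunkHeader, int count) {
                    if (count > 0 && count % 500 == 0) {
                        System.out.println("Mapped " + count + " sud file chunks");
                    }
                }
            };

            AudioInputStream stream = new SudAudioFileReader().getAudioInputStream(sudFile, listener);
            System.out.println("Sample rate: " + stream.getFormat().getSampleRate());
            stream.close();
        }
    }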

View File

@@ -1,18 +1,12 @@
 package Acquisition.sud;
 
 import java.io.File;
 
-import org.pamguard.x3.sud.ChunkHeader;
 import org.pamguard.x3.sud.SudAudioInputStream;
-import org.pamguard.x3.sud.SudFileMap;
-import org.pamguard.x3.sud.SudParams;
-
-import PamUtils.PamCalendar;
 
 public class SUDFileTime {
 
     private static long sudTime;
     private static String lastFilePath = "";
 
     /**
      * Temp measure to get the time from the first available sud record.
@@ -20,6 +14,7 @@ public class SUDFileTime {
      * @return
      */
     public static long getSUDFileTime(File file) {
+        //System.out.println("Get sud file time: " + file.getName());
         if (file == null || file.exists() == false) {
             return Long.MIN_VALUE;
         }
@@ -36,22 +31,26 @@
          */
         // long t1 = System.currentTimeMillis();
         sudTime = Long.MIN_VALUE;
-        SudParams sudParams = new SudParams();
-        sudParams.saveMeta = false;
-        sudParams.saveWav = false;
+        // SudParams sudParams = new SudParams();
+        // sudParams.saveMeta = false;
+        // sudParams.saveWav = false;
         try {
-            SudAudioInputStream sudAudioInputStream = SudAudioInputStream.openInputStream(file, sudParams, false);
-            if (sudAudioInputStream == null) {
-                return Long.MIN_VALUE;
-            }
-            SudFileMap sudMap = sudAudioInputStream.getSudMap();
-            if (sudMap == null) {
-                return Long.MIN_VALUE;
-            }
-            long t = sudMap.getFirstChunkTimeMillis();
+            //
+            // SudAudioInputStream sudAudioInputStream = SudAudioInputStream.openInputStream(file, sudParams, false);
+            // if (sudAudioInputStream == null) {
+            //     return Long.MIN_VALUE;
+            // }
+            // SudFileMap sudMap = sudAudioInputStream.getSudMap();
+            // if (sudMap == null) {
+            //     return Long.MIN_VALUE;
+            // }
+            // long t = sudMap.getFirstChunkTimeMillis();
+            long t = SudAudioInputStream.quickFileTime(file);
+            t=t/1000; //turn to milliseconds.
             if (t != 0) {
                 sudTime = t;
             }
             // sudAudioInputStream.addSudFileListener((chunkID, sudChunk)->{
             //     ChunkHeader chunkHead = sudChunk.chunkHeader;
             //     if (chunkHead == null || sudTime != Long.MIN_VALUE) {
@@ -70,17 +69,16 @@
             //     sudAudioInputStream.read();
             // }
             //
-            sudAudioInputStream.close();
+            // sudAudioInputStream.close();
             // long t2 = System.currentTimeMillis();
             // System.out.printf("SUD file time %s extracted in %d milliseconds\n", PamCalendar.formatDBDateTime(sudTime), t2-t1);
         } catch (Exception e) {
-            System.err.println("Error getting time from SUD file: " + e.getMessage());
-            e.printStackTrace();
+            System.err.println("Error getting time from SUD file: " + file + " " + e.getMessage());
         }
         return sudTime;
     }
 }
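Illustrative sketch (not in the commit): calling the quicker time lookup, with a hypothetical file path. getSUDFileTime() returns Long.MIN_VALUE when no time can be read, and PamCalendar.formatDBDateTime() is the formatter referenced in the commented-out timing code above.

    import java.io.File;
    import Acquisition.sud.SUDFileTime;
    import PamUtils.PamCalendar;

    public class SudFileTimeSketch {
        public static void main(String[] args) {
            File sud = new File("recording.sud"); // hypothetical path
            long millis = SUDFileTime.getSUDFileTime(sud); // Long.MIN_VALUE if the time cannot be read
            if (millis != Long.MIN_VALUE) {
                System.out.println("First chunk time: " + PamCalendar.formatDBDateTime(millis));
            }
        }
    }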

View File

@@ -31,12 +31,12 @@ public class PamguardVersionInfo {
      * Version number, major version.minorversion.sub-release.
      * Note: can't go higher than sub-release 'f'
      */
-    static public final String version = "2.02.09b";
+    static public final String version = "2.02.09c";
 
     /**
      * Release date
      */
-    static public final String date = "29 June 2023";
+    static public final String date = "10 November 2023";
 
     // /**
     //  * Release type - Beta or Core

View File

@@ -10,7 +10,7 @@ public class ArraySensorParams implements Serializable, Cloneable, ManagedParame
     public static final long serialVersionUID = 1L;
 
-    public int readIntervalMillis = 1000;
+    public volatile int readIntervalMillis = 1000;
 
     private ArrayDisplayParameters arrayDisplayParameters;

View File

@@ -45,7 +45,8 @@ public class ArraySensorProcess extends PamProcess {
         while(true) {
             readData();
             try {
-                Thread.sleep(analogSensorControl.getAnalogSensorParams().readIntervalMillis);
+                int slptime = analogSensorControl.getAnalogSensorParams().readIntervalMillis;
+                Thread.sleep(slptime);
             } catch (InterruptedException e) {
                 e.printStackTrace();
             }

View File

@@ -139,7 +139,7 @@ public class BrainBoxDevices implements AnalogDeviceType, PamSettings{
             double sensData = BBED549.hexToEngineering(bbRanges[item], sensInts);
             double paramValue = calibration.rawToValue(sensData, calibrationData[item]);
             analogDevicesManager.notifyData(new ItemAllData(item, sensInts, sensData, paramValue));
-            // System.out.printf("Read item %d, chan %d, int %d, real %3.5f, param %3.5f\n", iChan, chan, sensInts, sensData, paramValue);
+            // System.out.printf("Read item %d, chan %d, int %d, real %3.5f, param %3.5f\n", 0, chan, sensInts, sensData, paramValue);
             sayError(null);
             return new AnalogSensorData(sensData, paramValue);

View File

@@ -221,7 +221,7 @@ public class AnalogDiagnosticsDisplay extends UserDisplayComponentAdapter implem
             break;
         case 3:
             if (lastUpdate[rowIndex] > 0) {
-                return PamCalendar.formatTime(lastUpdate[rowIndex]);
+                return PamCalendar.formatTime(lastUpdate[rowIndex], true);
             }
             break;
         case 4:

View File

@@ -22,6 +22,7 @@ public class BLOfflineTask extends OfflineTask {
         this.bearingLocaliserControl = bearingLocaliserControl;
         bearingProcess = bearingLocaliserControl.getBearingProcess();
         this.addRequiredDataBlock(rawOrFFTBlock = bearingProcess.getParentDataBlock());
+        addAffectedDataBlock(detectionBlock);
         // PamDataBlock detectionSource = bearingLocaliserControl.getDetectionMonitor().getParentDataBlock();
         // this.setParentDataBlock(detectionSource);
         // setParentDataBlock(bearingProcess.getParentDataBlock());

View File

@@ -26,6 +26,10 @@ public class Group3DOfflineTask extends OfflineTask<PamDataUnit>{
         this.group3DControl = group3DControl;
         group3DProcess = group3DControl.getGroup3dProcess();
         addAffectedDataBlock(group3DProcess.getGroup3dDataBlock());
+        PamDataBlock parentData = group3DProcess.getParentDataBlock();
+        if (parentData != null) {
+            this.addRequiredDataBlock(parentData);
+        }
     }
 
     @Override

View File

@@ -32,6 +32,7 @@ import java.io.IOException;
 import java.util.EnumMap;
 import javax.swing.JFileChooser;
+import javax.swing.filechooser.FileNameExtensionFilter;
 import PamUtils.PamCalendar;
@@ -126,31 +127,34 @@ public class RoccaClassifyThis {
     /** the field in the RoccaContourStats object which contains all the stats measures */
     private EnumMap<RoccaContourStats.ParamIndx, Double> contourStats;
 
-    private String dirIn;
-    /** the input filename */
-    private String csvIn;
-    /** the input file */
-    private File statsFileIn;
-    /** the output filename */
-    private String csvOut;
-    /** the output file */
-    private File statsFileOut;
-    /** Constructor */
+    /**
+     * Constructor used when allowing user to select training dataset
+     * */
     public RoccaClassifyThis(RoccaProcess roccaProcess) {
-        // initialize the BufferedReader
-        BufferedReader inputFile = null;
+        File statsFileIn = getTheFile();
+        if (statsFileIn!=null) {
+            runTheClassifier(statsFileIn, roccaProcess);
+        }
+    }
+
+    /**
+     * Constructor when we pass in the training dataset
+     */
+    public RoccaClassifyThis() {
+    }
+
+    /**
+     * Ask the user to select the file containing the testing dataset
+     *
+     * @return File the csv file containing the testing dataset
+     */
+    public File getTheFile() {
         // set the directory
         // this.dirIn = new String("C:\\Users\\Mike\\Documents\\Work\\Java\\EclipseWorkspace\\testing\\RoccaClassifyThis_testing");
         // this.dirIn = new String("C:\\Users\\Mike\\Documents\\Work\\Tom\\Atlantic Classifier\\manual 2-stage data");
         // this.dirIn = new String("C:\\Users\\Mike\\Documents\\Work\\Tom\\Hawaii dataset problems");
-        this.dirIn = new String("C:\\Users\\SCANS\\Documents\\Work\\Biowaves\\ONR classifier");
+        // this.dirIn = new String("C:\\Users\\SCANS\\Documents\\Work\\Biowaves\\ONR classifier");
 
         // Define the input and output filenames
         // Hard-coded for now. To Do: query the user for the filename
@@ -158,35 +162,54 @@
         // this.csvIn = new String("Manual_5sp_April 9 2013.csv");
         // this.csvIn = new String("CombinedContourStats-fixed.csv");
         // this.csvOut = new String("RoccaContourStatsReclassified.csv");
-        this.csvIn = new String("Atl_TestDFNoTrain_Call_W_160831.csv");
-        statsFileIn = new File(dirIn, csvIn);
-        this.csvOut = new String("Atl_TestDFNoTrain_Call_W_160829-classified.csv");
-        statsFileOut = new File(dirIn, csvOut);
+        // this.csvIn = new String("Atl_TestDFNoTrain_Call_W_160831.csv");
+        // statsFileIn = new File(dirIn, csvIn);
+        // this.csvOut = new String("Atl_TestDFNoTrain_Call_W_160829-classified.csv");
+        // statsFileOut = new File(dirIn, csvOut);
+
+        // let the user select the arff file
+        JFileChooser fileChooser = new JFileChooser();
+        fileChooser.setDialogTitle("Select spreadsheet to recalculate...");
+        fileChooser.setFileHidingEnabled(true);
+        fileChooser.setApproveButtonText("Select");
+        fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
+        FileNameExtensionFilter restrict = new FileNameExtensionFilter("Only .csv files", "csv");
+        fileChooser.addChoosableFileFilter(restrict);
+
+        int state = fileChooser.showOpenDialog(null);
+        File statsFileIn = null;
+        if (state == JFileChooser.APPROVE_OPTION) {
+            // load the file
+            statsFileIn = fileChooser.getSelectedFile();
+            return statsFileIn;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Run the classifier
+     * @param statsFileIn the File containing the testing dataset
+     * @param roccaProcess the RoccaProcess instance
+     */
+    public void runTheClassifier(File statsFileIn, RoccaProcess roccaProcess) {
-        // JFileChooser fileChooser = new JFileChooser();
-        // fileChooser.setDialogTitle("Select spreadsheet to recalculate...");
-        // fileChooser.setFileHidingEnabled(true);
-        // fileChooser.setApproveButtonText("Select");
-        // fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
-        //
-        // int state = fileChooser.showOpenDialog(this.dirIn);
-        // if (state == JFileChooser.APPROVE_OPTION) {
+        int index = statsFileIn.getAbsolutePath().lastIndexOf(".");
+        String csvOut = statsFileIn.getAbsolutePath().substring(0,index) + "-classified.csv";
+        File statsFileOut = new File(csvOut);
 
         // load the classifier
         System.out.println("Loading classifier...");
         roccaProcess.setClassifierLoaded
             (roccaProcess.roccaClassifier.setUpClassifier());
+
+        // initialize the BufferedReader
+        BufferedReader inputFile = null;
 
         // open the input file
         try {
             System.out.println("Opening input file "+statsFileIn);
@@ -263,12 +286,45 @@
             contourStats.put(RoccaContourStats.ParamIndx.FREQPOSSLOPEMEAN, Double.parseDouble(dataArray[34]));
             contourStats.put(RoccaContourStats.ParamIndx.FREQNEGSLOPEMEAN, Double.parseDouble(dataArray[35]));
             contourStats.put(RoccaContourStats.ParamIndx.FREQSLOPERATIO, Double.parseDouble(dataArray[36]));
-            contourStats.put(RoccaContourStats.ParamIndx.FREQBEGSWEEP, Double.parseDouble(dataArray[37]));
-            //contourStats.put(RoccaContourStats.ParamIndx.FREQBEGUP, Double.parseDouble(dataArray[38]));
-            //contourStats.put(RoccaContourStats.ParamIndx.FREQBEGDWN, Double.parseDouble(dataArray[39]));
-            contourStats.put(RoccaContourStats.ParamIndx.FREQENDSWEEP, Double.parseDouble(dataArray[40]));
-            //contourStats.put(RoccaContourStats.ParamIndx.FREQENDUP, Double.parseDouble(dataArray[41]));
-            //contourStats.put(RoccaContourStats.ParamIndx.FREQENDDWN, Double.parseDouble(dataArray[42]));
+
+            // Note that we have to modify the FREQBEGSWEEP value. Weka is trained with the FREQBEGSWEEP param
+            // as -1=down, 0=flat and 1=up, and that would be how the test data comes through as well. HOWEVER,
+            // Weka assumes that for nominal parameters, the value is the index location (0,1 or 2) and NOT the actual trained
+            // value (-1,0 or 1). So if the whistle has a down sweep, Weka needs the FREQBEGSWEEP value to be 0 indicating the
+            // first location in the array (which was 'down'). If it was up, the value would need to be 2 indicating the third
+            // location in the array (which was 'up').
+            // Ideally we would map the values in the test data to the positions in the training array, but as a quick and
+            // dirty hack we'll simply add 1 to the value since the difference between the nominal values (-1,0,1) and the
+            /// index positions (0,1,2) is an offset of 1
+            // Note also that we don't have to do the same thing for FREQBEGUP and FREQBEGDWN since, by coincidence, the training
+            // values of 0 and 1 happen to match the index locations of 0 and 1
+            //contourStats.put(RoccaContourStats.ParamIndx.FREQBEGSWEEP, Double.parseDouble(dataArray[37]));
+            double tempVal = Double.parseDouble(dataArray[37]);
+            tempVal++;
+            contourStats.put(RoccaContourStats.ParamIndx.FREQBEGSWEEP, tempVal);
+            contourStats.put(RoccaContourStats.ParamIndx.FREQBEGUP, Double.parseDouble(dataArray[38]));
+            contourStats.put(RoccaContourStats.ParamIndx.FREQBEGDWN, Double.parseDouble(dataArray[39]));
+
+            // Note that we have to modify the FREQENDSWEEP value. Weka is trained with the FREQENDSWEEP param
+            // as -1=down, 0=flat and 1=up, and that would be how the test data comes through as well. HOWEVER,
+            // Weka assumes that for nominal parameters, the value is the index location (0,1 or 2) and NOT the actual trained
+            // value (-1,0 or 1). So if the whistle has a down sweep, Weka needs the FREQENDSWEEP value to be 0 indicating the
+            // first location in the array (which was 'down'). If it was up, the value would need to be 2 indicating the third
+            // location in the array (which was 'up').
+            // Ideally we would map the values in the test data to the positions in the training array, but as a quick and
+            // dirty hack we'll simply add 1 to the value since the difference between the nominal values (-1,0,1) and the
+            /// index positions (0,1,2) is an offset of 1
+            // Note also that we don't have to do the same thing for FREQENDUP and FREQENDDWN since, by coincidence, the training
+            // values of 0 and 1 happen to match the index locations of 0 and 1
+            //contourStats.put(RoccaContourStats.ParamIndx.FREQENDSWEEP, Double.parseDouble(dataArray[40]));
+            tempVal = Double.parseDouble(dataArray[40]);
+            tempVal++;
+            contourStats.put(RoccaContourStats.ParamIndx.FREQENDSWEEP, tempVal);
+            contourStats.put(RoccaContourStats.ParamIndx.FREQENDUP, Double.parseDouble(dataArray[41]));
+            contourStats.put(RoccaContourStats.ParamIndx.FREQENDDWN, Double.parseDouble(dataArray[42]));
+            // end of hack
+
             contourStats.put(RoccaContourStats.ParamIndx.NUMSWEEPSUPDWN, Double.parseDouble(dataArray[43]));
             contourStats.put(RoccaContourStats.ParamIndx.NUMSWEEPSDWNUP, Double.parseDouble(dataArray[44]));
             contourStats.put(RoccaContourStats.ParamIndx.NUMSWEEPSUPFLAT, Double.parseDouble(dataArray[45]));
@@ -285,8 +341,8 @@
             contourStats.put(RoccaContourStats.ParamIndx.INFLMEANDELTA, Double.parseDouble(dataArray[56]));
             contourStats.put(RoccaContourStats.ParamIndx.INFLSTDDEVDELTA, Double.parseDouble(dataArray[57]));
             contourStats.put(RoccaContourStats.ParamIndx.INFLMEDIANDELTA, Double.parseDouble(dataArray[58]));
-            contourStats.put(RoccaContourStats.ParamIndx.INFLDUR, Double.parseDouble(dataArray[59]));
-            contourStats.put(RoccaContourStats.ParamIndx.STEPDUR, Double.parseDouble(dataArray[60]));
+            //contourStats.put(RoccaContourStats.ParamIndx.INFLDUR, Double.parseDouble(dataArray[59]));
+            //contourStats.put(RoccaContourStats.ParamIndx.STEPDUR, Double.parseDouble(dataArray[60]));
 
             // Run the classifier
             roccaProcess.roccaClassifier.classifyContour2(rcdb);
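Illustrative sketch (not in the commit): the nominal-value offset described in the comments above, shown on its own; the class and method names are made up for illustration.

    public class SweepIndexSketch {
        // Weka stores nominal attributes by index, so a trained sweep value of
        // -1 (down), 0 (flat) or 1 (up) must be shifted to index 0, 1 or 2.
        static double sweepValueToWekaIndex(double sweepValue) {
            return sweepValue + 1;
        }

        public static void main(String[] args) {
            System.out.println(sweepValueToWekaIndex(-1)); // 0.0 -> 'down'
            System.out.println(sweepValueToWekaIndex(0));  // 1.0 -> 'flat'
            System.out.println(sweepValueToWekaIndex(1));  // 2.0 -> 'up'
        }
    }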

View File

@@ -169,6 +169,7 @@ public class RoccaParametersDialog extends PamDialog implements ActionListener,
     JButton classifier2Button;
     JButton recalcButton;
     JButton reclassifyButton;
+    JButton trainThenTestButton;
     JButton clearClassifier;
     JComboBox<DefaultComboBoxModel<Vector<String>>> stage1Classes;
     DefaultComboBoxModel<Vector<String>> stage1ClassModel;
@@ -513,6 +514,10 @@
         reclassifyButton.addActionListener(this);
         reclassifyButton.setToolTipText("Load the whistle data from the contour stats output file, and run it through the current Classifier");
         reclassifyButton.setVisible(true);
+        trainThenTestButton = new JButton("Train then Test");
+        trainThenTestButton.addActionListener(this);
+        trainThenTestButton.setToolTipText("Train a classifier on a set of training data, then test it with a set of testing data");
+        trainThenTestButton.setVisible(true);
 
         // ******** THIS LINES CONTROLS THE VISIBILITY ********
         if (RoccaDev.isEnabled()) {
@@ -528,13 +533,15 @@
             extraPanelLayout.createParallelGroup(GroupLayout.Alignment.LEADING)
             .addGroup(extraPanelLayout.createSequentialGroup()
                 .addComponent(recalcButton)
-                .addComponent(reclassifyButton))
+                .addComponent(reclassifyButton)
+                .addComponent(trainThenTestButton))
         );
         extraPanelLayout.setVerticalGroup(
             extraPanelLayout.createSequentialGroup()
             .addGroup(extraPanelLayout.createParallelGroup(GroupLayout.Alignment.BASELINE)
                 .addComponent(recalcButton)
-                .addComponent(reclassifyButton))
+                .addComponent(reclassifyButton)
+                .addComponent(trainThenTestButton))
         );
         classifierPanel.add(extraButtonsSubPanel);
@@ -892,7 +899,9 @@
         } else if (e.getSource() == recalcButton) {
             RoccaFixParams recalc = new RoccaFixParams(roccaControl.roccaProcess);
         } else if (e.getSource() == reclassifyButton) {
-            RoccaClassifyThisEvent reclassify = new RoccaClassifyThisEvent(roccaControl.roccaProcess);
+            RoccaClassifyThis reclassify = new RoccaClassifyThis(roccaControl.roccaProcess);
+        } else if (e.getSource() == trainThenTestButton) {
+            RoccaTrainThenTest trainThenTest = new RoccaTrainThenTest(roccaControl.roccaProcess);
         } else if (e.getSource() == fftButton) {
             roccaParameters.setUseFFT(true);
             this.enableTheCorrectSource();

View File

@@ -145,6 +145,7 @@ public class RoccaRFModel implements java.io.Serializable {
             } catch (Exception ex) {
                 System.err.println("1st Classification failed: " + ex.getMessage());
+                ex.printStackTrace();
                 rcdb.setClassifiedAs("Err");
             }
         }

View File

@@ -24,10 +24,14 @@
 package rocca;
 
 import java.io.BufferedReader;
+import java.io.File;
 import java.io.FileReader;
 import java.util.Date;
 import java.util.Enumeration;
+import javax.swing.JFileChooser;
+import javax.swing.filechooser.FileNameExtensionFilter;
 import weka.classifiers.trees.RandomForest;
 import weka.core.Instances;
 import weka.core.SerializationHelper;
@@ -42,13 +46,64 @@ import weka.core.SerializationHelper;
  */
 public class RoccaTrainClassifier {
 
+    /**
+     * Standalone implementation
+     *
+     * @param args
+     */
     public static void main(String[] args) {
+        RoccaTrainClassifier rtc = new RoccaTrainClassifier();
+        File arffFile = rtc.getArff();
+        if (arffFile!=null) {
+            String modelName = rtc.trainClassifier(arffFile);
+        }
+    }
+
+    /**
+     * Let user choose arff file training dataset
+     *
+     * @return File the arff file containing the training dataset
+     */
+    public File getArff() {
+        // String arffFile = "C:\\Users\\SCANS\\Documents\\Work\\Biowaves\\ONR classifier\\TP_TrainEvtDF_170408";
+
+        // let the user select the arff file
+        JFileChooser fileChooser = new JFileChooser();
+        fileChooser.setDialogTitle("Select arff file containing training data");
+        fileChooser.setFileHidingEnabled(true);
+        fileChooser.setApproveButtonText("Select");
+        fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
+        FileNameExtensionFilter restrict = new FileNameExtensionFilter("Only .arff files", "arff");
+        fileChooser.addChoosableFileFilter(restrict);
+
+        File arffFile;
+        int state = fileChooser.showOpenDialog(null);
+        if (state == JFileChooser.APPROVE_OPTION) {
+            // load the file
+            arffFile = fileChooser.getSelectedFile();
+            return arffFile;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Actual code to train the classifier
+     *
+     */
+    public String trainClassifier(File arffFile) {
         RandomForest model = new RandomForest ();
         Instances trainData = null;
-        String arffFile = "C:\\Users\\SCANS\\Documents\\Work\\Biowaves\\ONR classifier\\TP_TrainEvtDF_170408";
 
         // load the ARFF file containing the training set
-        System.out.println("Loading data...");
+        System.out.println("Loading data..." + arffFile.getAbsolutePath());
         try {
             trainData = new Instances
                 (new BufferedReader
@@ -56,10 +111,13 @@ public class RoccaTrainClassifier {
                 // ("C:\\Users\\Mike\\Documents\\Work\\Java\\WEKA\\allwhists 12 vars 8sp update 1-28-10.arff")));
                 // ("C:\\Users\\Mike\\Documents\\Work\\Java\\WEKA\\weka vs R\\ETP_orcawale_whists2 modified-subset110perspecies-no_harm_ratios.arff")));
                 // ("C:\\Users\\SCANS\\Documents\\Work\\Biowaves\\ONR classifier\\Atl_TrainDF_Event_160829.arff")));
-                (arffFile + ".arff")));
+                // (arffFile + ".arff")));
+                (arffFile)));
             trainData.setClassIndex(trainData.numAttributes()-1);
         } catch (Exception ex) {
             System.out.println("Error Loading...");
+            ex.printStackTrace();
+            return null;
         }
 
         // set the classifier parameters
@@ -78,6 +136,8 @@
             model.setOptions(options);
         } catch (Exception ex) {
             System.out.println("Error setting options...");
+            ex.printStackTrace();
+            return null;
         }
 
         // train the classifier
@@ -90,23 +150,29 @@
                 new Date());
         } catch (Exception ex) {
             System.out.println("Error training classifier...");
+            ex.printStackTrace();
+            return null;
         }
 
         // save the classifier
-        String[] curOptions = model.getOptions();
-        Enumeration test = model.listOptions();
-        System.out.println("Saving Classifier...");
+        // String[] curOptions = model.getOptions();
+        // Enumeration test = model.listOptions();
         Instances header = new Instances(trainData,0);
+        int index = arffFile.getAbsolutePath().lastIndexOf(".");
+        String modelName = arffFile.getAbsolutePath().substring(0,index) + ".model";
+        System.out.println("Saving Classifier..." + modelName);
         try {
             SerializationHelper.writeAll
                 // ("C:\\Users\\Mike\\Documents\\Work\\Java\\WEKA\\weka vs R\\RF_8sp_54att_110whistle-subset.model",
-                (arffFile + ".model",
+                // (arffFile + ".model",
+                (modelName,
                 new Object[]{model,header});
+            System.out.println("Finished!");
+            return modelName;
         } catch (Exception ex) {
             System.out.println("Error saving classifier...");
+            ex.printStackTrace();
         }
-        System.out.println("Finished!");
+        return null;
     }
 }

View File

@@ -0,0 +1,109 @@
/*
* PAMGUARD - Passive Acoustic Monitoring GUARDianship.
* To assist in the Detection Classification and Localisation
* of marine mammals (cetaceans).
*
* Copyright (C) 2006
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package rocca;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import javax.swing.JFileChooser;
import javax.swing.filechooser.FileNameExtensionFilter;
public class RoccaTrainThenTest {
RoccaTrainClassifier roccaTrainClassifier;
RoccaClassifyThis roccaClassifyThis;
/**
* Main Constructor
* @param roccaProcess
*/
public RoccaTrainThenTest(RoccaProcess roccaProcess) {
// let the user select the csv file containing the training and testing dataset(s)
JFileChooser fileChooser = new JFileChooser();
fileChooser.setDialogTitle("Select csv file with the training/testing pairs");
fileChooser.setFileHidingEnabled(true);
fileChooser.setApproveButtonText("Select");
fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
FileNameExtensionFilter restrict = new FileNameExtensionFilter("Only .csv files", "csv");
fileChooser.addChoosableFileFilter(restrict);
int state = fileChooser.showOpenDialog(null);
if (state == JFileChooser.APPROVE_OPTION) {
// load the file
try {
File csvDataPairs = fileChooser.getSelectedFile();
BufferedReader br = new BufferedReader(new FileReader(csvDataPairs));
String curPath = csvDataPairs.getParent();
// main loop
// read through the csv file one line at a time. The first column should contain the training dataset filename,
// and the second column the testing dataset filename. Paths should be relative to the path containing
// the csv file
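// For illustration only (not part of the commit), a pairs file might look like:
//    training\Atl_TrainDF_1.arff,testing\Atl_TestDF_1.csv
//    training\Atl_TrainDF_2.arff,testing\Atl_TestDF_2.csv
// where the file names are hypothetical and each line produces one train/test run.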
String line = "";
String splitBy = ",";
while ((line=br.readLine())!=null) {
String[] filenames = line.split(splitBy);
// train the classifier
File arffFile = new File(curPath + File.separator + filenames[0]);
roccaTrainClassifier = new RoccaTrainClassifier();
String modelName = roccaTrainClassifier.trainClassifier(arffFile);
if (modelName == null) {
System.out.println("ERROR: could not create classifier model from "+arffFile);
continue;
}
// set the classifier as the current one in RoccaParameters
roccaProcess.roccaControl.roccaParameters.setRoccaClassifierModelFilename(new File(modelName));
// test the classifier with the testing dataset
File testFile = new File(curPath + File.separator + filenames[1]);
roccaClassifyThis = new RoccaClassifyThis();
roccaClassifyThis.runTheClassifier(testFile, roccaProcess);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
return;
} catch (IOException e) {
e.printStackTrace();
return;
}
} else {
return;
}
}
}