Updates to deep learning click classifier

Fixed bug for classifying clicks in viewer mode where the classification buffer was not being filled.
Implemented Risso's classifier and checked it's working. Required changes to jpam
Fixed bug in source data blocks in deep learning classifier
Updated deb build
This commit is contained in:
Jamie Mac 2025-03-26 12:40:22 +00:00
parent 452a582027
commit 8fceb3e3fe
13 changed files with 305 additions and 144 deletions
build/deb
dependency-reduced-pom.xmlpom.xml
src
dataPlotsFX/data/generic
rawDeepLearningClassifier
test/rawDeepLearningClassifier

View File

@ -4,10 +4,10 @@ Section: java
Priority: optional
Architecture: all
Maintainer: Jamie Macaulay <jdjm@st-andrews.ac.uk>
Description: A complex Java application with additional dependencies.
Description: Process passive acoustic data for whales, dolphins and other species.
Icon: pamguard_icon.png
Depends: openjdk-21-jre
postinst script:
#!/bin/bash
# Set JVM options for the application
export JAVA_OPTS="-Xmx2g -Xms512m"
export JAVA_OPTS="-Xmx4g -Xms512m -Dsun.java2d.uiScale=2 -Dawt.useSystemAAFontSettings=on -Dswing.aatext=true"

View File

@ -26,6 +26,71 @@
</resources>
</build>
<profiles>
<profile>
<id>linux-profile</id>
<build>
<plugins>
<plugin>
<groupId>org.vafer</groupId>
<artifactId>jdeb</artifactId>
<version>1.11</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jdeb</goal>
</goals>
<configuration>
<controlDir>${basedir}/build/deb</controlDir>
<dataSet>
<data>
<src>${project.build.directory}/${project.build.finalName}.jar</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard</prefix>
</mapper>
</data>
<data>
<src>${basedir}/liblinux</src>
<type>directory</type>
<includes>*.txt</includes>
<includes>*.so</includes>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard/liblinux</prefix>
</mapper>
</data>
<data>
<src>${basedir}/build/deb/PAMGuardIcon2.png</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard</prefix>
</mapper>
</data>
<data>
<src>${basedir}/build/deb/pamguard.desktop</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/applications</prefix>
</mapper>
</data>
<data>
<type>link</type>
<linkName>/usr/share/pamguard/Pamguard</linkName>
<linkTarget>/usr/share/pamguard/${project.build.finalName}.jar</linkTarget>
<symlink>true</symlink>
</data>
</dataSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>macos-profile</id>
<build>
@ -145,36 +210,11 @@
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.12.1</version>
<executions>
<execution>
<id>default-compile</id>
<phase>compile</phase>
<goals>
<goal>compile</goal>
</goals>
<configuration>
<release>21</release>
<compilerId>jdt</compilerId>
</configuration>
</execution>
<execution>
<id>default-testCompile</id>
<phase>test-compile</phase>
<goals>
<goal>testCompile</goal>
</goals>
<configuration>
<release>21</release>
<compilerId>jdt</compilerId>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>org.eclipse.tycho</groupId>
<artifactId>tycho-compiler-jdt</artifactId>
<version>1.5.1</version>
<scope>compile</scope>
</dependency>
</dependencies>
<configuration>
@ -207,17 +247,6 @@
</transformer>
<transformer />
</transformers>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
<exclude>test/resources/**</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
@ -254,7 +283,7 @@
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>C:\Users\dg50\source\repos\PAMGuardPAMGuard\target/tempDependencies</outputDirectory>
<outputDirectory>${project.build.directory}/tempDependencies</outputDirectory>
<overWriteReleases>false</overWriteReleases>
<overWriteSnapshots>false</overWriteSnapshots>
<overWriteIfNewer>true</overWriteIfNewer>

189
pom.xml
View File

@ -51,7 +51,106 @@
earlier. Most OS specific profiles are for generating executables and code signing.
Note use help:active-profiles to see which profiles are active -->
<profiles>
<!-- The Linux profile creates a DMG package for installing PAMGuard on
Linux Ubuntu based systems-->
<!--Note: There is a Maven bug/feature when using profiles where the plugin order is messed up and seemingly quite random.
In PAMGuard it is essential that the shade plugin is called before the linux plugin. Therefore explicitly call
the package phase using mvn package shade:shade jdeb:jdeb. Note that although a deb file can be created in Windows it
will not have the correct JavaFX libraries and therefore will not work when run -->
<profile>
<id>linux-profile</id>
<activation>
<os>
<family>linux</family>
</os>
</activation>
<build>
<plugins>
<!--Creates a deb file for Linux-->
<plugin>
<artifactId>jdeb</artifactId>
<groupId>org.vafer</groupId>
<version>1.11</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jdeb</goal>
</goals>
<configuration>
<controlDir>${basedir}/build/deb</controlDir>
<dataSet>
<data>
<!--TODO-really the jar file should be named properly but this would mean we would
have to change the name in the desktop file too. For now create a link to the jar file
with the correct version number-->
<src>
${project.build.directory}/${project.build.finalName}.jar</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard</prefix>
</mapper>
</data>
<data>
<src>${basedir}/liblinux</src>
<type>directory</type>
<includes>*.txt</includes>
<includes>*.so</includes>
<mapper>
<type>perm</type>
<prefix>
/usr/share/pamguard/liblinux</prefix>
</mapper>
</data>
<data>
<src>
${basedir}/build/deb/PAMGuardIcon2.png</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard</prefix>
</mapper>
</data>
<!---The desktop file will create a PAMGuard icon in the applications tray-->
<data>
<src>
${basedir}/build/deb/pamguard.desktop</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/applications</prefix>
</mapper>
</data>
<!---Create a link which is just called Pamguard. This means the .desktop file does not need
to be altered depending on the build name-->
<data>
<type>link</type>
<linkName>
/usr/share/pamguard/Pamguard</linkName>
<linkTarget>
/usr/share/pamguard/${project.build.finalName}.jar</linkTarget>
<symlink>true</symlink>
</data>
<!--<data>
<src>${project.basedir}/build/deb/set-java-property.sh</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/bin</prefix>
<filemode>755</filemode>
</mapper>
</data>-->
</dataSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- The MacOS profile creates a DMG package for installing PAMGuard on MacOS-->
<!--Note: There is a Maven bug when using profiles where the plugin order is messed up and seemingly quite random.
In PAMGuard it is essential that the shade plugin is called before the macos plugin and so the phase of the
@ -312,82 +411,7 @@
-->
</executions>
</plugin>
<!--Creates a deb file for Linux-->
<plugin>
<artifactId>jdeb</artifactId>
<groupId>org.vafer</groupId>
<version>1.11</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jdeb</goal>
</goals>
<configuration>
<controlDir>${basedir}/build/deb</controlDir>
<dataSet>
<data>
<!--TODO-really the jar file should be named properly but this would mean we would
have to change the name in the desktop file too. For now create a link to the jar file
with the correct version number-->
<src>${project.build.directory}/${project.build.finalName}.jar</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard</prefix>
</mapper>
</data>
<data>
<src>${basedir}/liblinux</src>
<type>directory</type>
<includes>*.txt</includes>
<includes>*.so</includes>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard/liblinux</prefix>
</mapper>
</data>
<data>
<src>${basedir}/build/deb/PAMGuardIcon2.png</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/pamguard</prefix>
</mapper>
</data>
<!---The desktop file will create a PAMGuard icon in the applications tray-->
<data>
<src>${basedir}/build/deb/pamguard.desktop</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/share/applications</prefix>
</mapper>
</data>
<!---Create a link which is just called Pamguard. This means the .desktop file does not need
to be altered depending on the build name-->
<data>
<type>link</type>
<linkName>/usr/share/pamguard/Pamguard</linkName>
<linkTarget>/usr/share/pamguard/${project.build.finalName}.jar</linkTarget>
<symlink>true</symlink>
</data>
<!--<data>
<src>${project.basedir}/build/deb/set-java-property.sh</src>
<type>file</type>
<mapper>
<type>perm</type>
<prefix>/usr/bin</prefix>
<filemode>755</filemode>
</mapper>
</data>-->
</dataSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
<pluginManagement>
@ -432,8 +456,8 @@
</profile>
</profiles>
<!-- Plugin which creates a .dmg file for MacOS.
<!-- Plugin which creates a .dmg file for MacOS.
<plugin>
<groupId>de.perdian.maven.plugins</groupId>
<artifactId>macosappbundler-maven-plugin</artifactId>
@ -561,7 +585,7 @@
<groupId>io.github.macster110</groupId>
<artifactId>jpamutils</artifactId>
<version>0.0.59f</version>
<!-- com.github.psambit9791:wavfile:jar:0.1 pulls in various junit dependencies which breaks our own testing -->
<!-- com.github.psambit9791:wavfile:jar:0.1 pulls in various junit dependencies which breaks our own testing -->
<!--<exclusions>
<exclusion>
<groupId>org.junit.platform</groupId>
@ -729,7 +753,7 @@
</exclusions>
</dependency>
<!-- html to markdown conversion https://github.com/furstenheim/copy-down -->
<!-- html to markdown conversion https://github.com/furstenheim/copy-down -->
<dependency>
<groupId>io.github.furstenheim</groupId>
<artifactId>copy_down</artifactId>
@ -1116,7 +1140,7 @@
<version>2.1.1</version>
</dependency>
<!--
<!--
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
@ -1273,7 +1297,6 @@
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.junit.jupiter</groupId>

View File

@ -136,6 +136,8 @@ public class GenericDataPlotInfo extends TDDataInfoFX {
System.out.println("GenericDataPlotInfo: Single frequency measure in data unit " + pamDataUnit.toString());
}
//System.out.println("Frequency: " + f[0] + " " + f[1] + " " + pamDataUnit);
// draw a frequency box.
double y0 = tdProjector.getYPix(f[0]);
double y1 = tdProjector.getYPix(f[1]);

View File

@ -210,7 +210,7 @@ public class DLClassifyProcess extends PamProcess {
*/
@Override
public void newData(PamObservable obs, PamDataUnit pamRawData) {
//System.out.println("NEW SEGMENTER DATA: " + PamCalendar.formatDateTime2(pamRawData.getTimeMilliseconds(), "dd MMM yyyy HH:mm:ss.SSS", false) + " " + pamRawData.getUID() + " " + pamRawData.getChannelBitmap() + " " + pamRawData);
//System.out.println("NEW SEGMENTER DATA: " + PamCalendar.formatDateTime2(pamRawData.getTimeMilliseconds(), "dd MMM yyyy HH:mm:ss.SSS", false) + " " + pamRawData.getUID() + " " + pamRawData.getChannelBitmap() + " " + pamRawData);
//if grouped data then just run the classifier on the group - do not try and create a buffer.
if (pamRawData instanceof SegmenterDetectionGroup) {
@ -618,10 +618,11 @@ public class DLClassifyProcess extends PamProcess {
*/
public void forceRunClassifier(PamDataUnit dataUnit) {
// System.out.println("CLASSIFICATION BUFFER: " + classificationBuffer.size());
//System.out.println("CLASSIFICATION BUFFER: " + classificationBuffer.size());
if (this.classificationBuffer.size()>0) {
if (classificationBuffer.get(0) instanceof GroupedRawData) {
//System.out.println("Run raw model on: " + classificationBuffer.get(0) + " " + classificationBuffer.get(1));
runRawModel(); //raw data or raw data units
}
if (classificationBuffer.get(0) instanceof SegmenterDetectionGroup) {
@ -689,10 +690,12 @@ public class DLClassifyProcess extends PamProcess {
DataUnitBaseData basicData = groupDataBuffer.get(0).getBasicData().clone();
basicData.setMillisecondDuration(1000.*rawdata[0].length/this.sampleRate);
basicData.setSampleDuration((long) (groupDataBuffer.size()*dlControl.getDLParams().rawSampleSize));
// System.out.println("Model result: " + modelResult.size());
DLDetection dlDetection = new DLDetection(basicData, rawdata, getSampleRate());
addDLAnnotation(dlDetection,modelResult);
dlDetection.setFrequency(new double[] {0, this.getSampleRate()/2});
//create the data unit
return dlDetection;

View File

@ -139,9 +139,8 @@ public class ArchiveModelWorker extends GenericModelWorker {
System.out.println(modelParams.dlTransforms);
ArrayList<DLTransform> transforms = DLTransformsFactory.makeDLTransforms(modelParams.dlTransforms);
// ///HACK here for now to fix an issue with dB and Ketos transforms having zero length somehow...
// for (int i=0; i<modelParams.dlTransforms.size(); i++) {
// System.out.println(modelParams.dlTransforms.get(i));
// System.out.println(modelParams.dlTransforms.get(i).toString());
// }
//only load new transforms if defaults are selected

View File

@ -11,6 +11,7 @@ import org.jamdev.jdl4pam.transforms.DLTransformsFactory;
import org.jamdev.jdl4pam.utils.DLUtils;
import org.jamdev.jpamutils.wavFiles.AudioData;
import PamUtils.PamArrayUtils;
import PamguardMVC.PamDataUnit;
import rawDeepLearningClassifier.DLControl;
import rawDeepLearningClassifier.DLStatus;
@ -87,6 +88,12 @@ public abstract class DLModelWorker<T> {
// if (transform instanceof FreqTransform) {
// transformedData = ((FreqTransform) transform).getSpecTransfrom().getTransformedData();
// System.out.println("DLModelWorker: transform : " + modelTransforms.get(i).getDLTransformType() + " "+ i + transformedData.length + " " + transformedData[0].length + " minmax: " + PamArrayUtils.minmax(transformedData)[0] + " " + PamArrayUtils.minmax(transformedData)[1]);
// }
// //TEMP
// if (transform instanceof WaveTransform) {
// transformedData1 = ((WaveTransform) transform).getWaveData().getScaledSampleAmplitudes();
// System.out.println("DLModelWorker: transform : " + modelTransforms.get(i).getDLTransformType() + " "+ i + " " + transformedData1.length + " " + PamArrayUtils.minmax(transformedData1)[0] + " " + PamArrayUtils.minmax(transformedData1)[1]);
// }
}

View File

@ -104,6 +104,7 @@ public class GenericModelWorker extends DLModelWorker<StandardPrediction> {
}
setModelTransforms(genericParams.dlTransfroms);
//is this a waveform or a spectrogram model?
setWaveFreqModel(genericParams);

View File

@ -583,10 +583,11 @@ public class DLSettingsPane extends SettingsPane<RawDLParams>{
@Override
public void setParams(RawDLParams currParams) {
sourcePane.setParams(currParams.groupedSourceParams);
sourcePane.sourceChanged();
sourcePane.setSourceList();
sourcePane.setParams(currParams.groupedSourceParams);
dlControl.createDataSelector(sourcePane.getSource());

View File

@ -59,7 +59,7 @@ public class PeakTrimTransformPane extends DLTransformPane {
});
//spinner for changing filter order.
targetLenSpinner = new Spinner<Integer>(1,50,4,1);
targetLenSpinner = new Spinner<Integer>(1,Integer.MAX_VALUE,4,1);
targetLenSpinner.valueProperty().addListener((obsVal, oldVal, newVal)->{
this.notifySettingsListeners();
});

View File

@ -1,5 +1,7 @@
package rawDeepLearningClassifier.offline;
import java.util.ListIterator;
import PamController.PamController;
import PamguardMVC.PamDataUnit;
import PamguardMVC.PamObservable;
@ -8,6 +10,7 @@ import dataMap.OfflineDataMapPoint;
import matchedTemplateClassifer.MTClassifierControl;
import offlineProcessing.OfflineTask;
import rawDeepLearningClassifier.DLControl;
import rawDeepLearningClassifier.segmenter.GroupedRawData;
import rawDeepLearningClassifier.segmenter.SegmenterDetectionGroup;
import rawDeepLearningClassifier.segmenter.SegmenterProcess;
@ -48,7 +51,7 @@ public class DLOfflineTask extends OfflineTask<PamDataUnit<?,?>>{
@Override
public boolean processDataUnit(PamDataUnit<?, ?> dataUnit) {
// System.out.println("--------------");
// System.out.println("Offline task start: " + dataUnit.getUpdateCount() + " UID " + dataUnit.getUID());
//System.out.println("Offline task start: " + dataUnit.getUpdateCount() + " UID " + dataUnit.getUID() + " " + dlControl.getDLParams().enableSegmentation);
boolean saveBinary = false;
try {
@ -79,9 +82,16 @@ public class DLOfflineTask extends OfflineTask<PamDataUnit<?,?>>{
//detection has been added we force the classifier to run on all the segments generated from
//the raw data.
//Process a data unit
//Process a data unit within the segmenter
dlControl.getSegmenter().newData(dataUnit);
//System.out.println("Segments: " + dlControl.getSegmenter().getSegmenterDataBlock().getUnitsCount());
//need to add the segmenter data units into the classification buffer
ListIterator<GroupedRawData> iterator = dlControl.getSegmenter().getSegmenterDataBlock().getListIterator(0);
while (iterator.hasNext()) {
dlControl.getDLClassifyProcess().newData(dlControl.getSegmenter().getSegmenteGroupDataBlock(), iterator.next());
}
//force click data save
dlControl.getDLClassifyProcess().forceRunClassifier(dataUnit);

View File

@ -241,7 +241,7 @@ public class SegmenterProcess extends PamProcess {
newRawDataUnit(pamRawData);
}
else if (pamRawData instanceof ClickDetection) {
newClickData( pamRawData);
newClickData(pamRawData);
}
else if (pamRawData instanceof ClipDataUnit) {
newClipData(pamRawData);
@ -491,7 +491,7 @@ public class SegmenterProcess extends PamProcess {
public void newClickData(PamDataUnit pamRawData) {
//the raw data units should appear in sequential channel order
// System.out.println("New raw data in: chan: " + PamUtils.getSingleChannel(pamRawData.getChannelBitmap()) + " Size: " + pamRawData.getSampleDuration());
//System.out.println("New raw data in: chan: " + PamUtils.getSingleChannel(pamRawData.getChannelBitmap()) + " Size: " + pamRawData.getSampleDuration());
ClickDetection clickDataUnit = (ClickDetection) pamRawData;
@ -542,6 +542,12 @@ public class SegmenterProcess extends PamProcess {
//segment the data unit into different chunks.
newRawData(pamDataUnit,
rawDataChunk[i], chans[i], dlControl.getDLParams().rawSampleSize, dlControl.getDLParams().sampleHop, true);
//the way that the newRawdata works is it waits for the next chunk and copies all relevant bits
//from previous chunks into segments. This is fine for continuous data but means that chunks of data
//don't get their last hop...
//got to save the last chunk of raw data -even if the segment has not been filled.
saveRawGroupData(true);
}
else {
// //send the whole data chunk to the deep learning unit
@ -551,13 +557,8 @@ public class SegmenterProcess extends PamProcess {
// pamDataUnit.getStartSample(), rawDataChunk[i].length, rawDataChunk[i].length);
}
//the way that the newRawdata works is it waits for the next chunk and copies all relevant bits
//from previous chunks into segments. This is fine for continuous data but means that chunks of data
//don't get their last hop...
}
//got to save the last chunk of raw data -even if the segment has not been filled.
saveRawGroupData(true);
}
@ -600,7 +601,7 @@ public class SegmenterProcess extends PamProcess {
long timeMilliseconds = unit.getTimeMilliseconds();
long startSampleTime = unit.getStartSample();
//System.out.println("Segmenter: RawDataIn: chan: 1 " + getSourceParams().countChannelGroups() + currentRawChunks);
//System.out.println("Segmenter: RawDataIn: chan: 1 " + getSourceParams().countChannelGroups() + currentRawChunks + " rawSampleSize " + rawSampleSize + " rawSampleHop: " +rawSampleHop);
if (currentRawChunks==null) {
System.err.println("Current raw chunk arrays are null");
@ -753,6 +754,7 @@ public class SegmenterProcess extends PamProcess {
* @param forceSave - true to also save the remaining unfilled segment.
*/
private void saveRawGroupData(boolean forceSave) {
//System.out.println("Segmenter process: saveRawGroupData(boolean forceSave)");
for (int i=0; i<getSourceParams().countChannelGroups(); i++) {
saveRawGroupData(i, forceSave);
}
@ -771,6 +773,7 @@ public class SegmenterProcess extends PamProcess {
* @param i - the group index.
*/
private void saveRawGroupData(int i) {
//System.out.println("Segmenter process: saveRawGroupData(int i)");
saveRawGroupData(i, false);
}

View File

@ -19,6 +19,8 @@ import org.jamdev.jdl4pam.transforms.DLTransform.DLTransformType;
import org.junit.jupiter.api.Test;
import PamUtils.PamArrayUtils;
import rawDeepLearningClassifier.dlClassification.animalSpot.StandardModelParams;
import rawDeepLearningClassifier.dlClassification.archiveModel.ArchiveModelWorker;
import rawDeepLearningClassifier.dlClassification.genericModel.GenericModelParams;
import rawDeepLearningClassifier.dlClassification.genericModel.GenericModelWorker;
import rawDeepLearningClassifier.dlClassification.genericModel.StandardPrediction;
@ -34,6 +36,79 @@ import us.hebi.matlab.mat.types.Struct;
 * Model from Thomas Webber which is a good way to test that the click-based classification is working in PAMGuard.
*/
public class ClickDLTest {
/**
* Test just one click using the zipped classifier
* @throws
*/
@Test
public void aclickDLTestZip() {
System.out.println("*****CLickDLTest: Single click test zip*****");
//relative paths to the resource folders.
String relModelPath = "/home/jamiemac/Dropbox/PAMGuard_dev/Deep_Learning/click_classifier_Thomas/model_v2/model_pb.zip";
String clicksPath = "/home/jamiemac/Dropbox/PAMGuard_dev/Deep_Learning/click_classifier_Thomas/model_v2/example_2000021.mat";
// String matout = "/home/jamiemac/MATLAB-Drive/MATLAB/PAMGUARD/deep_learning/generic_classifier/example_2000021_transforms.mat";
String matout=null;
// load the click data up.
Path clkPath = Paths.get(clicksPath);
PredGroupedRawData clickData = null;
Struct matclkStruct = Mat5.newStruct();
try {
Mat5File mfr = Mat5.readFromFile(clkPath.toAbsolutePath().normalize().toString());
// //get array of a name "my_array" from file
Struct mlArrayRetrived = mfr.getStruct( "newStruct" );
Matrix clickWavM = mlArrayRetrived.get("wave", 0);
double[][] clickWaveform= PamArrayUtils.matrix2array(clickWavM);
clickWaveform=PamArrayUtils.transposeMatrix(clickWaveform);
Matrix clickUID= mlArrayRetrived.get("UID", 0);
Matrix pred= mlArrayRetrived.get("pred", 0);
//create a click object whihc we can pass through transforms etc.
clickData = new PredGroupedRawData(0L, 1, 0, clickWaveform[0].length, clickWaveform[0].length);
clickData.setUID(clickUID.getLong(0));
clickData.setRawData(clickWaveform);
clickData.setPrediction(new double[] {pred.getDouble(0)});
// load the model up
Path path = Paths.get(relModelPath);
ArchiveModelWorker genericModelWorker = new ArchiveModelWorker();
StandardModelParams genericModelParams = new StandardModelParams();
genericModelParams.modelPath = path.toAbsolutePath().normalize().toString();
//prep the model - all setting are included within the model
genericModelWorker.prepModel(genericModelParams, null);
System.out.println("seglen: " + genericModelParams.defaultSegmentLen);
ArrayList<GroupedRawData> groupedData = new ArrayList<GroupedRawData>();
groupedData.add(clickData);
System.out.println("Waveform input: " + groupedData.get(0).getRawData().length + " " + groupedData.get(0).getRawData()[0].length);
ArrayList<StandardPrediction> genericPrediction = genericModelWorker.runModel(groupedData,96000, 0);
float[] outputPAMGuard = genericPrediction.get(0).getPrediction();
System.out.println("Model output PAMGuard: " + outputPAMGuard[0]);
assertEquals(outputPAMGuard[0], 0.99, 0.05);
}
catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
assertTrue(false); //make sure the unit test fails
return;
}
}
/**
* Test just one click
@ -104,7 +179,7 @@ public class ClickDLTest {
ArrayList<DLTransfromParams> dlTransformParamsArr = new ArrayList<DLTransfromParams>();
//waveform transforms.
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.DECIMATE_SCIPY, 248000.));
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.DECIMATE_SCIPY, 96000.));
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.NORMALISE_WAV, 0., 1, AudioData.ZSCORE)); //needs to be here
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.PEAK_TRIM, 64, 1));
@ -122,7 +197,7 @@ public class ClickDLTest {
System.out.println("Waveform input: " + groupedData.get(0).getRawData().length + " " + groupedData.get(0).getRawData()[0].length);
ArrayList<StandardPrediction> genericPrediction = genericModelWorker.runModel(groupedData,248000, 0);
ArrayList<StandardPrediction> genericPrediction = genericModelWorker.runModel(groupedData,96000, 0);
// System.out.println("PAMGuard input len: " + pythonModelInputF.length);
@ -199,7 +274,7 @@ public class ClickDLTest {
ArrayList<DLTransfromParams> dlTransformParamsArr = new ArrayList<DLTransfromParams>();
//waveform transforms.
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.DECIMATE_SCIPY, 248000.));
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.DECIMATE_SCIPY, 96000.));
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.NORMALISE_WAV, 0., 1, AudioData.ZSCORE));
dlTransformParamsArr.add(new SimpleTransformParams(DLTransformType.PEAK_TRIM, 64, 1));
@ -215,7 +290,8 @@ public class ClickDLTest {
System.out.println("Model has loaded: n clicks " + clicks.size());
float count = 0;
long timeStart = System.currentTimeMillis();
for (int i=0; i<clicks.size(); i++) {
float prediction = (float) clicks.get(i).getPrediction()[0];
@ -231,10 +307,17 @@ public class ClickDLTest {
System.out.println(String.format("Click %d Predicted output: %.4f true output: %.4f passed: %b delta %.2f", clicks.get(i).getUID(),
output[0], prediction, output[0]>prediction*0.9 && output[0]<prediction*1.1, (Math.abs(output[0] -prediction))));
if (output[0]>prediction*0.9 && output[0]<prediction*1.1) {
count++;
}
}
long timeEnd = System.currentTimeMillis();
double perctrue = count/clicks.size();
System.out.println(String.format("Percentage clicks passed: %.2f TIme to process %d clicks - %2f seconds", perctrue, clicks.size(), ((double) (timeEnd-timeStart))/1000.));
}
/**