Mirror of https://github.com/PAMGuard/PAMGuard.git (synced 2024-11-22 07:02:29 +00:00)
Bug fixes to deep learning module and TDisplayFX
This commit is contained in:
parent 120b1eb56f
commit 1fc349974a
@@ -80,6 +80,18 @@ public class TDGraphParametersFX implements Serializable, Cloneable, ManagedPara
	 */
	public String plotFillS = "white";

	/**
	 * Show the hide panel on the left on start up. Note only used when first opening
	 * saved settings or saving settings.
	 */
	public boolean showHidePaneLeft = true;

	/**
	 * Show the hide panel on the right on start up. Note only used when first opening
	 * saved settings or saving settings.
	 */
	public boolean showHidePaneRight = false;


	/* (non-Javadoc)
	 * @see java.lang.Object#clone()
@@ -350,8 +350,8 @@ public class TDGraphFX extends PamBorderPane {
		setOverlayColour(LIGHT_TD_DISPLAY);


		//show the left hiding pane byu default.
		stackPane.getLeftHidingPane().showHidePane(true);
//		//show the left hiding pane byu default.
//		stackPane.getLeftHidingPane().showHidePane(true);
	}

	/**
@@ -1699,8 +1699,7 @@ public class TDGraphFX extends PamBorderPane {

	/**
	 * A bit different to the standard getter in that this only gets called just
	 * before the configuration is serialized into the .psf. It's time to pull any
	 * configuration information out about every line drawn on this boomin' thing !
	 * before the configuration is serialized into the .psfx.
	 *
	 * @return graph parameters ready to serialised.
	 */
@@ -1733,6 +1732,11 @@ public class TDGraphFX extends PamBorderPane {
				graphParameters.setScaleInfoData(scaleInfo.getDataTypeInfo(), scaleInfo.getScaleInfoData());
			}
		}

		//Finally save whether the hiding panels are open or not.
		graphParameters.showHidePaneLeft = stackPane.getLeftHidingPane().isShowing();
		graphParameters.showHidePaneRight = stackPane.getRightHidingPane().isShowing();

		}

		return graphParameters;
@@ -1817,6 +1821,11 @@ public class TDGraphFX extends PamBorderPane {
		tdAxisSelPane.selectAxisType();
		setAxisName(graphParameters.currentDataType);

		//Open hide panes if needed.
		//Finally save whether the hiding panels are open or not.
		stackPane.getLeftHidingPane().showHidePane(graphParameters.showHidePaneLeft);
		stackPane.getRightHidingPane().showHidePane(graphParameters.showHidePaneRight);

	}

	/**
@@ -2,6 +2,7 @@ package dataPlotsFX.rawClipDataPlot;


import java.awt.geom.Path2D;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;

@@ -14,7 +15,7 @@ import PamguardMVC.PamDataUnit;
import PamguardMVC.RawDataHolder;
import clipgenerator.ClipSpectrogram;
import dataPlotsFX.TDSymbolChooserFX;
import dataPlotsFX.clickPlotFX.ClickSymbolChooserFX;
import dataPlotsFX.clickPlotFX.ClickDisplayParams;
import dataPlotsFX.data.TDDataProviderFX;
import dataPlotsFX.data.TDScaleInfo;
import dataPlotsFX.data.generic.GenericDataPlotInfo;
@@ -256,5 +257,35 @@ public class RawClipDataInfo extends GenericDataPlotInfo {

	}

	/* (non-Javadoc)
	 * @see dataPlots.data.TDDataInfo#getStoredSettings()
	 */
	@Override
	public Serializable getStoredSettings() {
		return rawClipParams;
	}

	/* (non-Javadoc)
	 * @see dataPlots.data.TDDataInfo#setStoredSettings(java.io.Serializable)
	 */
	@Override
	public boolean setStoredSettings(Serializable storedSettings) {
		if (RawClipParams.class.isAssignableFrom(storedSettings.getClass())) {
			rawClipParams = (RawClipParams) storedSettings;
			updateSettings();
			return true;
		}
		return false;
	}

	/**
	 * Called whenever settings are updated.
	 */
	private void updateSettings() {
		// TODO Auto-generated method stub

	}



}
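A minimal sketch of the stored-settings round trip added above (dataInfo is a hypothetical RawClipDataInfo instance; the display framework would normally drive these calls when writing and reading the .psfx):

	Serializable saved = dataInfo.getStoredSettings();     // hands out the current RawClipParams
	boolean restored = dataInfo.setStoredSettings(saved);  // true only if the object really is a RawClipParams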
@@ -38,6 +38,13 @@ public class RawDLParams implements Serializable, Cloneable {
	 */
	public GroupedSourceParameters groupedSourceParams = new GroupedSourceParameters();

	/**
	 * True to enable segmentation. If segmentation is disabled then the raw waveform from
	 * a data unit is passed directly to the model. Note that this is not an option for raw sound
	 * data.
	 */
	public boolean enableSegmentation = true;

	/**
	 * The number of raw samples to send to the classifier.
	 */
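A minimal configuration sketch for the new flag (illustrative values; rawSampleSize and sampleHop are the RawDLParams fields referenced from SegmenterProcess further down):

	RawDLParams params = new RawDLParams();
	params.enableSegmentation = false; // e.g. pass each click detection's full waveform to the model
	params.rawSampleSize = 2048;       // segment window, only used when segmentation is enabled
	params.sampleHop = 1024;           // segment hop, only used when segmentation is enabled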
@@ -85,7 +92,7 @@ public class RawDLParams implements Serializable, Cloneable {
	 * different class names. If we change model then the class names may change.
	 * Previously annotated data will then be messed up. But, in a giant dataset
	 * that may be an issue. Perhaps users wish to run a new model on some chunk of
	 * data without messing up all the other classified detectionS which have used
	 * data without messing up all the other classified detections which have used
	 * that module. So store the data in binary files? That is super inefficient as
	 * the same string is stored many times. So instead store a short which
	 * identifies the string that sits in this table. Everytime a new model is added
@@ -141,12 +141,12 @@ public class DLPredictonPane extends DynamicSettingsPane<DLPredictionFilterParam
	@Override
	public DLPredictionFilterParams getParams(DLPredictionFilterParams currParams) {

		if (classPanes==null) return currParams;
		for (int i=0; i<classPanes.length ; i++) {
			currParams.classSelect[i] = classPanes[i].enable.isSelected();
			currParams.minClassPredicton[i] = classPanes[i].slider.getValue();
		}


		return currParams;
	}

@@ -382,6 +382,7 @@ public class DelphinIDUtils {
		// String modelPath = "/Users/au671271/Library/CloudStorage/Dropbox/PAMGuard_dev/Deep_Learning/delphinID/testencounter415/whistle_model_2/whistle_4s_415.zip";
		String modelPath = "D:/Dropbox/PAMGuard_dev/Deep_Learning/delphinID/testencounter415/whistle_model_2/whistle_4s_415.zip";

		//line widths in pixels
		double[] lineWidths = new double[] {6, 7, 10, 15, 20};

		for (double lineWidth:lineWidths) {
@@ -486,9 +486,17 @@ public class SegmenterProcess extends PamProcess {

		//pass the raw click data to the segmenter
		for (int i=0;i<chans.length; i++) {
			newRawData(pamDataUnit,
					rawDataChunk[i], chans[i], true);

			if (dlControl.getDLParams().enableSegmentation) {
				//segment the data unit into different chunks.
				newRawData(pamDataUnit,
						rawDataChunk[i], chans[i], dlControl.getDLParams().rawSampleSize, dlControl.getDLParams().sampleHop, true);
			}
			else {
				//send the whole data chunk to the deep learning unit
				newRawData(pamDataUnit,
						rawDataChunk[i], chans[i], rawDataChunk[i].length, rawDataChunk[i].length, true);
			}
			//the way that the newRawdata works is it waits for the next chunk and copies all relevant bits
			//from previous chunks into segments. This is fine for continuous data but means that chunks of data
			//don't get their last hop...
@@ -512,7 +520,7 @@ public class SegmenterProcess extends PamProcess {
	 * @param iChan - the channel that is being segmented
	 */
	public void newRawData(PamDataUnit unit, double[] rawDataChunk, int iChan) {
		newRawData(unit, rawDataChunk, iChan, false);
		newRawData(unit, rawDataChunk, iChan, dlControl.getDLParams().rawSampleSize ,dlControl.getDLParams().sampleHop , false);
	}

	/**
@@ -523,15 +531,17 @@ public class SegmenterProcess extends PamProcess {
	 *
	 * @param unit - the data unit which contains relevant metadata on time
	 *             etc.
	 * @param rawDataChunk - the sound chunk to segment extracted from the data
	 * @param rawDataChunk - the sound chunk extracted from the data
	 *             unit.
	 * @param iChan - the channel that is being segmented
	 * @param rawSampleSize - the segment size in samples i.e. the size of the segmenting window.
	 * @param rawSampleHop - the segment hop in samples i.e. how far the window jumps for each segment.
	 * @param forceSave - make sure that all data is passed into the buffers and
	 *             do not wait for the next data unit. This is used to make
	 *             sure that discrete chunks have their full number of
	 *             segments saved.
	 */
	public synchronized void newRawData(PamDataUnit unit, double[] rawDataChunk, int iChan, boolean forcesave) {
	public synchronized void newRawData(PamDataUnit unit, double[] rawDataChunk, int iChan, int rawSampleSize, int rawSampleHop, boolean forcesave) {

		long timeMilliseconds = unit.getTimeMilliseconds();
		long startSampleTime = unit.getStartSample();
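A minimal sketch of calling the widened overload directly (segmenterProcess, dataUnit and waveform are illustrative names); this is what the segmentation branch in the hunk above does with the values configured in RawDLParams:

	// 2048-sample windows advancing by 1024 samples on channel 0, without forcing a save
	segmenterProcess.newRawData(dataUnit, waveform, 0, 2048, 1024, false);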
@@ -555,7 +565,7 @@ public class SegmenterProcess extends PamProcess {
			if (currentRawChunks[i]==null) {
				//create a new data unit - should only be called once after initial start.
				currentRawChunks[i] = new GroupedRawData(timeMilliseconds, getSourceParams().getGroupChannels(i),
						startSampleTime, dlControl.getDLParams().rawSampleSize, dlControl.getDLParams().rawSampleSize);
						startSampleTime, rawSampleSize, rawSampleSize);

				currentRawChunks[i].setParentDataUnit(unit);;
			}
@@ -615,7 +625,7 @@ public class SegmenterProcess extends PamProcess {

			//segments which do not include any last zero padded segmen- zeros can confuse deep learning models so it may be better to keep use
			//this instead of zero padding end chunks.
			int nChunks = (int) Math.ceil((overFlow)/(double) dlControl.getDLParams().sampleHop);
			int nChunks = (int) Math.ceil((overFlow)/(double) rawSampleHop);

			nChunks = Math.max(nChunks, 1); //cannot be less than one (if forceSave is used then can be zero if no overflow)
			nextRawChunks[i]=new GroupedRawData[nChunks];
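A worked sketch of the chunk count above (illustrative numbers; overFlow is taken to be the samples left over past the current segment window):

	int overFlow = 3000;      // e.g. 3000 samples spill past the current segment
	int rawSampleHop = 1024;  // hop configured in RawDLParams
	int nChunks = (int) Math.ceil(overFlow / (double) rawSampleHop); // ceil(3000/1024) = 3 follow-on segments
	nChunks = Math.max(nChunks, 1); // clamps to 1, e.g. the whole-chunk case where overFlow is 0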
@@ -638,11 +648,11 @@ public class SegmenterProcess extends PamProcess {

				//go from current raw chunks tim millis to try and minimise compounding time errors.
				// long timeMillis = (long) (currentRawChunks[i].getTimeMilliseconds() + j*(1000.*(dlControl.getDLParams().sampleHop)/this.getSampleRate()));
				long startSample = lastRawDataChunk.getStartSample() + dlControl.getDLParams().sampleHop;
				long startSample = lastRawDataChunk.getStartSample() + rawSampleHop;
				long timeMillis = this.absSamplesToMilliseconds(startSample);

				nextRawChunks[i][j] = new GroupedRawData(timeMillis, getSourceParams().getGroupChannels(i),
						startSample, dlControl.getDLParams().rawSampleSize, dlControl.getDLParams().rawSampleSize);
						startSample, rawSampleSize, rawSampleSize);
				nextRawChunks[i][j].setParentDataUnit(unit);

			}
src/test/rawDeepLearningClassifier/ClickDLTest.java (new file, 31 lines)
@@ -0,0 +1,31 @@
package test.rawDeepLearningClassifier;

import org.junit.jupiter.api.Test;

import rawDeepLearningClassifier.dlClassification.genericModel.GenericModelParams;
import rawDeepLearningClassifier.dlClassification.genericModel.GenericModelWorker;

public class ClickDLTest {


	@Test
	public void clickDLTest() {
		//relative paths to the resource folders.
		System.out.println("*****Click classification Deep Learning*****");

		//relative paths to the resource folders.
		String relModelPath = "D:/Dropbox/PAMGuard_dev/Deep_Learning/click_classifier_Thomas/best_model/saved_model.pb";


		GenericModelWorker genericModelWorker = new GenericModelWorker();

		GenericModelParams genericModelParams = new GenericModelParams();

		genericModelParams.modelPath = relModelPath;

		genericModelWorker.prepModel(genericModelParams, null);


	}

}
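The new test only exercises model loading; a hedged sketch of a follow-up assertion, assuming JUnit 5's Assertions class and taking prepModel() completing without an exception as the success criterion:

	import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;

	assertDoesNotThrow(() -> genericModelWorker.prepModel(genericModelParams, null));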
@@ -5,8 +5,8 @@ import org.junit.jupiter.api.Test;

public class PamZipDLClassifierTest {

	/**
	 * Test the koogu classifier and tests are working properly. This tests loading the koogu model and also using
	 * functions in KooguWorker.
	 * Test the koogu classifier and tests are working properly for a PAMGuard zip model - i.e. this is a very similar model to Koogu but zipped with a .zip
	 * filename instead of .kgu.
	 */
	@Test
	public void zipClassifierTest() {
@@ -21,4 +21,7 @@ public class PamZipDLClassifierTest {
		KooguDLClassifierTest.runKooguClassifier( relModelPath, relWavPath, relMatPath);
	}




}