commit 0eca6a1a9f (parent 198071e051)

    Updating deep learning module to support delphinID models
@@ -6,7 +6,7 @@
 			<attribute name="maven.pomderived" value="true"/>
 		</attributes>
 	</classpathentry>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/Amazon Coretto 21">
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/jdk-21.0.2.13-hotspot">
 		<attributes>
 			<attribute name="module" value="true"/>
 			<attribute name="maven.pomderived" value="true"/>
@@ -411,26 +411,29 @@ public class ConnectionPane extends Pane {
 		if (includeBranch) {
 			StandardConnectionSocket socket;
 			for (int i = 0; i < parentNode.getConnectionPlugs().size(); i++) {
-				socket = (StandardConnectionSocket) parentNode.getConnectionPlugs().get(i).getConnectedShape();
-				if (socket == null || !socket.isBranch())
-					continue;
-				// is this socket connected to child node?
-				if (socket.getParentConnectionPlug().getConnectedShape() != null) {
+				if (parentNode.getConnectionPlugs().get(i).getConnectedShape() instanceof StandardConnectionSocket) {
+					socket = (StandardConnectionSocket) parentNode.getConnectionPlugs().get(i).getConnectedShape();
+					if (socket == null || !socket.isBranch())
+						continue;
+					// is this socket connected to child node?
+					if (socket.getParentConnectionPlug().getConnectedShape() != null) {
 
 					if (socket.getParentConnectionPlug().getConnectedShape().getConnectionNode() == childNode) {
 						// return socket.getParentConnectionPlug();
 						node = socket.getConnectedShape();
 					}
 					else if (bypassStructure && socket.getParentConnectionPlug().getConnectedShape()
 							.getConnectionNode() instanceof ConnectionStructure) {
 						// go down another level
 						node = getConnectionPlug(parentNode,
 								socket.getParentConnectionPlug().getConnectedShape().getConnectionNode(), includeBranch,
 								bypassStructure);
+					}
 				}
 
+				if (node != null)
+					return node;
 			}
-			if (node != null)
-				return node;
-
 			}
 		}
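The change above guards the cast with an instanceof check, so connection plugs attached to something other than a StandardConnectionSocket are simply skipped instead of triggering a ClassCastException. A minimal sketch of the same guard-before-cast pattern follows; the Shape/ConnectionSocket names are hypothetical stand-ins, not the PAMGuard classes, and it uses pattern matching for instanceof (available from Java 16, so well within the JDK 21 toolchain set in the classpath change above), whereas the actual commit keeps the explicit cast:

// Minimal sketch of the guard-before-cast pattern (hypothetical types, not PAMGuard API).
interface ConnectedShape { }

class ConnectionSocket implements ConnectedShape {
	boolean isBranch() { return true; }
}

class GuardExample {
	static ConnectionSocket asBranchSocket(ConnectedShape shape) {
		// Pattern matching for instanceof tests and casts in one step,
		// returning null for nulls and non-socket shapes instead of throwing.
		if (shape instanceof ConnectionSocket socket && socket.isBranch()) {
			return socket;
		}
		return null;
	}
}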
@@ -12,7 +12,7 @@ public enum DLStatus {
 			" The model failed to load - this could be because it is incompatible with PAMGuard or an unknown file format.",
 			ErrorLevel.ERROR),
 
-	MODEL_LOAD_SUCCESS("The model loaded", " The model successfully load", ErrorLevel.NO_ERROR),
+	MODEL_LOAD_SUCCESS("The model loaded", " The model successfully loaded", ErrorLevel.NO_ERROR),
 
 	DOWNLOAD_STARTING("Download starting", "The model is downloading", ErrorLevel.NO_ERROR),
 
New file (106 lines): src/rawDeepLearningClassifier/DLZipUtils.java

package rawDeepLearningClassifier;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;


public class DLZipUtils {

	/**
	 * Find the first file within a zip folder that matches a pattern. This peeks into the zip file instead of decompressing it.
	 * @param zipUri - uri to the zip file
	 * @param filePattern - the file pattern to match - the file must contain this string.
	 * @return null if no file is found, and the file path if the file is found.
	 * @throws ZipException
	 * @throws IOException
	 */
	public static String getZipFilePath(URI zipUri, String filePattern) throws ZipException, IOException {
		return getZipFilePath(new File(zipUri), filePattern);
	}


	/**
	 * Extract a single file from a zip folder.
	 * @param zipPackage - URI to the zip file
	 * @param fileToBeExtracted - the name of the file to be extracted
	 * @param outFolder - the folder path to extract the file to.
	 * @return the File that has been extracted.
	 * @throws IOException
	 * @throws URISyntaxException
	 */
	public static File extractFile(String zipPackage, String fileToBeExtracted, String outFolder) throws IOException, URISyntaxException {
		return extractFile(new URI(zipPackage), fileToBeExtracted, outFolder);
	}


	/**
	 * Extract a single file from a zip folder.
	 * @param zipPackage - path to the zip file
	 * @param fileToBeExtracted - the name of the file to be extracted
	 * @param outFolder - the folder path to extract the file to.
	 * @return the File that has been extracted.
	 * @throws IOException
	 */
	public static File extractFile(URI zipPackage, String fileToBeExtracted, String outFolder) throws IOException {
		File fileOut = new File(outFolder, fileToBeExtracted);
		OutputStream out = new FileOutputStream(fileOut);
		FileInputStream fileInputStream = new FileInputStream(new File(zipPackage));
		BufferedInputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
		ZipInputStream zin = new ZipInputStream(bufferedInputStream);
		ZipEntry ze = null;
		while ((ze = zin.getNextEntry()) != null) {
			if (ze.getName().equals(fileToBeExtracted)) {
				byte[] buffer = new byte[9000];
				int len;
				while ((len = zin.read(buffer)) != -1) {
					out.write(buffer, 0, len);
				}
				out.close();
				break;
			}
		}
		zin.close();

		return fileOut;
	}


	/**
	 * Find the first file within a zip folder that matches a pattern.
	 * @param zipFileIn - the zip file
	 * @param filePattern - the file pattern to match - the file must contain this string.
	 * @return null if no file is found, and the file path if the file is found.
	 * @throws ZipException
	 * @throws IOException
	 */
	public static String getZipFilePath(File zipFileIn, String filePattern) throws ZipException, IOException {

		try (ZipFile zipFile = new ZipFile(zipFileIn)) {
			Enumeration<? extends ZipEntry> entries = zipFile.entries();
			//this iterates through all files, including in sub folders.
			while (entries.hasMoreElements()) {
				ZipEntry entry = entries.nextElement();
				// Check if entry is a directory
				if (!entry.isDirectory()) {
					//System.out.println(entry);
					if (entry.getName().contains(filePattern)) {
						return entry.getName();
					}
				}
			}
		}
		return null;
	}

}
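A usage sketch for the new utility class. The archive path and the ".pdtf" pattern below are illustrative only; the two calls (getZipFilePath and extractFile) are the methods defined in the file above.

import java.io.File;
import java.io.IOException;
import java.net.URI;

import rawDeepLearningClassifier.DLZipUtils;

public class DLZipUtilsExample {
	public static void main(String[] args) throws IOException {
		// Hypothetical model archive; any zip containing a *.pdtf settings file would do.
		URI zipUri = new File("/path/to/model.zip").toURI();

		// Peek into the archive without decompressing it.
		String settingsPath = DLZipUtils.getZipFilePath(zipUri, ".pdtf");

		if (settingsPath != null) {
			// Pull just that one entry out to a temporary folder.
			File extracted = DLZipUtils.extractFile(zipUri, settingsPath,
					System.getProperty("java.io.tmpdir"));
			System.out.println("Extracted settings file: " + extracted);
		}
	}
}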
@@ -1,14 +1,21 @@
 package rawDeepLearningClassifier.dlClassification;
 
+import java.io.File;
+import java.io.IOException;
 import java.net.URI;
 
+import org.codehaus.plexus.util.FileUtils;
+import org.jamdev.jdl4pam.transforms.jsonfile.DLTransformsParser;
+import org.json.JSONObject;
+
+import ai.djl.repository.FilenameUtils;
 import rawDeepLearningClassifier.DLControl;
+import rawDeepLearningClassifier.DLZipUtils;
 
 /**
- * Selects which type of DL classiifer to use.
+ * Selects which type of DL classifier to use.
+ *
  * @author Jamie Macaulay
- *
- *
  */
 public class DLClassifierChooser {
 
@@ -19,10 +26,62 @@ public class DLClassifierChooser {
 		this.dlControl = dlControl;
 	}
 
+	/**
+	 * Select the correct classifier model to use from a URI.
+	 * @param modelURI - the URI
+	 * @return the classifier model that runs the file.
+	 */
 	public DLClassiferModel selectClassiferModel(URI modelURI) {
-		//check for model compatibility.
+
+		String extension = FileUtils.getExtension(modelURI.getPath());
 
 
+		if (extension.equals("zip")) {
+			//If the model is a zip file it may contain a model and a metadata file. Want to check
+			//this metadata file in case it points to a particular framework.
+			try {
+				String settingsFile = DLZipUtils.getZipFilePath(modelURI, ".pdtf");
+				System.out.println("Settings file: " +settingsFile);
+				if (settingsFile!=null) {
+					//there's a settings file - does it contain a metadata field describing which
+					//type of classifier it belongs to?
+					String outFolder = System.getProperty("user.home") + File.separator + "PAMGuard_temp";
+					new File(outFolder).mkdir();
+
+					File file = DLZipUtils.extractFile(modelURI, settingsFile, outFolder);
+					if (file!=null) {
+						//now we need to open the file and get a specific JSON field which describes the model.
+						//read the JSON string from the file.
+						String jsonString = DLTransformsParser.readJSONString(file);
+						JSONObject object = new JSONObject(jsonString);
+
+						JSONObject frameworkObject = object.getJSONObject("framework_info");
+						String frameworkString = frameworkObject.getString("framework");
+
+						for (DLClassiferModel model: dlControl.getDLModels()) {
+							//System.out.println("frameworkString: " + frameworkString + " " + model.getName());
+							if (model.getName().toLowerCase().equals(frameworkString.trim().toLowerCase())) return model;
+						}
+
+					}
+					else {
+						System.err.println("DLClassifierChooser: Unable to extract the metadata file even though it exists in the zip archive:??");
+					}
+				}
+				else {
+					System.err.println("DLClassifierChooser: The zip file does not contain a metadata file for the deep learning model");
+				}
+			} catch (IOException e) {
+				// TODO Auto-generated catch block
+				e.printStackTrace();
+			}
+		}
+
+		// Check for model compatibility. This usually means checking for a file
+		// extension. Note that a koogu .kgu model, for example, can contain a metadata
+		// file without the correct "koogu" field. In this instance the model is loaded
+		// properly because of the file extension but would not have been loaded by
+		// checking the metadata as above.
 		for (DLClassiferModel model: dlControl.getDLModels()) {
 			if (model.isModelType(modelURI)) return model;
 		}
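The new zip branch above extracts the .pdtf settings file and reads a "framework" string from its "framework_info" object, then matches it case-insensitively against each registered model's getName(). A small sketch of that metadata read follows; only the two keys actually read by the chooser are taken from the code above, and the rest of the JSON fragment (including the "delphinID" value, which matches the new classifier's name) is illustrative:

import org.json.JSONObject;

public class FrameworkFieldExample {
	public static void main(String[] args) {
		// Illustrative metadata fragment; only "framework_info"/"framework" are read by the chooser.
		String jsonString = "{ \"framework_info\": { \"framework\": \"delphinID\" } }";

		JSONObject object = new JSONObject(jsonString);
		String framework = object.getJSONObject("framework_info").getString("framework");

		// The chooser compares this, trimmed and lower-cased, with each model's getName().
		System.out.println("Declared framework: " + framework.trim().toLowerCase());
	}
}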
@@ -87,6 +87,7 @@ public abstract class StandardClassifierModel implements DLClassiferModel, PamSe
 			return modelResult; //returns to the classifier.
 		}
 		else {
+			//REAL TIME
 			//add to a buffer if in real time.
 			if (workerThread.getQueue().size()>DLModelWorker.MAX_QUEUE_SIZE) {
 				//we are not doing well - clear the buffer
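The real-time branch above queues incoming segments for the worker thread and clears the queue whenever it grows beyond DLModelWorker.MAX_QUEUE_SIZE, trading dropped data for keeping up with the audio stream. A generic sketch of that drop-on-overload policy follows; the class, field, and size below are hypothetical stand-ins, not the PAMGuard worker API:

import java.util.concurrent.ConcurrentLinkedQueue;

// Generic sketch of the drop-on-overload policy used in the real-time branch above;
// BoundedWorkQueue and MAX_QUEUE_SIZE are hypothetical stand-ins, not PAMGuard classes.
class BoundedWorkQueue<T> {
	static final int MAX_QUEUE_SIZE = 10;

	private final ConcurrentLinkedQueue<T> queue = new ConcurrentLinkedQueue<>();

	void offerTask(T task) {
		if (queue.size() > MAX_QUEUE_SIZE) {
			// Real-time processing is falling behind: discard the backlog rather than stall.
			queue.clear();
		}
		queue.add(task);
	}

	T poll() {
		return queue.poll();
	}
}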
@@ -62,9 +62,7 @@ public class SoundSpotClassifier extends StandardClassifierModel {
 		this.soundSpotUI= new SoundSpotUI(this);
 		//load the previous settings
 		PamSettingManager.getInstance().registerSettings(this);
 
 		// System.out.println("LOADED CLASS NAMES: currParams.classNames: " + soundSpotParmas.classNames);
 
 	}
-
-
@@ -201,6 +201,7 @@ public abstract class StandardModelPane extends SettingsPane<StandardModelParams
 		//speciesIDBox.setMaxWidth(100);
 		// speciesIDBox.setPrefWidth(100);
 		speciesIDBox.prefHeightProperty().bind(detectionSpinner.heightProperty());
+		speciesIDBox.setMaxWidth(200); //otherwise expands too much if multiple classes selected
 
 		validator = new PamValidator();
 
@@ -177,7 +177,6 @@ public abstract class ArchiveModelClassifier extends StandardClassifierModel {
 
 	@Override
 	public boolean isModelType(URI uri) {
-		//Ketos is easy because there are not many files with a .ktpb extension.
 		return super.isModelExtensions(uri);
 	}
 
@@ -257,32 +257,5 @@ public class ArchiveModelWorker extends GenericModelWorker {
 		return dlModel==null;
 	}
 
-	/**
-	 * Find the first file within a zip folder that matches a pattern.
-	 * @param zipFile - uri to the zip file
-	 * @param filePattern - the file pattern to match - the file must contain this string.
-	 * @return null if no file found and the file pqth if the file is founf
-	 * @throws ZipException
-	 * @throws IOException
-	 */
-	static String getZipFilePath(File zipFileIn, String filePattern) throws ZipException, IOException {
-
-		try (ZipFile zipFile = new ZipFile(zipFileIn)) {
-			Enumeration<? extends ZipEntry> entries = zipFile.entries();
-			//this iterates through all files, including in sub folders.
-			while (entries.hasMoreElements()) {
-				ZipEntry entry = entries.nextElement();
-				// Check if entry is a directory
-				if (!entry.isDirectory()) {
-					//System.out.println(entry);
-					if (entry.getName().contains(filePattern)) {
-						return entry.getName();
-					}
-				}
-			}
-		}
-		return null;
-	}
-
 
 }
@@ -6,6 +6,7 @@ import java.net.URI;
 import java.util.zip.ZipException;
 import rawDeepLearningClassifier.DLControl;
 import rawDeepLearningClassifier.DLStatus;
+import rawDeepLearningClassifier.DLZipUtils;
 
 /**
  * Loads a zip file and checks for a saved model alongside a pamguard settings file
@@ -72,8 +73,8 @@ public class PamZipModelClassifier extends ArchiveModelClassifier {
 	 * @throws ZipException
 	 * @throws IOException
 	 */
-	private static String getZipFilePath(URI zipUri, String filePattern) throws ZipException, IOException {
-		return ArchiveModelWorker.getZipFilePath(new File(zipUri), filePattern);
+	public static String getZipFilePath(URI zipUri, String filePattern) throws ZipException, IOException {
+		return DLZipUtils.getZipFilePath(new File(zipUri), filePattern);
 	}
 
 	/**
@@ -58,9 +58,14 @@ public class SimpleArchiveModel extends ArchiveModel {
 		return "zip_model";
 	}
 
+	/**
+	 * Get the relative path of a file within a zip folder.
+	 * @param zipFolder
+	 * @param fileEnd
+	 * @return
+	 * @throws IOException
+	 */
 	private static String getRelFilePath(String zipFolder, String fileEnd) throws IOException {
-		// find files matched `png` file extension from folder C:\\test
 		try (Stream<Path> walk = Files.walk(Paths.get(zipFolder))) {
 			List<String> result = walk
 					.filter(p -> !Files.isDirectory(p)) // not a directory
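Only the start of getRelFilePath is visible in this hunk: it walks the unpacked zip folder with Files.walk and filters out directories, presumably keeping the first entry that matches fileEnd. A standalone sketch of that Files.walk pattern follows; the folder and ".pdtf" suffix are hypothetical, and this is not the PAMGuard method itself:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.stream.Stream;

public class FindFileBySuffix {
	// Returns the first regular file under 'folder' whose name ends with 'suffix', if any.
	static Optional<Path> findFirst(String folder, String suffix) throws IOException {
		try (Stream<Path> walk = Files.walk(Paths.get(folder))) {
			return walk
					.filter(p -> !Files.isDirectory(p)) // not a directory
					.filter(p -> p.getFileName().toString().endsWith(suffix))
					.findFirst();
		}
	}

	public static void main(String[] args) throws IOException {
		// Hypothetical unpacked model folder and settings-file suffix.
		findFirst(System.getProperty("java.io.tmpdir"), ".pdtf")
				.ifPresent(p -> System.out.println("Found: " + p));
	}
}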
New file (41 lines): DelphinIDClassifier.java

package rawDeepLearningClassifier.dlClassification.delphinID;

import rawDeepLearningClassifier.DLControl;
import rawDeepLearningClassifier.dlClassification.archiveModel.ArchiveModelClassifier;
import rawDeepLearningClassifier.dlClassification.archiveModel.ArchiveModelWorker;

/**
 * A classifier based on the delphinID method which uses whistle contours to predict
 * dolphin species.
 *
 * @author Jamie Macaulay
 *
 */
public class DelphinIDClassifier extends ArchiveModelClassifier {

	private static final String MODEL_NAME = "delphinID";

	/**
	 * Reference to the worker
	 */
	private DelphinIDWorker delphinIDWorker;

	public DelphinIDClassifier(DLControl dlControl) {
		super(dlControl);
	}

	@Override
	public String getName() {
		return MODEL_NAME;
	}

	@Override
	public ArchiveModelWorker getModelWorker() {
		if (delphinIDWorker == null) {
			delphinIDWorker = new DelphinIDWorker();
		}
		return delphinIDWorker;
	}

}
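DelphinIDClassifier reports its name as "delphinID" and lazily creates a DelphinIDWorker; the name is what lets the metadata check in DLClassifierChooser pick this model when a zip's framework field declares "delphinID". A sketch of that name-matching step in isolation follows; NamedModel is a hypothetical stand-in for DLClassiferModel, and the model list and framework string are illustrative, but the comparison mirrors the chooser code above:

import java.util.List;

// Sketch of the case-insensitive name match used by DLClassifierChooser;
// NamedModel is a hypothetical stand-in for DLClassiferModel.
class ModelNameMatchExample {
	interface NamedModel { String getName(); }

	static NamedModel matchFramework(List<NamedModel> models, String frameworkString) {
		for (NamedModel model : models) {
			if (model.getName().toLowerCase().equals(frameworkString.trim().toLowerCase())) {
				return model;
			}
		}
		return null;
	}

	public static void main(String[] args) {
		NamedModel delphinID = () -> "delphinID"; // mirrors DelphinIDClassifier.getName()
		System.out.println(matchFramework(List.of(delphinID), " DelphinID ").getName());
	}
}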
New file (12 lines): DelphinIDParams.java

package rawDeepLearningClassifier.dlClassification.delphinID;

import rawDeepLearningClassifier.dlClassification.animalSpot.StandardModelParams;

public class DelphinIDParams extends StandardModelParams {

	/**
	 *
	 */
	private static final long serialVersionUID = 1L;

}
New file (10 lines): DelphinIDWorker.java

package rawDeepLearningClassifier.dlClassification.delphinID;

import rawDeepLearningClassifier.dlClassification.archiveModel.ArchiveModelWorker;

public class DelphinIDWorker extends ArchiveModelWorker {

}
@@ -39,6 +39,7 @@ import rawDeepLearningClassifier.DLControl;
 import rawDeepLearningClassifier.DLStatus;
 import rawDeepLearningClassifier.RawDLParams;
 import warnings.PamWarning;
+import whistlesAndMoans.ConnectedRegionDataUnit;
 
 /**
  * The settings pane.
@@ -159,6 +160,7 @@ public class DLSettingsPane extends SettingsPane<RawDLParams>{
 		sourcePane = new GroupedSourcePaneFX("Raw Sound Data", RawDataUnit.class, true, false, true);
 		sourcePane.addSourceType(ClickDetection.class, false);
 		sourcePane.addSourceType(ClipDataUnit.class, false);
+		sourcePane.addSourceType(ConnectedRegionDataUnit.class, false);
 
 
 		vBox.getChildren().add(sourcePane);
New file (20 lines): SegmenterDetectionGroup.java

package rawDeepLearningClassifier.segmenter;

import Localiser.detectionGroupLocaliser.GroupDetection;
import PamguardMVC.PamDataUnit;

/**
 * A group of detections which are within a particular segment. This is used to pass detection groups straight to
 *
 * @author Jamie Macaulay
 *
 */
public class SegmenterDetectionGroup extends GroupDetection<PamDataUnit> {

	public SegmenterDetectionGroup(long timeMilliseconds, int channelBitmap, long startSample, long duration) {
		super(timeMilliseconds, channelBitmap, startSample, duration);
		// TODO Auto-generated constructor stub
	}

}
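SegmenterDetectionGroup wraps the detections (for example whistle contours) that fall inside one segmenter window. A construction sketch follows; the time, channel, and duration values are arbitrary, and the idea that whistle ConnectedRegionDataUnits would then be attached as sub-detections via the inherited GroupDetection machinery is an assumption, not something shown in this commit:

import rawDeepLearningClassifier.segmenter.SegmenterDetectionGroup;

class SegmenterGroupExample {
	// Illustrative values only; the constructor signature is the one defined above.
	static SegmenterDetectionGroup newOneSecondGroup(long segmentStartMillis) {
		int channelBitmap = 1;        // channel 0
		long startSample = 0;
		long segmentSamples = 48000;  // one second at an assumed 48 kHz sample rate
		return new SegmenterDetectionGroup(segmentStartMillis, channelBitmap, startSample, segmentSamples);
	}
}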
New file (18 lines): SegmenterGroupDataBlock.java

package rawDeepLearningClassifier.segmenter;

import PamguardMVC.PamDataBlock;
import PamguardMVC.PamProcess;

/**
 *
 * @author Jamie Macaulay
 *
 */
public class SegmenterGroupDataBlock extends PamDataBlock<SegmenterDetectionGroup> {

	public SegmenterGroupDataBlock(String dataName, PamProcess parentProcess, int channelMap) {
		super(SegmenterDetectionGroup.class, dataName, parentProcess, channelMap);
		// TODO Auto-generated constructor stub
	}

}
@@ -21,6 +21,7 @@ import PamguardMVC.debug.Debug;
 import clickDetector.ClickDetection;
 import clipgenerator.ClipDataUnit;
 import rawDeepLearningClassifier.DLControl;
+import whistlesAndMoans.ConnectedRegionDataUnit;
 import PamUtils.PamCalendar;
 
 
@@ -60,6 +61,11 @@ public class SegmenterProcess extends PamProcess {
 	PamSymbol defaultSymbol = new PamSymbol(PamSymbolType.SYMBOL_DIAMOND, 10, 12, false,
 			Color.CYAN, Color.CYAN);
 
+	/**
+	 * Holds groups of data units which are within a defined segment.
+	 */
+	private SegmenterGroupDataBlock segmenterGroupDataBlock;
+
 
 	public SegmenterProcess(DLControl pamControlledUnit, PamDataBlock parentDataBlock) {
 		super(pamControlledUnit, parentDataBlock);
@@ -76,7 +82,12 @@ public class SegmenterProcess extends PamProcess {
 
 		segmenterDataBlock = new SegmenterDataBlock("Segmented Raw Data", this,
 				dlControl.getDLParams().groupedSourceParams.getChanOrSeqBitmap());
 
+		segmenterGroupDataBlock = new SegmenterGroupDataBlock("Segmented Raw Data", this,
+				dlControl.getDLParams().groupedSourceParams.getChanOrSeqBitmap());
+
 		addOutputDataBlock(segmenterDataBlock);
+		addOutputDataBlock(segmenterGroupDataBlock);
+
 		setProcessName("Segmenter");
 
@@ -208,11 +219,25 @@ public class SegmenterProcess extends PamProcess {
 			else if (pamRawData instanceof ClipDataUnit) {
 				newClipData(pamRawData);
 			}
+			else if (pamRawData instanceof ConnectedRegionDataUnit) {
+				newWhistleData(pamRawData);
+			}
 		}
 
 	}
 
 
+	/**
+	 * A new whistle data unit.
+	 * @param dataUnit - the whistle data unit.
+	 */
+	private void newWhistleData(PamDataUnit dataUnit) {
+		ConnectedRegionDataUnit rawDataUnit = (ConnectedRegionDataUnit) dataUnit;
+
+
+	}
+
+
 	/**
 	 * A new raw data unit.
 	 * @param obs - the PAM observable
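newWhistleData is currently a stub: it casts the incoming unit to ConnectedRegionDataUnit and does nothing further. One plausible continuation, not the author's implementation and not part of this commit, would be to collect whistles into the SegmenterDetectionGroup block created in the constructor; everything below beyond the cast is an assumption, and addSubDetection/addPamData are taken to be the standard GroupDetection and PamDataBlock calls:

	// Hypothetical continuation of newWhistleData() - NOT part of this commit.
	// Assumes the standard PamDataBlock.addPamData(...) call and one group per segment
	// (segment bookkeeping omitted for brevity).
	private void newWhistleData(PamDataUnit dataUnit) {
		ConnectedRegionDataUnit whistle = (ConnectedRegionDataUnit) dataUnit;

		// e.g. start a group at the first whistle of a segment.
		SegmenterDetectionGroup group = new SegmenterDetectionGroup(
				whistle.getTimeMilliseconds(), whistle.getChannelBitmap(),
				whistle.getStartSample(), whistle.getSampleDuration());

		group.addSubDetection(whistle);               // assumed GroupDetection API
		segmenterGroupDataBlock.addPamData(group);    // pass the group downstream
	}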
@@ -344,7 +369,7 @@ public class SegmenterProcess extends PamProcess {
 	 *
 	 * @param unit - the data unit which contains relevant metadata on time
 	 * etc.
-	 * @param rawDataChunk - the sound chunk to segment extracted form the data
+	 * @param rawDataChunk - the sound chunk to segment extracted from the data
 	 * unit.
 	 * @param iChan - the channel that is being segmented
 	 * @param forceSave - make sure that all data is passed into the buffers and