Mirror of https://github.com/PAMGuard/PAMGuard.git (synced 2024-10-29 23:30:44 +00:00)
Ishmael sort out and improve inverse FFT's (#159)
* Ishmael Tethys output: Added the necessary classes for Tethys output from the Ishmael detectors. Also found a fairly major bug in the spectrogram correlation detector, where for each block of data it was only testing the first sample of the block, not all samples, for being over threshold.
* Speed and algorithm improvements to the Ishmael matched filter: There seemed to be errors in the correlation, it didn't support multiple channels, and it used a very old and slow FFT, so working to fix all three issues.
* Updated matched filter: Updated and working matched filter, though there is still some thinking to do about how the scaling works, since it is currently scaled by the template only, so the whole thing depends on the input. Need to think of a better way to do this.
* Updated matched filter normalisation: Normalisation now correctly uses both the template and the signal, so the output is independent of the data amplitude.
* invFFT improvements: Use the faster inverse FFT of real data in the correlation / delay functions.
* Improve inverse FFTs in other modules to improve TDOA speeds.
* Sorting out the mess of the spectrogram plugin graphics: Have got the Ishmael ones scrolling, but when scrolling there is an offset in the data due to the lag of the correlation functions. Quite hard to fix with the available timing data.
* Improve Ishmael spectrogram plugin: Sorted the scaling and scrolling problems.
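The normalisation point above is implemented in MatchFiltProcess2.processBuffer() further down this diff: each raw cross-correlation value is divided by the square root of (template energy x signal energy under the template window), with the signal energy updated one sample at a time. As a reference, a minimal standalone sketch of that idea follows; the method and variable names are illustrative, not PAMGuard's API, and it assumes signal has at least nUseful + kernel.length samples.

    // Sketch only: amplitude-independent matched-filter output, assuming xCorr[i]
    // holds the raw cross-correlation of signal with kernel starting at sample i.
    static double[] normalise(double[] xCorr, double[] signal, double[] kernel, int nUseful) {
        double kernelEnergy = 0;                 // sum of squares of the template
        for (double k : kernel) kernelEnergy += k * k;
        double windowEnergy = 0;                 // energy of the first kernel-length window of signal
        for (int i = 0; i < kernel.length; i++) windowEnergy += signal[i] * signal[i];
        double[] out = new double[nUseful];
        for (int i = 0, j = kernel.length; i < nUseful; i++, j++) {
            out[i] = xCorr[i] / Math.sqrt(kernelEnergy * windowEnergy);
            windowEnergy -= signal[i] * signal[i];   // slide the signal-energy window
            windowEnergy += signal[j] * signal[j];   // one sample to the right
        }
        return out;
    }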
This commit is contained in:
parent 274997b6c4
commit 3c27717479

.gitignore (vendored, 1 line changed)
@@ -116,3 +116,4 @@ settings.xml
.classpath
.settings/org.eclipse.jdt.core.prefs
.classpath
.project

.project (2 lines changed)
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>PAMGuard</name>
<name>PAMGuard_DG</name>
<comment></comment>
<projects>
</projects>

@@ -22,5 +22,5 @@ org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning
org.eclipse.jdt.core.compiler.release=enabled
org.eclipse.jdt.core.compiler.release=disabled
org.eclipse.jdt.core.compiler.source=19

src/IshmaelDetector/IshDataBlock.java (new file, 39 lines)
@@ -0,0 +1,39 @@
package IshmaelDetector;

import IshmaelDetector.tethys.IshmaelSpeciesManager;
import IshmaelDetector.tethys.IshmaelTethysProvider;
import PamguardMVC.PamDataBlock;
import PamguardMVC.PamProcess;
import tethys.TethysControl;
import tethys.pamdata.TethysDataProvider;
import tethys.species.DataBlockSpeciesManager;

public class IshDataBlock extends PamDataBlock<IshDetection> {

    private IshmaelTethysProvider ishmaelTethysProvider;

    private IshmaelSpeciesManager ishmaelSpeciesManager;

    public IshDataBlock(String dataName, PamProcess parentProcess, int channelMap) {
        super(IshDetection.class, dataName, parentProcess, channelMap);
    }

    @Override
    public TethysDataProvider getTethysDataProvider(TethysControl tethysControl) {
        if (ishmaelTethysProvider == null) {
            ishmaelTethysProvider = new IshmaelTethysProvider(tethysControl, this);
        }
        return ishmaelTethysProvider;
    }

    @Override
    public DataBlockSpeciesManager<IshDetection> getDatablockSpeciesManager() {
        if (ishmaelSpeciesManager == null) {
            ishmaelSpeciesManager = new IshmaelSpeciesManager(this);
        }
        return ishmaelSpeciesManager;
    }

}

@@ -120,10 +120,16 @@ public abstract class IshDetControl extends PamControlledUnit implements PamSett
     * then they would show up in the Data Model.
     */
    public void prepareNonDetProcesses() {
        if (ishDetGraphics != null) {
            ishDetGraphics.prepareForRun();
        }
        if (ishPeakProcess != null) {
            ishPeakProcess.prepareForRun();
        }
        if (ishDetSave != null) {
            ishDetSave.prepareForRun();
        }
    }

    public JMenuItem createDetectionMenu(Frame parentFrame, String menuString) {
        JMenuItem menuItem = new JMenuItem(menuString);

@@ -58,9 +58,12 @@ public abstract class IshDetFnProcess extends PamProcess
        outputDataBlock = new PamDataBlock(
                IshDetFnDataUnit.class, getLongName(), this, 1 << channel);

        ishmealBinaryDataSource = new IshFnBinarySource(this, outputDataBlock, "Ishmael Fn");
        outputDataBlock.setBinaryDataSource(ishmealBinaryDataSource);
        /*
         * Remove binary output from the detection functions. Was useful in debugging
         * but the volume of the MatchFilter output is way to big.
         */
//      ishmealBinaryDataSource = new IshFnBinarySource(this, outputDataBlock, "Ishmael Fn");
//      outputDataBlock.setBinaryDataSource(ishmealBinaryDataSource);

//      outputDataBlock = new RecyclingDataBlock<IshDetFnDataUnit>(
//              IshDetFnDataUnit.class, getLongName(), this, 1 << channel);

@@ -2,6 +2,7 @@ package IshmaelDetector;

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.image.BufferedImage;
import java.io.Serializable;
@@ -12,6 +13,8 @@ import Layout.DisplayPanel;
import Layout.DisplayPanelContainer;
import Layout.DisplayPanelProvider;
import Layout.DisplayProviderList;
import Layout.PamAxis;
import Layout.RepeatedAxis;
import PamController.PamControlledUnitSettings;
import PamController.PamController;
import PamController.PamSettingManager;
@@ -19,6 +22,8 @@ import PamController.PamSettings;
import PamUtils.PamUtils;
import PamView.PamColors;
import PamView.PamColors.PamColor;
import PamView.PamSymbol;
import PamView.PamSymbolType;
import PamguardMVC.PamConstants;
import PamguardMVC.PamDataBlock;
import PamguardMVC.PamDataUnit;

@@ -48,7 +53,7 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
    /**
     * True for the very first line draw
     */
    private boolean firstTime;
    private boolean firstTime = true;

    /**
     * The active channels.
@@ -69,7 +74,7 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
    /**
     * Display parameters for the Ishamel
     */
    IshDisplayParams ishDisplayParams = new IshDisplayParams();
    private IshDisplayParams ishDisplayParams = new IshDisplayParams();

    public IshDetGraphics(IshDetControl ishDetControl, PamDataBlock ishDetDataBlock){
        super();
@@ -116,6 +121,12 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
     */
    private int lastImageHeight;

    PamAxis westAxis, eastAxis;

    private long lastSample;

    private PamSymbol symbol = new PamSymbol(PamSymbolType.SYMBOL_CROSS2, 6, 6, true, Color.GREEN, Color.GREEN);

    /**
     * Holds some display infor for each channel.
     * @author
@@ -127,12 +138,14 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
        int yLo, yHi; //Y-bounds of my display region
        int lastX, lastY, lastThreshy; //where the last drawn line ended, in pixels
        float yScale;
        public IshDetFnDataUnit lastDataUnit;

        public PerChannelInfo(int yLo, int yHi, float yScale) {
            this.yLo = yLo;
            this.yHi = yHi;
            this.yScale = yScale;
            lastX = 0;
            lastX = Integer.MIN_VALUE;
            lastThreshy = Integer.MIN_VALUE;
            xAdvance = -2; //special case meaning "skip first 2 values"
            lastY = yHi;
        }
@@ -173,6 +186,9 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
        if (ishDetDataBlock != null) {
            ishDetDataBlock.addObserver(this); //call my update() when unit added
        }
        westAxis = new PamAxis(0, 1, 0, 1, 0, 1, PamAxis.ABOVE_LEFT, "", PamAxis.LABEL_NEAR_CENTRE, "%3.2f");
        westAxis.setCrampLabels(true);
        makeChannelInfo();
    }

    @Override
@@ -225,6 +241,16 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
     */
    private float yScale = 1;

    private double currentXPixel = 0;

    private double scrollRemainder;

    private long currentImageTime = 0;

    private double currentImageX;

    private boolean lastWrap = false;

    @Override
    public void masterClockUpdate(long milliSeconds, long sampleNumber) {
        // TODO Auto-generated method stub
@@ -238,29 +264,227 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
|
||||
|
||||
@Override
|
||||
public void addData(PamObservable o, PamDataUnit dataUnit1) {
|
||||
IshDetFnDataUnit dataUnit = (IshDetFnDataUnit)dataUnit1;
|
||||
IshDetFnDataUnit ishDataUnit = (IshDetFnDataUnit)dataUnit1;
|
||||
/*
|
||||
* Need a serious rethink to work out how to stabilise this display,
|
||||
* particularly for scrolling with multiple channels.
|
||||
* Key parameters are display time (millis) and display X from
|
||||
* spectrogram, then need to a) work oout how much to scroll the
|
||||
* underlying image and b) work out the x limits for the new data
|
||||
* based on the scroll time.
|
||||
*/
|
||||
double[][] ishDetectorData = ishDataUnit.getDetData();
|
||||
if (ishDetectorData == null || ishDetectorData.length == 0) {
|
||||
return;
|
||||
}
|
||||
int ishTypes = ishDetectorData.length;
|
||||
int ishSamples = ishDetectorData[0].length;
|
||||
|
||||
checkYScale(ishDetectorData[0]);
|
||||
boolean wrap = getDisplayPanelContainer().wrapDisplay();
|
||||
if (wrap != lastWrap) {
|
||||
clearImage();
|
||||
lastWrap = wrap;
|
||||
}
|
||||
|
||||
if (ishDataUnit.getStartSample() < lastSample) {
|
||||
clearImage();
|
||||
firstTime = true;
|
||||
}
|
||||
lastSample = ishDataUnit.getStartSample();
|
||||
|
||||
long spectrogramMillis = 0;
|
||||
double spectrogramX = 0;
|
||||
double pixelsPerMilli = 0;
|
||||
synchronized (displayPanelContainer) {
|
||||
spectrogramX = displayPanelContainer.getCurrentXPixel();
|
||||
spectrogramMillis = displayPanelContainer.getCurrentXTime();
|
||||
pixelsPerMilli = getInnerWidth()/ displayPanelContainer.getXDuration();
|
||||
}
|
||||
|
||||
// need to know if this is the first channel in the datablock.
|
||||
int firstBlockChan = PamUtils.getLowestChannel(ishDetDataBlock.getChannelMap());
|
||||
int firstUnitChan = PamUtils.getLowestChannel(ishDataUnit.getChannelBitmap());
|
||||
boolean firstChan = (firstBlockChan == firstUnitChan);
|
||||
|
||||
PerChannelInfo chanInfo = perChannelInfo[firstUnitChan];
|
||||
|
||||
|
||||
BufferedImage image = getDisplayImage();
|
||||
if (image == null) return;
|
||||
// graphics handle to the image.
|
||||
Graphics2D g2d = (Graphics2D) image.getGraphics();
|
||||
int imageWidth = image.getWidth();
|
||||
int imageHeight = image.getHeight();
|
||||
|
||||
int threshY = (int) westAxis.getPosition(ishDetControl.ishDetParams.thresh);
|
||||
|
||||
/*
|
||||
* The spectrogram x and t are changing fast and asynchronously, so when the
|
||||
* first channel arrives, we need to work out a t and a x for the image, then stick
|
||||
* with them, possibly through multiple calls for multiple channels.
|
||||
*/
|
||||
if (firstChan) {
|
||||
if (!wrap) {
|
||||
/*
|
||||
* need to scroll. If currentImageTime is 0, then the display has just
|
||||
* been cleared, so it doesn't really matter where we start. Otherwise
|
||||
* we'll have to shift left to realign the times.
|
||||
*/
|
||||
if (currentImageTime == 0) {
|
||||
currentImageTime = spectrogramMillis;
|
||||
currentImageX = imageWidth;
|
||||
}
|
||||
else {
|
||||
double scrollPix = (spectrogramMillis - currentImageTime) * pixelsPerMilli;
|
||||
int intScrollPix = (int) Math.round(scrollPix);
|
||||
if (intScrollPix > 0) {
|
||||
g2d.drawImage(image, 0, 0, imageWidth-intScrollPix, imageHeight, intScrollPix, 0, imageWidth, imageHeight, null);
|
||||
// and clear the end bit (important!)
|
||||
g2d.setColor(plotBackground);
|
||||
clearImage(imageWidth-intScrollPix, imageWidth);
|
||||
}
|
||||
currentImageTime = (long) (spectrogramMillis-(scrollPix-intScrollPix)/pixelsPerMilli);
|
||||
currentImageX = imageWidth;
|
||||
for (int i = 0; i < perChannelInfo.length; i++) {
|
||||
if (perChannelInfo[i].lastX != Integer.MIN_VALUE) {
|
||||
perChannelInfo[i].lastX -= intScrollPix;
|
||||
}
|
||||
}
|
||||
g2d.setColor(Color.RED);
|
||||
g2d.drawLine(imageWidth-intScrollPix, threshY, imageWidth, threshY);
|
||||
}
|
||||
}
|
||||
else {
|
||||
currentImageTime = spectrogramMillis;
|
||||
currentImageX = spectrogramX;
|
||||
}
|
||||
}
|
||||
/*
|
||||
* should now have an imageX and and image time to scale and draw everything off.
|
||||
* note that everything above was just calculated for the display, and the actual data
|
||||
* may have happened a while ago, so recalclate the positions of the start and end of
|
||||
* the actual data.
|
||||
*/
|
||||
int dataStartX = (int) (currentImageX + (ishDataUnit.getTimeMilliseconds()-currentImageTime)*pixelsPerMilli);
|
||||
int dataEndX = (int) (currentImageX + (ishDataUnit.getEndTimeInMilliseconds()-currentImageTime)*pixelsPerMilli);
|
||||
if (wrap) {
|
||||
clearImage(dataStartX, dataEndX, true);
|
||||
g2d.setColor(Color.RED);
|
||||
g2d.drawLine(dataStartX, threshY, dataEndX, threshY);
|
||||
}
|
||||
double xScale = (double) (dataEndX-dataStartX)/(double) ishSamples;
|
||||
g2d.setColor(PamColors.getInstance().getChannelColor(firstUnitChan));
|
||||
for (int i = 0; i < ishSamples; i++) {
|
||||
int x = (int) (dataStartX + i*xScale)-1;
|
||||
if (wrap && x > imageWidth) {
|
||||
x -= imageWidth;
|
||||
chanInfo.lastX -= imageWidth;
|
||||
}
|
||||
int y = (int) westAxis.getPosition(ishDetectorData[0][i]);
|
||||
if (chanInfo.lastX > -imageWidth && x>=chanInfo.lastX-2) {
|
||||
g2d.drawLine(chanInfo.lastX, chanInfo.lastY, x, y);
|
||||
}
|
||||
chanInfo.lastX = x;
|
||||
chanInfo.lastY = y;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void checkYScale(double[] ishData) {
|
||||
double yScale = westAxis.getMaxVal(); // current value
|
||||
if (ishDisplayParams.autoScale) {
|
||||
// do something clever ...
|
||||
double max = getMax(ishData); // max of the data
|
||||
// and always make sure it's > the threshold value.
|
||||
max = Math.max(max, ishDetControl.ishDetParams.thresh * 1.1);
|
||||
max = PamUtils.roundNumberUpP(max, 2);
|
||||
if (max > yScale) {
|
||||
yScale = max;
|
||||
}
|
||||
}
|
||||
else {
|
||||
yScale = ishDisplayParams.verticalScaleFactor;
|
||||
}
|
||||
|
||||
if (westAxis.getMaxVal() != yScale) {
|
||||
westAxis.setMaxVal(yScale);
|
||||
displayPanelContainer.panelNotify(DisplayPanelContainer.DRAW_BORDER);
|
||||
}
|
||||
}
|
||||
|
||||
private double getMax(double[] data) {
|
||||
double d = 0;
|
||||
for (int i = 0; i < data.length; i++) {
|
||||
d = Math.max(d, data[i]);
|
||||
}
|
||||
return d;
|
||||
}
|
||||
|
||||
public void addDataOld(PamObservable o, PamDataUnit dataUnit1) {
|
||||
IshDetFnDataUnit ishDataUnit = (IshDetFnDataUnit)dataUnit1;
|
||||
//This is called whenever new data arrives from my parent.
|
||||
//PamDataBlock inputDataBlock = (PamDataBlock)o;
|
||||
|
||||
double[][] ishDetectorData = dataUnit.getDetData();
|
||||
double[][] ishDetectorData = ishDataUnit.getDetData();
|
||||
if (ishDetectorData == null || ishDetectorData.length == 0) {
|
||||
return;
|
||||
}
|
||||
int ishTypes = ishDetectorData.length;
|
||||
int ishSamples = ishDetectorData[0].length;
|
||||
|
||||
if (ishDataUnit.getStartSample() < lastSample) {
|
||||
clearImage();
|
||||
firstTime = true;
|
||||
}
|
||||
lastSample = ishDataUnit.getStartSample();
|
||||
/*
|
||||
* this is the number of samples at the original sample rate, some
|
||||
* detectors put out fewer levels than original samples, e.g. one per FFT.
|
||||
*/
|
||||
Long ishDuration = ishDataUnit.getSampleDuration();
|
||||
if (ishDuration == null) {
|
||||
return;
|
||||
}
|
||||
int ishScale = ishDuration.intValue() / ishSamples;
|
||||
|
||||
// need to know if this is the first channel in the datablock.
|
||||
int firstBlockChan = PamUtils.getLowestChannel(ishDetDataBlock.getChannelMap());
|
||||
int firstUnitChan = PamUtils.getLowestChannel(ishDataUnit.getChannelBitmap());
|
||||
boolean firstChan = (firstBlockChan == firstUnitChan);
|
||||
|
||||
//if (ishDetectorData.length != 1)
|
||||
//ishDetectorData[0] = 0;
|
||||
|
||||
//int totalChannels = PamUtils.getNumChannels(dataBlock.getChannelMap());
|
||||
// int chanIx = PamUtils.getSingleChannel(dataUnit.getChannelBitmap());
|
||||
int chanIx = PamUtils.getSingleChannel(dataUnit.getSequenceBitmap());
|
||||
|
||||
int chanIx = PamUtils.getSingleChannel(ishDataUnit.getSequenceBitmap());
|
||||
BufferedImage image = getDisplayImage();
|
||||
|
||||
if (image == null) return;
|
||||
|
||||
// graphics handle to the image.
|
||||
Graphics2D g2d = (Graphics2D) image.getGraphics();
|
||||
boolean wrap = getDisplayPanelContainer().wrapDisplay();
|
||||
|
||||
int len = ishDetectorData[0].length; //length of the detector output
|
||||
int len2 = ishDetectorData.length; //number of data streams;
|
||||
//if the length is greater than 1 then the actual detector energy is at index 2,
|
||||
//otherwise it is at index 1 (the trigger data is same as the d data so point in replicating data)
|
||||
int detIndex = len2 > 1 ? 2 : 0;
|
||||
|
||||
// need to work out from the dataUnits start time in
|
||||
// milliseconds where it should start drawing ...
|
||||
double containerDrawPix = 0;
|
||||
long containerTime = 0;
|
||||
double pixelsPerMilli = 0;
|
||||
synchronized (displayPanelContainer) {
|
||||
containerDrawPix = displayPanelContainer.getCurrentXPixel();
|
||||
containerTime = displayPanelContainer.getCurrentXTime();
|
||||
pixelsPerMilli = getInnerWidth()/ displayPanelContainer.getXDuration();
|
||||
}
|
||||
// System.out.println(String.format("DR Ch %d t = %d", dataUnit.getChannelBitmap(), dataUnit.timeMilliseconds));
|
||||
int currentPixel = (int) containerDrawPix;
|
||||
|
||||
//the height scale values.
|
||||
float yScale = this.yScale;
|
||||
@@ -272,23 +496,27 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
|
||||
}
|
||||
}
|
||||
if (autoCount==1 || autoCount%autoscaleCount==0) {
|
||||
// if (yScale<autoScaleMax) {
|
||||
// yScale = (float) (1.2*yScale);
|
||||
// }
|
||||
// else {
|
||||
// //the maximum slowly decays.
|
||||
// yScale=(float) (0.8*yScale);
|
||||
// }
|
||||
// if (yScale<autoScaleMax) {
|
||||
// yScale = (float) (1.2*yScale);
|
||||
// }
|
||||
// else {
|
||||
// //the maximum slowly decays.
|
||||
// yScale=(float) (0.8*yScale);
|
||||
// }
|
||||
yScale=(float) (1.05*autoScaleMax);
|
||||
autoScaleMax = -Double.MAX_VALUE; //reset
|
||||
}
|
||||
autoCount++;
|
||||
// System.out.println("Auto vert factor: " + yScale + " " + autoScaleMax + " autoCount: " +autoCount);
|
||||
// System.out.println("Auto vert factor: " + yScale + " " + autoScaleMax + " autoCount: " +autoCount);
|
||||
}
|
||||
//if a static vertical scale factor then just use that.
|
||||
else {
|
||||
yScale = (float) ishDisplayParams.verticalScaleFactor;
|
||||
}
|
||||
if (westAxis.getMaxVal() != yScale) {
|
||||
westAxis.setMaxVal(yScale);
|
||||
displayPanelContainer.panelNotify(DisplayPanelContainer.DRAW_BORDER);
|
||||
}
|
||||
this.yScale=yScale;
|
||||
|
||||
//check for resize;
|
||||
@@ -299,59 +527,153 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
|
||||
|
||||
if (firstTime) {
|
||||
clearLastX = 0;
|
||||
// chan.lastDataUnit = null;
|
||||
activeChannels = ishDetControl.getActiveChannels();
|
||||
int nChansActive = PamUtils.getNumChannels(activeChannels); //no need to recalc this every iteration.
|
||||
|
||||
int chanN = 0;
|
||||
for (int i = 0; i < perChannelInfo.length; i++) {
|
||||
if ((activeChannels & (1 << i)) != 0) {
|
||||
// if ((activeChannels & (1 << i)) != 0) {
|
||||
int yLo = Math.round((float) chanN / (float) nChansActive * lastImageHeight);
|
||||
int yHi = Math.round((float)(chanN + 1) / (float) nChansActive * lastImageHeight);
|
||||
perChannelInfo[i] = new PerChannelInfo(yLo, yHi, yScale);
|
||||
chanN++;
|
||||
}
|
||||
else perChannelInfo[i]=null; //need to reset to nul//
|
||||
// }
|
||||
// else perChannelInfo[i]=null; //need to reset to nul//
|
||||
//System.out.println("nChansActive: " + nChansActive +" chanN: " + chanN + " yLo: "+ yLo + " yHi: " + yHi);
|
||||
}
|
||||
firstTime = false;
|
||||
}
|
||||
|
||||
//Figure out which channel this data is for.
|
||||
PerChannelInfo chan = perChannelInfo[chanIx];
|
||||
if (chan == null) return;
|
||||
|
||||
/*
|
||||
* Sort out x position and scaling
|
||||
*/
|
||||
double drawStartPix = containerDrawPix - (containerTime-ishDataUnit.getTimeMilliseconds()) * pixelsPerMilli;
|
||||
if (!wrap) {
|
||||
drawStartPix = currentXPixel;
|
||||
}
|
||||
/*
|
||||
* Check for wrap and either shuffle the image along, or clear the end of it.
|
||||
*/
|
||||
//calculate the current x (time) position.
|
||||
//Figure out which channel this data is for.
|
||||
int xEnd = (int) displayPanelContainer.getCurrentXPixel();
|
||||
int xStart = chan.lastX;
|
||||
|
||||
/**
|
||||
xEnd = (int) (xStart + ishDataUnit.getDurationInMilliseconds()*pixelsPerMilli);
|
||||
int firstPixel = (int) Math.floor(drawStartPix); /**
|
||||
* OK, there is a a problem here...the position of the cursor does not
|
||||
* necessarily correspond to the current time of the data unit which was
|
||||
* previously assumed. So take the time of the cursor and count pixels back.
|
||||
*/
|
||||
double pixelsback = ((double) (displayPanelContainer.getCurrentXTime() - dataUnit1.getTimeMilliseconds()))/displayPanelContainer.getXDuration();
|
||||
pixelsback = pixelsback*getInnerWidth();
|
||||
if (firstPixel > image.getWidth()) {
|
||||
firstPixel = (int) ((dataUnit1.getTimeMilliseconds()-displayPanelContainer.getCurrentXTime())*pixelsPerMilli);
|
||||
}
|
||||
// double pixelsback = ((double) (displayPanelContainer.getCurrentXTime() - dataUnit1.getTimeMilliseconds()))/displayPanelContainer.getXDuration();
|
||||
// pixelsback = pixelsback*getInnerWidth();
|
||||
|
||||
long currentXTime = getDisplayPanelContainer().getCurrentXTime();
|
||||
/*
|
||||
* Drawng off the currentXtime is a disaster, since it's out of synch with the incoming
|
||||
* correlation data.
|
||||
*/
|
||||
// currentXTime = ishDataUnit.getEndTimeInMilliseconds();
|
||||
//convert to pixels.
|
||||
xEnd= (int) (xEnd - pixelsback);
|
||||
// xEnd= (int) (xEnd - pixelsback);
|
||||
if (!wrap && firstChan) {
|
||||
/*
|
||||
* If we just use the number of samples in the data, this
|
||||
* Doesn't work for FFT based data because of the FFT overlap.
|
||||
* So we need to work out the millis difference between this unit
|
||||
* and the preceding one.
|
||||
* It's actually a bit more complex, for the correlation detectors there is quite a lag
|
||||
* in the data, e.g. the first MF data may say it has 2095 samples, but the actual data
|
||||
* started 4096 samples before now. This means we never ever have the latest data (the length
|
||||
* of the kernel), so need to move everything left by that ammount. Can we do it off the
|
||||
* millis time of the data unit. Need to work out two x values, for start and end of
|
||||
* each block.
|
||||
*/
|
||||
// this is the absolute number of pixels we're going to scroll by.
|
||||
double scrollPixs = ((double) (ishSamples * 1000. * ishScale) / sampleRate * pixelsPerMilli);
|
||||
if (chan.lastDataUnit != null && chan.lastDataUnit.getTimeMilliseconds() < ishDataUnit.getTimeMilliseconds()) {
|
||||
scrollPixs = (ishDataUnit.getTimeMilliseconds()-chan.lastDataUnit.getTimeMilliseconds())*pixelsPerMilli;
|
||||
// scrollPixs = (currentXTime-ishDataUnit.getEndTimeInMilliseconds())*pixelsPerMilli;
|
||||
}
|
||||
int w = image.getWidth();
|
||||
// but we may be starting earlier, due to lag in correlation detectors.
|
||||
// double lag = currentXTime-ishDataUnit.getEndTimeInMilliseconds();
|
||||
double scrollXEnd = w;// - (lag*pixelsPerMilli);
|
||||
// double scrollXStart = w - scrollPixs; //- ((currentXTime-ishDataUnit.getTimeMilliseconds())*pixelsPerMilli);
|
||||
// scrollPixs = scrollXEnd-scrollXStart;
|
||||
int intScrollPixs = (int) scrollPixs;
|
||||
double scrollXStart = scrollXEnd-intScrollPixs;
|
||||
scrollRemainder += (scrollPixs-intScrollPixs);
|
||||
// deal with itty bitty non integer bits.
|
||||
if (scrollRemainder >= 1) {
|
||||
scrollRemainder -= 1;
|
||||
intScrollPixs++;
|
||||
}
|
||||
// shuffle the image along.
|
||||
g2d.drawImage(image, 0, 0, w-intScrollPixs, image.getHeight(), intScrollPixs, 0, w, image.getHeight(), null);
|
||||
|
||||
// if (wrap) {
|
||||
// g2d.fillRect(w-intScrollPixs, 0, intScrollPixs, image.getHeight());
|
||||
// }
|
||||
firstPixel = (int) scrollXStart;
|
||||
xStart = firstPixel;
|
||||
xEnd = (int) scrollXEnd;
|
||||
// clear end of imate.
|
||||
g2d.setBackground(plotBackground);
|
||||
g2d.fillRect(firstPixel, 0, w-firstPixel, image.getHeight());
|
||||
// draw the threshold.
|
||||
g2d.setColor(Color.RED);
|
||||
int yT = (int) westAxis.getPosition(ishDetControl.ishDetParams.thresh);
|
||||
g2d.drawLine(xStart-1, yT, xEnd, yT);
|
||||
chan.lastDataUnit = ishDataUnit;
|
||||
// shuffle all the chan lastX values.
|
||||
for (int i = 0; i < perChannelInfo.length; i++) {
|
||||
if (perChannelInfo[i] != null) {
|
||||
if (perChannelInfo[i].lastX == 0) {
|
||||
perChannelInfo[i].lastX = xStart;
|
||||
}
|
||||
else {
|
||||
perChannelInfo[i].lastX -= intScrollPixs;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (firstChan) {
|
||||
clearImage(xStart, xEnd, true);
|
||||
}
|
||||
|
||||
|
||||
// //HACK to get autoscale working. This is code is a total mess.
|
||||
|
||||
|
||||
|
||||
|
||||
// //HACK to get autoscale working. This is code is a total mess.
|
||||
chan.yScale=yScale;
|
||||
|
||||
// sometimes xEnd < clearLastX, and the whole plugin window gets erased. So make sure, in that case,
|
||||
// we don't call the clearImage method
|
||||
// if (clearLastX != xEnd) {
|
||||
if (clearLastX < xEnd) {
|
||||
// if (clearLastX != xEnd) {
|
||||
if (clearLastX < xEnd & wrap) {
|
||||
clearImage(clearLastX + 1, xEnd + 1, true);
|
||||
clearLastX = xEnd;
|
||||
}
|
||||
int w = image.getWidth();
|
||||
// System.out.printf("Drawing between xpixels %d and %d 0f %d\n", xStart, xEnd, w);
|
||||
for (int i = 0; i < len; i++) {
|
||||
int x = (len == 1) ? xEnd :
|
||||
int x = (len == 1) ? xEnd-1 :
|
||||
(int) Math.round(PamUtils.linterp(0,len-1,xStart,xEnd,i));
|
||||
|
||||
//plot detection data
|
||||
int y = (int) (chan.yHi - (ishDetectorData[detIndex][i]/chan.yScale) * (chan.yHi-chan.yLo));
|
||||
y = (int) westAxis.getPosition(ishDetectorData[detIndex][i]);
|
||||
// if (i == 0) {
|
||||
// symbol.draw(g2d, new Point(x,image.getHeight()/2));
|
||||
// }
|
||||
|
||||
int threshY;
|
||||
if (len2>1) {
|
||||
@@ -365,21 +687,32 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
|
||||
|
||||
y = Math.min(chan.yHi, Math.max(y, chan.yLo));
|
||||
//if (x >= lastX) {
|
||||
Graphics2D g2d = (Graphics2D) image.getGraphics();
|
||||
|
||||
g2d.setColor(PamColors.getInstance().getColor(PamColor.PlOTWINDOW));
|
||||
g2d.fillRect(chan.lastX+1, 0, Math.abs(x-chan.lastX), image.getHeight());
|
||||
// g2d.setColor(PamColors.getInstance().getColor(PamColor.PlOTWINDOW));
|
||||
// if (wrap) {
|
||||
// g2d.fillRect(chan.lastX+1, 0, Math.abs(x-chan.lastX), image.getHeight());
|
||||
// }
|
||||
|
||||
g2d.setColor(PamColors.getInstance().getChannelColor(chanIx));
|
||||
|
||||
if (wrap) {
|
||||
if (x > image.getWidth()) {
|
||||
x -= image.getWidth();
|
||||
}
|
||||
}
|
||||
//detection function
|
||||
if (x >= chan.lastX) {
|
||||
if (x >= chan.lastX-1) {
|
||||
g2d.drawLine(chan.lastX, chan.lastY, x, y);
|
||||
}
|
||||
if (wrap) {
|
||||
if (x >= chan.lastX && threshY >= chan.yLo && threshY < chan.yHi) {
|
||||
g2d.setColor(Color.RED); //threshold line
|
||||
if (chan.lastThreshy == Integer.MIN_VALUE) {
|
||||
chan.lastThreshy = threshY;
|
||||
}
|
||||
g2d.drawLine(chan.lastX, chan.lastThreshy, x, threshY);
|
||||
}
|
||||
}
|
||||
chan.xAdvance = (chan.xAdvance <= 2) ? chan.xAdvance+1
|
||||
: Math.max(chan.xAdvance, x - chan.lastX);
|
||||
|
||||
@@ -400,6 +733,27 @@ public class IshDetGraphics implements DisplayPanelProvider, PamSettings {
|
||||
// TODO Auto-generated method stub
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public PamAxis getWestAxis() {
|
||||
return westAxis;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void clearImage() {
|
||||
super.clearImage();
|
||||
currentImageTime = 0;
|
||||
makeChannelInfo();
|
||||
}
|
||||
|
||||
private void makeChannelInfo() {
|
||||
perChannelInfo = new PerChannelInfo[PamConstants.MAX_CHANNELS];
|
||||
for (int i = 0; i < perChannelInfo.length; i++) {
|
||||
perChannelInfo[i] = new PerChannelInfo(0, 0, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|

@@ -79,4 +79,13 @@ public class IshDetection extends PamDataUnit<PamDataUnit, SuperDetection> imple
        setPeakTimeSam(peakTimeSam); //relative to start of PAMGUARD run
    }

    @Override
    public String getSummaryString() {
        String str = super.getSummaryString();
        str += String.format("<br>Peak %5.3f", peakHeight);

        return str;
    }

}

@@ -36,7 +36,7 @@ public class IshFnBinarySource extends BinaryDataSource {
     */
    private IshDetFnProcess ishDetFnProcess;

    private static final int currentVersion = 1;
    private static final int currentVersion = 2;

    public IshFnBinarySource(IshDetFnProcess ishDetFnProcess, PamDataBlock sisterDataBlock, String streamName) {
        super(sisterDataBlock);
@@ -84,10 +84,22 @@ public class IshFnBinarySource extends BinaryDataSource {
        int nDet = dis.readInt();

        double[][] detData = new double[nDet][];
        if (moduleVersion < 2) {
            for (int i=0; i<nDet; i++) {
                //the first double is peak, the second is noise.
                detData[i] = new double[] {dis.readDouble(), dis.readDouble()};
            }
        }
        else {
            int n2 = dis.readInt();
            detData = new double[nDet][n2];
            for (int i = 0; i < nDet; i++) {
                for (int i2 = 0; i2 < n2; i2++) {
                    detData[i][i2] = dis.readDouble();
                }
            }

        }

        if (baseData.getChannelBitmap()==0) {
            baseData.setChannelBitmap(1);
@@ -139,10 +151,18 @@ public class IshFnBinarySource extends BinaryDataSource {
        }

        try {
            dos.writeInt(ishDet.getDetData().length);
            double[][] data = ishDet.getDetData();
            int len1 = data.length;
            int len2 = 0;
            if (len1>0) {
                len2 = data[0].length;
            }
            dos.writeInt(len1);
            dos.writeInt(len2);
            for (int i=0; i<ishDet.getDetData().length; i++) {
                dos.writeDouble(ishDet.getDetData()[i][0]);
                dos.writeDouble(ishDet.getDetData()[i][1]);
                for (int i2 = 0; i2 < len2; i2++) {
                    dos.writeDouble(data[i][i2]);
                }
            }
        }
        catch (IOException e) {

@@ -30,10 +30,10 @@ public class IshPeakProcess extends PamProcess
    //public Complex[] inputData;
    //IshDetCalcIntfc ishDetCalcIntfc;
    //IshDetIoIntfc ishDetIoIntfc;
//  FFTDataSource parentProcess;
//  FFTDataSource parentProcess;
    IshDetControl ishDetControl;

    PamDataBlock<IshDetection> outputDataBlock;
    IshDataBlock outputDataBlock;

    int savedFftLength = -1; //-1 forces recalculation
@@ -59,10 +59,10 @@ public class IshPeakProcess extends PamProcess
        super(ishDetControl, null);
        this.ishDetControl = ishDetControl;
        setParentDataBlock(parentDataBlock);
//      outputDataBlock = new PamDataBlock<IshDetection>(IshDetection.class,
//              ishDetControl.getUnitName() + " events", this, parentDataBlock.getChannelMap());
        outputDataBlock = new PamDataBlock<IshDetection>(IshDetection.class,
                ishDetControl.getUnitName() + " events", this, parentDataBlock.getSequenceMap());
//      outputDataBlock = new PamDataBlock<IshDetection>(IshDetection.class,
//              ishDetControl.getUnitName() + " events", this, parentDataBlock.getChannelMap());
        outputDataBlock = new IshDataBlock(ishDetControl.getUnitName() + " events",
                this, parentDataBlock.getSequenceMap());
        outputDataBlock.setCanClipGenerate(true);
        addOutputDataBlock(outputDataBlock);
@@ -70,6 +70,7 @@ public class IshPeakProcess extends PamProcess
        ishmealBinaryDataSource = new IshBinaryDataSource(this, outputDataBlock, "Ishmael Detections");
        outputDataBlock.setBinaryDataSource(ishmealBinaryDataSource);

        //graphics for the map and spectrogram.
        PamDetectionOverlayGraphics overlayGraphics = new PamDetectionOverlayGraphics(outputDataBlock, new PamSymbol(defaultSymbol));
        outputDataBlock.setOverlayDraw(overlayGraphics);
@@ -114,7 +115,7 @@ public class IshPeakProcess extends PamProcess
        //setParentDataBlock(detfnDataBlock);

        prepareMyParams();
//      outputDataBlock.setChannelMap(p.channelList);
//      outputDataBlock.setChannelMap(p.channelList);
        PamDataBlock inputDataBlock = getParentDataBlock();
        outputDataBlock.sortOutputMaps(inputDataBlock.getChannelMap(), inputDataBlock.getSequenceMapObject(), p.groupedSourceParmas.getChanOrSeqBitmap());
        //setProcessName("Peak-picker: threshold " + p.thresh);
@@ -162,89 +163,120 @@ public class IshPeakProcess extends PamProcess
|
||||
double[] inputData = ishFnDataUnit.getDetData()[0];
|
||||
|
||||
//See if the channel is one we want before doing anything.
|
||||
// if ((arg.getChannelBitmap() & ishDetControl.ishDetParams.channelList) == 0)
|
||||
// if ((arg.getChannelBitmap() & ishDetControl.ishDetParams.channelList) == 0)
|
||||
if ((activeChannels & ishFnDataUnit.getSequenceBitmap())==0) {
|
||||
return;
|
||||
}
|
||||
// int chanIx = PamUtils.getSingleChannel(arg.getChannelBitmap());
|
||||
// int chanIx = PamUtils.getSingleChannel(arg.getChannelBitmap());
|
||||
|
||||
int chanIx = PamUtils.getSingleChannel(ishFnDataUnit.getSequenceBitmap());
|
||||
PerChannelInfo chan = perChannelInfo[chanIx];
|
||||
|
||||
//The actual peak-picking calculation. Check whether the peak is over calculation and
|
||||
//that the peak has not exceeded the maximum time...
|
||||
if (isIshOverThreshold(inputData[0]) && chan.nOverThresh <=maxTimeN+1) {
|
||||
int nSamples = inputData.length;
|
||||
for (int iSamp = 0; iSamp < nSamples; iSamp++) {
|
||||
if (isIshOverThreshold(inputData[iSamp]) && chan.nOverThresh <=maxTimeN+1) {
|
||||
if (chan.nOverThresh == 0) {
|
||||
chan.peakHeight = Double.NEGATIVE_INFINITY;
|
||||
chan.startSample = ishFnDataUnit.getStartSample()+iSamp;
|
||||
}
|
||||
chan.endSample = ishFnDataUnit.getStartSample()+iSamp;
|
||||
chan.nUnderThresh = 0;
|
||||
chan.nOverThresh++;
|
||||
// System.out.println("above threshold, startSample= " + String.valueOf(ishFnDataUnit.getStartSample()) + ", inputData= " + String.valueOf(inputData[0]) + ", nOverThresh= "+ String.valueOf(chan.nOverThresh));
|
||||
if (inputData[0] >= chan.peakHeight) {
|
||||
chan.peakHeight = inputData[0];
|
||||
chan.peakTimeSam = ishFnDataUnit.getStartSample(); //
|
||||
// System.out.println("above threshold, startSample= " + String.valueOf(ishFnDataUnit.getStartSample()) + ", inputData= " + String.valueOf(inputData[0]) + ", nOverThresh= "+ String.valueOf(chan.nOverThresh));
|
||||
if (inputData[iSamp] >= chan.peakHeight) {
|
||||
// System.out.printf("Inc peak from %4.2f to %4.2f at samp %d\n", chan.peakHeight, inputData[iSamp], ishFnDataUnit.getStartSample()+iSamp);
|
||||
chan.peakHeight = inputData[iSamp];
|
||||
chan.peakTimeSam = ishFnDataUnit.getStartSample()+iSamp; //
|
||||
}
|
||||
}
|
||||
else {
|
||||
// System.out.println("below threshold, nOverThresh = "+ String.valueOf(chan.nOverThresh) + ", minTimeN= "+ String.valueOf(minTimeN));
|
||||
// System.out.println("below threshold, nOverThresh = "+ String.valueOf(chan.nOverThresh) + ", minTimeN= "+ String.valueOf(minTimeN));
|
||||
// Below threshold or over the maximum time allowed.
|
||||
// Check to see if we were over enough times, and if so,
|
||||
//add an output unit. Data has to be less then the max time
|
||||
if (chan.nOverThresh > 0 && chan.nOverThresh >= minTimeN && chan.nOverThresh<=maxTimeN) {
|
||||
chan.nUnderThresh++;
|
||||
if (chan.nUnderThresh < refactoryTimeSam) {
|
||||
/*
|
||||
* just wait until we've reached this time before ending the call.
|
||||
* Calls cross threshold many times, so it's possible it will go back
|
||||
* over threshold in a couple more samples. So do absolutely nothing here.
|
||||
*/
|
||||
|
||||
}
|
||||
else if (chan.nOverThresh > 0 && chan.durationSamples() >= minTimeN && chan.nOverThresh<=maxTimeN) {
|
||||
/*
|
||||
* If we're here, the call duration is long enough and it's been below threshold for longer
|
||||
* than the minimum allowed gap, so it is time to properly end the call.
|
||||
*/
|
||||
|
||||
//Append the new data to the end of the data stream.
|
||||
long startSam = ishFnDataUnit.getStartSample();
|
||||
long durationSam = chan.nOverThresh;
|
||||
durationSam *= sampleRate / (ishDetControl.ishDetFnProcess.getDetSampleRate());
|
||||
long startMsec = absSamplesToMilliseconds(startSam - durationSam);
|
||||
long endMsec = absSamplesToMilliseconds(startSam) +
|
||||
// long startSam = ishFnDataUnit.getStartSample();
|
||||
long durationSam = chan.endSample-chan.startSample+1;
|
||||
// durationSam *= sampleRate / (ishDetControl.ishDetFnProcess.getDetSampleRate());
|
||||
// long startMsec = absSamplesToMilliseconds(startSam - durationSam);
|
||||
long startMsec = absSamplesToMilliseconds(chan.startSample);
|
||||
long endMsec = startMsec +
|
||||
Math.round(1000.0 * (float)durationSam * ishDetControl.ishDetFnProcess.getDetSampleRate());
|
||||
float lowFreq = ishDetControl.ishDetFnProcess.getLoFreq();
|
||||
float highFreq = ishDetControl.ishDetFnProcess.getHiFreq();
|
||||
|
||||
IshDetection iDet = outputDataBlock.getRecycledUnit();
|
||||
if (iDet != null) { //refurbished
|
||||
// iDet.setInfo(startMsec, 1 << chanIx, startSam, durationSam,
|
||||
// lowFreq, highFreq, chan.peakTimeSam, chan.peakHeight);
|
||||
iDet.setInfo(startMsec, arg1.getChannelBitmap(), startSam, durationSam,
|
||||
lowFreq, highFreq, chan.peakTimeSam, chan.peakHeight);
|
||||
iDet.setSequenceBitmap(arg1.getSequenceBitmapObject());
|
||||
} else { //new
|
||||
// iDet = new IshDetection(startMsec, endMsec, lowFreq, highFreq, chan.peakTimeSam,
|
||||
// chan.peakHeight, outputDataBlock, 1 << chanIx, startSam, durationSam);
|
||||
IshDetection iDet;
|
||||
// = outputDataBlock.getRecycledUnit();
|
||||
// if (iDet != null) { //refurbished
|
||||
//// iDet.setInfo(startMsec, 1 << chanIx, startSam, durationSam,
|
||||
//// lowFreq, highFreq, chan.peakTimeSam, chan.peakHeight);
|
||||
// iDet.setInfo(startMsec, arg1.getChannelBitmap(), startSam, durationSam,
|
||||
// lowFreq, highFreq, chan.peakTimeSam, chan.peakHeight);
|
||||
// iDet.setSequenceBitmap(arg1.getSequenceBitmapObject());
|
||||
// } else { //new
|
||||
// iDet = new IshDetection(startMsec, endMsec, lowFreq, highFreq, chan.peakTimeSam,
|
||||
// chan.peakHeight, outputDataBlock, 1 << chanIx, startSam, durationSam);
|
||||
//11/03/2020 - major bug fix - start sample of Ishamel detector data unit was wrong - it was at the end of the data unit for some reason.
|
||||
iDet = new IshDetection(startMsec, endMsec, lowFreq, highFreq, chan.peakTimeSam,
|
||||
chan.peakHeight, outputDataBlock, arg1.getChannelBitmap(), startSam - durationSam, durationSam);
|
||||
chan.peakHeight, outputDataBlock, arg1.getChannelBitmap(), chan.startSample, durationSam);
|
||||
|
||||
iDet.setSequenceBitmap(arg1.getSequenceBitmapObject());
|
||||
iDet.setParentDataBlock(outputDataBlock);
|
||||
}
|
||||
// }
|
||||
|
||||
//now check if the refactory time is OK
|
||||
|
||||
|
||||
double iDISam =Math.abs((startSam) - (chan.lastStartSam+durationSam));
|
||||
double iDISam =Math.abs(chan.startSample - (chan.lastStartSam+chan.lastDurationSam));
|
||||
|
||||
// System.out.println("Samplediff: " + iDISam
|
||||
// + " maxTimeN: " + this.maxTimeN + " sR: " + ishDetControl.ishDetFnProcess.getDetSampleRate());
|
||||
if (chan.lastStartSam==-1 || startSam<chan.lastStartSam || iDISam>=refactoryTimeSam) {
|
||||
// System.out.println("Samplediff: " + iDISam
|
||||
// + " maxTimeN: " + this.maxTimeN + " sR: " + ishDetControl.ishDetFnProcess.getDetSampleRate());
|
||||
if (chan.lastStartSam==-1 || chan.startSample<chan.lastStartSam || iDISam>=refactoryTimeSam) {
|
||||
outputDataBlock.addPamData(iDet);
|
||||
// System.out.println("");
|
||||
// System.out.println("Ishmael data unit accepted because of refactory time: "
|
||||
// + startSam + " " + chan.lastStartSam + " minIDI: (samples): " + refactoryTimeSam
|
||||
// + " minIDI (s) " + this.ishDetControl.ishDetParams.refractoryTime);
|
||||
// System.out.println(iDet.getSummaryString());
|
||||
// System.out.println("");
|
||||
// System.out.println("Ishmael data unit accepted because of refactory time: "
|
||||
// + startSam + " " + chan.lastStartSam + " minIDI: (samples): " + refactoryTimeSam
|
||||
// + " minIDI (s) " + this.ishDetControl.ishDetParams.refractoryTime);
|
||||
}
|
||||
else {
|
||||
// System.out.println("");
|
||||
// System.out.println("Ishmael data unit REJECTED because of refactory time: " + startSam
|
||||
// + " " + chan.lastStartSam + " minIDI: (samples): " + refactoryTimeSam
|
||||
// + " minIDI (s) " + this.ishDetControl.ishDetParams.refractoryTime);
|
||||
// System.out.println("");
|
||||
// System.out.println("Ishmael data unit REJECTED because of refactory time: " + startSam
|
||||
// + " " + chan.lastStartSam + " minIDI: (samples): " + refactoryTimeSam
|
||||
// + " minIDI (s) " + this.ishDetControl.ishDetParams.refractoryTime);
|
||||
}
|
||||
//keep a reference to the last time and duration
|
||||
chan.lastStartSam = startSam;
|
||||
chan.lastStartSam = chan.startSample;
|
||||
chan.lastDurationSam = durationSam;
|
||||
|
||||
chan.reset();
|
||||
}
|
||||
else {
|
||||
/*
|
||||
* Get here if there may have been a call building, but it didn't get adequate duration and
|
||||
* has now been below threshold for a while. So reset the detector and allow it to start a
|
||||
* new detection.
|
||||
*/
|
||||
chan.reset();
|
||||
}
|
||||
}
|
||||
chan.nOverThresh = 0;
|
||||
}
|
||||
}
|
||||
|
||||

@@ -264,6 +296,9 @@ public class IshPeakProcess extends PamProcess
    private class PerChannelInfo {

        public long endSample;
        public int nUnderThresh;
        public long startSample;
        /**
         * The sample time of the start of the last saved detection (samples)
         */
@@ -272,6 +307,16 @@ public class IshPeakProcess extends PamProcess
        int nOverThresh = 0; //number of times we've been over threshold in units of FFT length
        double peakHeight; //height of peak within the current event
        long peakTimeSam; //sample number at which that peak occurred

        private long durationSamples() {
            return endSample-startSample+1;
        }

        public void reset() {
            nOverThresh = 0;
            startSample = endSample = 0;
            peakHeight = 0;
        }
    }

    @Override public void pamStart() {

@@ -33,7 +33,7 @@ public class MatchFiltControl extends IshDetControl implements PamSettings

    @Override
    public IshDetFnProcess getNewDetProcess(PamDataBlock defaultDataBlock) {
        return new MatchFiltProcess(this, defaultDataBlock);
        return new MatchFiltProcess2(this, defaultDataBlock);
    }

    /* (non-Javadoc)
@@ -17,6 +17,7 @@ import fftManager.FFT;

/* @author Dave Mellinger
 */
@Deprecated // use MAtchFiltProc2 which uses faster FFT and is generally more efficient.
public class MatchFiltProcess extends IshDetFnProcess
{
    /**
@@ -148,6 +149,14 @@ public class MatchFiltProcess extends IshDetFnProcess
            double[] x = fftMgr.crossCorrelation(buffer, 0, buffer.length,
                    kernel, 0, kernel.length);

            // debug output the max of x.
//          double maxX = 0;
//          double totX = 0;
//          for (int ix = 0; ix < x.length; ix++) {
//              maxX = Math.max(maxX, x[ix]);
//              totX += x[ix];
//          }
//          System.out.printf("Mean and Max correlation value is %7.5f, %7.5f\n", totX/x.length, maxX);
            //Extract the useful values from x into an output buffer. The
            //non-useful values are those "polluted" by the circular nature
            //of FFT-based cross-correlation.
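The "polluted" values referred to in that comment are why the new MatchFiltProcess2 class below only keeps usefulSamples = fftLength - kernel.length output values per buffer and re-reads the remaining samples in the next, overlapping buffer. A minimal sketch of the underlying point, with illustrative names that are not part of the diff: an FFT-based circular cross-correlation only agrees with the direct sliding dot product for lags at which the kernel still fits entirely inside the buffer.

    // Sketch only: direct time-domain cross-correlation of a buffer with a kernel
    // at a given lag. It is only defined while lag <= buffer.length - kernel.length;
    // an FFT-based circular correlation wraps around for larger lags, so those
    // values are discarded and the overlapping samples are processed again in the
    // next buffer.
    static double directCorrelation(double[] buffer, double[] kernel, int lag) {
        double sum = 0;
        for (int k = 0; k < kernel.length; k++) {
            sum += buffer[lag + k] * kernel[k];
        }
        return sum;
    }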

src/IshmaelDetector/MatchFiltProcess2.java (new file, 279 lines)
@@ -0,0 +1,279 @@
|
||||
package IshmaelDetector;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
|
||||
import javax.sound.sampled.AudioFormat;
|
||||
import javax.sound.sampled.AudioInputStream;
|
||||
import javax.sound.sampled.AudioSystem;
|
||||
|
||||
import Acquisition.FileInputSystem;
|
||||
import PamDetection.RawDataUnit;
|
||||
import PamUtils.PamUtils;
|
||||
import PamUtils.complex.ComplexArray;
|
||||
import PamView.GroupedSourceParameters;
|
||||
import PamView.dialog.warn.WarnOnce;
|
||||
import PamguardMVC.PamConstants;
|
||||
import PamguardMVC.PamDataBlock;
|
||||
import PamguardMVC.PamDataUnit;
|
||||
import PamguardMVC.PamObservable;
|
||||
import fftManager.FastFFT;
|
||||
|
||||
public class MatchFiltProcess2 extends IshDetFnProcess {
|
||||
|
||||
private MatchFiltControl matchFiltControl;
|
||||
|
||||
private ChannelDetector[] channelDetectors = new ChannelDetector[PamConstants.MAX_CHANNELS];
|
||||
|
||||
private double[] kernel;
|
||||
|
||||
private int fftLength;
|
||||
|
||||
private FastFFT fastFFT;
|
||||
|
||||
private ComplexArray complexKernel;
|
||||
|
||||
private int usefulSamples;
|
||||
|
||||
private int logFFTLength;
|
||||
|
||||
private double normConst;
|
||||
|
||||
public MatchFiltProcess2(MatchFiltControl matchFiltControl, PamDataBlock parentDataBlock) {
|
||||
super(matchFiltControl, parentDataBlock);
|
||||
this.matchFiltControl = matchFiltControl;
|
||||
}
|
||||
|
||||
|
||||
//This is called when the user chooses Start Detection off the menu.
|
||||
|
||||
@Override
|
||||
protected void prepareMyParams() {
|
||||
MatchFiltParams params = (MatchFiltParams) ishDetControl.ishDetParams;
|
||||
channelDetectors = new ChannelDetector[PamConstants.MAX_CHANNELS];
|
||||
GroupedSourceParameters groups = params.groupedSourceParmas;
|
||||
// want the first channel of each group ...
|
||||
int nGroup = groups.countChannelGroups();
|
||||
if (nGroup == 0) {
|
||||
// just try to do channel 0
|
||||
channelDetectors[0] = new ChannelDetector(0);
|
||||
}
|
||||
else {
|
||||
for (int i = 0; i < nGroup; i++) {
|
||||
int groupMap = groups.getGroupChannels(i);
|
||||
if (groupMap == 0) {
|
||||
continue;
|
||||
}
|
||||
int firstChan = PamUtils.getLowestChannel(groupMap);
|
||||
channelDetectors[firstChan] = new ChannelDetector(firstChan);
|
||||
}
|
||||
}
|
||||
|
||||
boolean kernelOK = prepareKernel(params);
|
||||
|
||||
|
||||
prepareProcess();
|
||||
}
|
||||
|
||||
private boolean prepareKernel(MatchFiltParams params) {
|
||||
ArrayList<String> kernelList = params.kernelFilenameList;
|
||||
if (kernelList.size() == 0) {
|
||||
WarnOnce.showWarning("Ish Matched Filter", "No Kernel file specified in configuration", WarnOnce.OK_OPTION);
|
||||
return false;
|
||||
}
|
||||
File kernelFile = new File(kernelList.get(0));
|
||||
if (kernelFile.exists() == false) {
|
||||
WarnOnce.showWarning("Ish Matched Filter", "Kernel file " + kernelFile.getAbsolutePath() + " doesn't exist", WarnOnce.OK_OPTION);
|
||||
return false;
|
||||
}
|
||||
|
||||
// now get the kernel from the file.
|
||||
try {
|
||||
byte[] byteArray = new byte[(int)kernelFile.length()]; //longer than needed
|
||||
AudioInputStream audioStream = AudioSystem.getAudioInputStream(kernelFile);
|
||||
int nBytesRead = audioStream.read(byteArray, 0, byteArray.length);
|
||||
AudioFormat audioFormat = audioStream.getFormat();
|
||||
long nBytesToDo =
|
||||
nBytesRead - (nBytesRead % audioFormat.getFrameSize());
|
||||
kernel = FileInputSystem.bytesToSamples(byteArray,
|
||||
nBytesToDo, 0, audioFormat);
|
||||
audioStream.close();
|
||||
} catch (Exception ex) {
|
||||
kernel = new double[0];
|
||||
WarnOnce.showWarning("Ish Matched Filter", "Unable to read Kernel file " + kernelFile.getAbsolutePath(), WarnOnce.OK_OPTION);
|
||||
return false;
|
||||
}
|
||||
if (kernel == null) {
|
||||
return false;
|
||||
}
|
||||
int bufLen = Math.max((int)Math.round(sampleRate * 0.1), kernel.length * 2);
|
||||
fftLength = FastFFT.nextBinaryExp(bufLen);
|
||||
logFFTLength = FastFFT.log2(fftLength);
|
||||
// pack it slightly differently so that the data are at the end.
|
||||
double[] packedKernel = Arrays.copyOf(kernel, fftLength);
|
||||
// double[] packedKernel = new double[fftLength];
|
||||
// for (int i = 0, j = fftLength-packedKernel.length; i < kernel.length; i++, j++) {
|
||||
// packedKernel[j] = kernel[i];
|
||||
// }
|
||||
|
||||
|
||||
fastFFT = new FastFFT();
|
||||
// this will initialise the fastFFT for bufLen, to reuse in subsequent calls
|
||||
complexKernel = fastFFT.rfft(packedKernel, fftLength);
|
||||
// usefulSamples = fftLength/2;//-kernel.length;
|
||||
usefulSamples = fftLength-kernel.length;
|
||||
normConst = 0;
|
||||
for (int i = 0; i < kernel.length; i++) {
|
||||
normConst += Math.pow(kernel[i], 2);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void prepareProcess() {
|
||||
super.prepareProcess();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean prepareProcessOK() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void newData(PamObservable o, PamDataUnit arg) {
|
||||
if (arg instanceof RawDataUnit == false) {
|
||||
return;
|
||||
}
|
||||
int chan = PamUtils.getSingleChannel(arg.getSequenceBitmap());
|
||||
if (channelDetectors[chan] == null) {
|
||||
return;
|
||||
}
|
||||
channelDetectors[chan].newData((RawDataUnit) arg);
|
||||
}
|
||||
|
||||
private class ChannelDetector {
|
||||
|
||||
int iChannel;
|
||||
|
||||
int bufferIndex = 0;
|
||||
|
||||
long totalSamples = 0;
|
||||
|
||||
double[] dataBuffer;
|
||||
|
||||
private ChannelDetector(int iChan) {
|
||||
this.iChannel = iChan;
|
||||
}
|
||||
|
||||
public void newData(RawDataUnit rawDataUnit) {
|
||||
double[] raw = rawDataUnit.getRawData();
|
||||
if (dataBuffer == null) {
|
||||
dataBuffer = new double[fftLength];
|
||||
bufferIndex = 0;
|
||||
}
|
||||
for (int i = 0; i < raw.length; i++) {
|
||||
dataBuffer[bufferIndex++] = raw[i];
|
||||
totalSamples++;
|
||||
if (bufferIndex == fftLength) {
|
||||
processBuffer();
|
||||
shuffleBuffer();
|
||||
bufferIndex -= usefulSamples;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Move data back to start of buffer by useful samples.
|
||||
*/
|
||||
private void shuffleBuffer() {
|
||||
for (int i = 0, j = usefulSamples; j < fftLength; i++, j++) {
|
||||
dataBuffer[i] = dataBuffer[j];
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a completed data buffer
|
||||
*/
|
||||
private void processBuffer() {
|
||||
// do the xCorr here, it's as easy as in a separate class given the
|
||||
// functions in ComplexArray
|
||||
ComplexArray fftData = fastFFT.rfft(dataBuffer, fftLength);
|
||||
ComplexArray xCorrDat = fftData.conjTimes(complexKernel);
|
||||
// because it was a rfft, these data are only half of what we need for the inv FFt.
|
||||
// xCorrDat = xCorrDat.fillConjugateHalf();
|
||||
double[] xCorr = fastFFT.realInverse(xCorrDat);
|
||||
/**
|
||||
* For normalisation, we need to sum the energy in the waveform for each sample of
|
||||
* the correlation. So add the first block, the length of the kernel, then for
|
||||
* each sample add one in and take one out
|
||||
*/
|
||||
double norm2 = 0;
|
||||
for (int i = 0; i < kernel.length; i++) {
|
||||
norm2 += Math.pow(dataBuffer[i],2);
|
||||
}
|
||||
|
||||
long startSamp = totalSamples - fftLength;
|
||||
long bufferStartMillis = absSamplesToMilliseconds(startSamp);
|
||||
|
||||
double[] dataOut = new double[usefulSamples];
|
||||
for (int i = 0, j = kernel.length; i < usefulSamples; i++, j++) {
|
||||
dataOut[i] = xCorr[i]/Math.sqrt(normConst*norm2);
|
||||
norm2 -= Math.pow(dataBuffer[i],2); // remove first
|
||||
norm2 += Math.pow(dataBuffer[j],2); // add next
|
||||
}
|
||||
// now throw that at a new data unit ...
|
||||
IshDetFnDataUnit outData = new IshDetFnDataUnit(bufferStartMillis, 1<<iChannel, startSamp, usefulSamples, dataOut);
|
||||
|
||||
// double maxOut = 0;
|
||||
// for (int i = 0; i < dataOut.length; i++) {
|
||||
// maxOut = Math.max(maxOut, dataOut[i]);
|
||||
// }
|
||||
// System.out.printf("MAx correlation value is %5.3f\n", maxOut);
|
||||
|
||||
outputDataBlock.addPamData(outData);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLongName() {
|
||||
return "Matched filter detector data";
|
||||
}
|
||||
|
||||
public String getNumberName() {
|
||||
MatchFiltParams p = (MatchFiltParams)ishDetControl.ishDetParams;
|
||||
return "Matched filter: " + p.getKernelFilename();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Class inputDataClass() {
|
||||
return RawDataUnit.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float getDetSampleRate() {
|
||||
return sampleRate;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public float getHiFreq() {
|
||||
return sampleRate / 2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float getLoFreq() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
//Keep the compiler happy -- these are abstract in the superclass.
|
||||
@Override public void pamStart() { }
|
||||
@Override public void pamStop() { }
|
||||
|
||||
|
||||
}
|

src/IshmaelDetector/tethys/IshmaelSpeciesManager.java (new file, 14 lines)
@@ -0,0 +1,14 @@
package IshmaelDetector.tethys;

import PamguardMVC.PamDataBlock;
import tethys.species.FixedSpeciesManager;

public class IshmaelSpeciesManager extends FixedSpeciesManager {
    private static final int itisCode = 180403;
    private static final String name = "Cetacea";
    private static final String callType = "Ishmael Detection";
    public IshmaelSpeciesManager(PamDataBlock dataBlock) {
        super(dataBlock, itisCode, name, callType);
    }

}

src/IshmaelDetector/tethys/IshmaelTethysProvider.java (new file, 30 lines)
@@ -0,0 +1,30 @@
package IshmaelDetector.tethys;

import IshmaelDetector.IshDetection;
import PamguardMVC.PamDataBlock;
import PamguardMVC.PamDataUnit;
import nilus.Detection;
import tethys.TethysControl;
import tethys.output.StreamExportParams;
import tethys.output.TethysExportParams;
import tethys.pamdata.AutoTethysProvider;

public class IshmaelTethysProvider extends AutoTethysProvider {

    public IshmaelTethysProvider(TethysControl tethysControl, PamDataBlock pamDataBlock) {
        super(tethysControl, pamDataBlock);
    }

    @Override
    public Detection createDetection(PamDataUnit dataUnit, TethysExportParams tethysExportParams,
            StreamExportParams streamExportParams) {
        Detection detection = super.createDetection(dataUnit, tethysExportParams, streamExportParams);
        if (detection == null) {
            return null;
        }
        IshDetection ishData = (IshDetection) dataUnit;
        detection.getParameters().setScore(roundSignificantFigures(ishData.getPeakHeight(), 4));

        return detection;
    }
}
@ -164,7 +164,8 @@ abstract public class DisplayPanel {
		g2d.setColor(PamColors.getInstance().getColor(PamColor.PlOTWINDOW));
		if (x1 <= x2) {
			g2d.fillRect(x1, 0, x2-x1+1, imageHeight);
			if (drawLine) {
			boolean wrap = getDisplayPanelContainer().wrapDisplay();
			if (drawLine && wrap) {
//				g2d.setColor(PamColors.getInstance().getColor(PamColor.PLAIN));
				g2d.setColor(Color.RED);
				g2d.drawLine(x2+1, 0, x2+1, imageHeight);
@ -194,31 +194,31 @@ public class Correlations {
		return getDelay(complexArray, complexArray2, fftLength, maxDelaySamples, binLims);
	}

	/**
	 * Measure the time delay between pulses on two channels. Inputs in this case are the
	 * spectrum data (most of the cross correlation is done in the frequency domain)
	 * @param f1 complex spectrum on channel 1
	 * @param f2 complex spectrum on channel 2
	 * @param delayMeasurementParams Measurement parameters.
	 * @param fftLength FFT length to use (data will be packed or truncated as necessary)
	 * @param maxDelay Maximum feasible delay between channels.
	 * @return time delay in samples.
	 * @deprecated use ComplexArray for everything. This is now never called.
	 */
	public double getDelay(Complex[] f1, Complex[] f2, DelayMeasurementParams delayMeasurementParams, int fftLength, double maxDelay) {
		/*
		 * Everything ultimately ends up here and we can assume that the complex data are from a waveform and not
		 * an envelope at this point. We may therefore have to both filter the data and take a Hilbert transform
		 * in order to satisfy new options in delayMeasurementParams. It's possible that f1 and f2 are references back
		 * into the original spectra which we don't want to mess with.
		 * If it's complex wave data and we want to do the envelope, then we'll also need to Hilbert transform and
		 * then call right back into the correlations call which uses the waveform data, which will end up in a second call back into here
		 * once it's done yet another FFT of the analytic waveform (or its derivative). What a lot of options!
		 */


		return getDelay(f1, f2, fftLength, maxDelay);
	}
//	/**
//	 * Measure the time delay between pulses on two channels. Inputs in this case are the
//	 * spectrum data (most of the cross correlation is done in the frequency domain)
//	 * @param f1 complex spectrum on channel 1
//	 * @param f2 complex spectrum on channel 2
//	 * @param delayMeasurementParams Measurement parameters.
//	 * @param fftLength FFT length to use (data will be packed or truncated as necessary)
//	 * @param maxDelay Maximum feasible delay between channels.
//	 * @return time delay in samples.
//	 * @deprecated use ComplexArray for everything. This is now never called.
//	 */
//	public double getDelay(Complex[] f1, Complex[] f2, DelayMeasurementParams delayMeasurementParams, int fftLength, double maxDelay) {
//		/*
//		 * Everything ultimately ends up here and we can assume that the complex data are from a waveform and not
//		 * an envelope at this point. We may therefore have to both filter the data and take a Hilbert transform
//		 * in order to satisfy new options in delayMeasurementParams. It's possible that f1 and f2 are references back
//		 * into the original spectra which we don't want to mess with.
//		 * If it's complex wave data and we want to do the envelope, then we'll also need to Hilbert transform and
//		 * then call right back into the correlations call which uses the waveform data, which will end up in a second call back into here
//		 * once it's done yet another FFT of the analytic waveform (or its derivative). What a lot of options!
//		 */
//
//
//		return getDelay(f1, f2, fftLength, maxDelay);
//	}
	/**
	 * Measure the time delay between pulses on two channels. Inputs in this case are the
	 * spectrum data (most of the cross correlation is done in the frequency domain)<p>
@ -283,19 +283,31 @@ public class Correlations {
		// complex conjugate of the other
		// and at the same time, fill in the other half of it.
		// also normalise it.
		ComplexArray corrData = f1.conjTimes(f2, binRange).fillConjugateHalf();
		ComplexArray corrData = f1.conjTimes(f2, binRange);//.fillConjugateHalf();
		double scale1=0, scale2=0;
		for (int i = binRange[0]; i < binRange[1]; i++) {
			scale1 += f1.magsq(i);
			scale2 += f2.magsq(i);
		}
		// now take the inverse FFT ...
		fastFFT.ifft(corrData, fftLength);
		double scale = Math.sqrt(scale1*scale2)*2; // double it for negative freq energy not incl in original sums.
//		fastFFT.ifft(corrData, fftLength);
//		ComplexArray oldMeth = corrData.fillConjugateHalf();
//		fastFFT.ifft(oldMeth, fftLength);

		double[] newPeak = getInterpolatedPeak(corrData, scale, maxDelay);
		double[] xCorr = fastFFT.realInverse(corrData);
		double scale = Math.sqrt(scale1*scale2)*2; // double it for negative freq energy not incl in original sums.
		/*
		 * Because scale was worked out in the frequency domain, it's fftLength bigger than
		 * the scale would be if it were the magnitude of the wave data. Therefore the
		 * scale will need to be scaled down by fftLength for it to add up right.
		 */
		scale /= fftLength;

//		double[] oldPeak = getInterpolatedPeak(oldMeth, scale, maxDelay);
		double[] newPeak = getInterpolatedPeak(xCorr, scale, maxDelay);
		correlationValue = newPeak[1];
		return new TimeDelayData(newPeak[0], newPeak[1]);
//		System.out.printf("Correlation peak height is %6.4f\n", correlationValue);
		return new TimeDelayData(newPeak[0], correlationValue);
	}
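The change above replaces a full-length complex inverse FFT with FastFFT.realInverse on the half spectrum. What it computes is still an ordinary normalised cross-correlation; a self-contained time-domain equivalent, with illustrative names and no claim to match the class's actual API, looks like this:

    // Normalised cross-correlation of a against b for lags -maxLag..+maxLag.
    // The peak position gives the delay, the peak height the correlation coefficient.
    static double[] normalisedXCorr(double[] a, double[] b, int maxLag) {
        double ea = 0, eb = 0;
        for (double v : a) ea += v * v;
        for (double v : b) eb += v * v;
        double scale = Math.sqrt(ea * eb);          // same role as sqrt(scale1*scale2)
        double[] xc = new double[2 * maxLag + 1];   // lags -maxLag .. +maxLag
        for (int lag = -maxLag; lag <= maxLag; lag++) {
            double sum = 0;
            for (int i = 0; i < a.length; i++) {
                int j = i + lag;
                if (j >= 0 && j < b.length) sum += a[i] * b[j];
            }
            xc[lag + maxLag] = sum / scale;
        }
        return xc;
    }

On the scaling: with an unnormalised forward transform (as in JTransforms), and when binRange spans the whole half spectrum, scale1 and scale2 are each roughly fftLength/2 times the corresponding time-domain energies (Parseval, positive frequencies only). That is why the code doubles the product to account for the missing negative-frequency half and then divides scale by fftLength before normalising the peak height.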
	/**
	 * Get the value of the last correlation calculated by
@ -460,6 +472,24 @@ public class Correlations {
		return parabolicPeak;
	}

	public double[] getInterpolatedPeak(double[] invFFTData, double scale, double maxDelay) {
		int fftLength = invFFTData.length;
		int maxCorrLen = (int) (Math.ceil(maxDelay) + 2);
		maxCorrLen = Math.min(fftLength/2, maxCorrLen);
		double[] linCorr = new double[maxCorrLen*2];
		for (int i = 0; i < maxCorrLen; i++) {
			linCorr[i+maxCorrLen] = invFFTData[i];
			linCorr[maxCorrLen-1-i] = invFFTData[fftLength-1-i];
		}
		double[] parabolicPeak = getInterpolatedPeak(linCorr);
		parabolicPeak[1] /= scale;
		parabolicPeak[0] -= maxCorrLen;
		parabolicPeak[0] = -parabolicPeak[0];
		parabolicPeak[0] = Math.max(-maxDelay, Math.min(maxDelay, parabolicPeak[0]));
		lastPeak = parabolicPeak;
		return parabolicPeak;
	}
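The inverse FFT returns a circular correlation: bins near index 0 hold lags of one sign and bins near fftLength hold the wrapped lags of the other sign, which is what the copy loop above unwraps into the linear array linCorr before peak picking. Assuming the single-argument getInterpolatedPeak does the usual three-point parabolic fit around the largest sample $y_0$ at bin $m$, with neighbours $y_{-1}$ and $y_{+1}$, the refined peak is

$$\delta = \frac{y_{-1}-y_{+1}}{2\,(y_{-1}-2y_0+y_{+1})}, \qquad \hat{x}=m+\delta, \qquad \hat{y}\approx y_0-\tfrac{1}{4}(y_{-1}-y_{+1})\,\delta,$$

which gives sub-sample delay resolution without having to zero pad the FFT.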

//	public double[] getInterpolatedPeak(ComplexArray complexData, double scale, double minDelay, double maxDelay) {
//		/**
//		 * Normally, minDelay will be -ve and equal to maxDelay. However, in some special
@ -583,7 +583,7 @@ public class ComplexArray implements Cloneable, Serializable {
	 */
	public ComplexArray fillConjugateHalf() {
		double[] fData = Arrays.copyOf(data, data.length*2);
		for (int re1 = 0, im1 = 1, re2 = fData.length-2, im2 = fData.length-1; re1 < data.length; re1+=2, im1+=2, re2-=2, im2-=2) {
		for (int re1 = 0, im1 = re1+1, re2 = fData.length-2, im2 = re2+1; re1 < data.length; re1+=2, im1+=2, re2-=2, im2-=2) {
			fData[re2] = data[re1];
			fData[im2] = -data[im1];
		}
@ -654,4 +654,22 @@ public class ComplexArray implements Cloneable, Serializable {
		return s;
	}

	@Override
	public String toString() {
		String fmt = "%6.3f";
		return toString(fmt);
	}

	private String toString(String fmt) {
		if (data == null || data.length < 2) {
			return null;
		}
		String lFmt = fmt + ", " + fmt;
		String str = "";
		for (int i = 0, j = 1; i < data.length; i+=2, j+=2) {
			str += String.format(lFmt, data[i], data[j]) + "\n";
		}
		return str;
	}

}
@ -94,6 +94,7 @@ public class RawDataDisplay implements DisplayPanelProvider {
		westAxis = new RepeatedAxis(0, 10, 0, 20, -1, 1, true, "", "%2.2g");
		westAxis.setInterval(1);
		westAxis.setRepeatCount(nChan);
		westAxis.setCrampLabels(true);
		eastAxis = new RepeatedAxis(0, 10, 0, 20, -1, 1, false, "", "%2.2g");
		eastAxis.setInterval(1);
		eastAxis.setRepeatCount(nChan);
@ -409,7 +409,7 @@ public class GenericTOADCalculator implements TOADCalculator, PamSettings {
	private double[] getSlidingDelay(FFTDataUnit[] fftListA, FFTDataUnit[] fftListB, int startA, int startB) {
		int halfFFFTLen = fftListA[0].getFftData().length();
		int fftLength = halfFFFTLen*2;
		ComplexArray conjArray = new ComplexArray(fftLength);
		ComplexArray conjArray = new ComplexArray(fftLength/2);
		double[] conjData = conjArray.getData();
		double totPowerA=0, totPowerB=0;
		int nA = fftListA.length;
@ -456,16 +456,21 @@ public class GenericTOADCalculator implements TOADCalculator, PamSettings {
		if (totalOverlap == 0) {
			return null;
		}
		double scale = Math.sqrt(totPowerA*totPowerB)*2/fftLength;
		/**
		 * Now fill in the second half of the conj array...
		 * Don't do this any more since we're now using the realInverse function, so don't
		 * have to calculate the second half of the input to the inverse FFT. Faster and more accurate.
		 */
		double scale = Math.sqrt(totPowerA*totPowerB)*2;
		for (int re = 0, im = 1, re2=fftLength*2-2, im2 = fftLength*2-1; re < fftLength; re+=2, im+=2, re2-=2, im2-=2) {
			conjData[re2] = conjData[re];
			conjData[im2] = -conjData[im];
		}
		correlations.getFastFFT().ifft(conjArray, fftLength);
		double[] delayAndHeight = correlations.getInterpolatedPeak(conjArray, scale, halfFFFTLen);
//		for (int re = 0, im = 1, re2=fftLength*2-2, im2 = fftLength*2-1; re < fftLength; re+=2, im+=2, re2-=2, im2-=2) {
//			conjData[re2] = conjData[re];
//			conjData[im2] = -conjData[im];
//		}
//		double[] newDat = Arrays.copyOf(conjData, fftLength/2);
//		ComplexArray oth = new ComplexArray(newDat);
//		double[] xCorr = correlations.getFastFFT().realInverse(oth);
		double[] xCorr = correlations.getFastFFT().realInverse(conjArray);
		double[] delayAndHeight = correlations.getInterpolatedPeak(xCorr, scale, halfFFFTLen);
		delayAndHeight = Arrays.copyOf(delayAndHeight, 3);
		delayAndHeight[2] = totalOverlap;
//		System.out.printf("Offsets %d, %d, corr %3.3f, overlap %d of %d = %3.1f%%\n",
@ -12,10 +12,14 @@ import PamguardMVC.PamDataUnit;
import PamguardMVC.dataSelector.DataSelector;
import PamguardMVC.dataSelector.DataSelectorCreator;
import PamguardMVC.datamenus.DataMenuParent;
import clipgenerator.localisation.ClipFFTOrganiser;
import fftManager.fftorganiser.FFTDataOrganiser;
import fftManager.fftorganiser.OrganisedFFTData;

public class ClipDataBlock extends ClipDisplayDataBlock<ClipDataUnit> {
public class ClipDataBlock extends ClipDisplayDataBlock<ClipDataUnit> implements OrganisedFFTData {

	private ClipProcess clipProcess;
	private ClipFFTOrganiser fftOrganiser;

	public ClipDataBlock(String dataName,
			ClipProcess clipProcess, int channelMap) {
@ -76,4 +80,14 @@ public class ClipDataBlock extends ClipDisplayDataBlock<ClipDataUnit> {
//		DataSelector newDS = super.getDataSelector("Randomuiname", allowScores, selectorType);
		return ds;
	}


	@Override
	public FFTDataOrganiser getFFTDataOrganiser() {
		if (fftOrganiser == null) {
			fftOrganiser = new ClipFFTOrganiser(clipProcess.getClipControl(), this);
		}
		return fftOrganiser;
	}

}
@ -10,6 +10,7 @@ import PamController.PamController;
import PamDetection.AbstractLocalisation;
import PamDetection.PamDetection;
import PamUtils.PamUtils;
import PamUtils.complex.ComplexArray;
import PamguardMVC.PamConstants;
import PamguardMVC.PamDataBlock;
import PamguardMVC.PamDataUnit;
@ -226,6 +227,44 @@ public class ClipDataUnit extends PamDataUnit<PamDataUnit, SuperDetection> imple
		}
		return specData;
	}
	/**
	 * Generate complex spectrogram data for the clip.
	 * @param channel channel number
	 * @param fftLength FFT length
	 * @param fftHop FFT hop
	 * @return Array of ComplexArray FFT data (half fft length, due to real input)
	 */
	public ComplexArray[] generateSpectrogramArrays(int channel, int fftLength, int fftHop) {
		// TODO Auto-generated method stub
		double[] wave = getSpectrogramWaveData(channel, getDisplaySampleRate());
		if (wave == null) {
			return null;
		}
		int nFFT = (wave.length - (fftLength-fftHop)) / fftHop;
		if (nFFT <= 0) {
			return null;
		}
		ComplexArray[] specData = new ComplexArray[nFFT];
		double[] waveBit = new double[fftLength];
		double[] winFunc = getWindowFunc(fftLength);
		Complex[] complexOutput = Complex.allocateComplexArray(fftLength/2);
		int wPos = 0;
		getFastFFT(fftLength);
		int m = FastFFT.log2(fftLength);
		for (int i = 0; i < nFFT; i++) {
			wPos = i*fftHop;
			for (int j = 0; j < fftLength; j++) {
//				waveBit[j] = wave[j+wPos]*winFunc[j];
				waveBit[j] = wave[j+wPos]; // no windowing for this since used in cross correlation.
			}
			specData[i] = fastFFT.rfft(waveBit, fftLength);
//			fastFFT.rfft(waveBit, complexOutput, m);
//			for (int j = 0; j < fftLength/2; j++) {
//				specData[i][j] = complexOutput[j].clone();
//			}
		}
		return specData;
	}

	protected static FastFFT fastFFT;
	protected static FastFFT getFastFFT(int fftLength) {
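A hypothetical use of the new generateSpectrogramArrays method: accumulate the conjugate product of matching FFT slices from two channels of a clip as the input to an inverse FFT based delay measurement. This is an illustrative fragment only; the only ComplexArray calls assumed are the ones visible in this diff (getData() returns interleaved re,im pairs).

		ComplexArray[] specA = clipDataUnit.generateSpectrogramArrays(0, 512, 256);
		ComplexArray[] specB = clipDataUnit.generateSpectrogramArrays(1, 512, 256);
		if (specA != null && specB != null) {
			double[] conj = new double[512];           // 256 complex bins, interleaved re,im
			for (int s = 0; s < Math.min(specA.length, specB.length); s++) {
				double[] a = specA[s].getData(), b = specB[s].getData();
				for (int re = 0, im = 1; re < conj.length; re += 2, im += 2) {
					// a * conj(b): re = aR*bR + aI*bI, im = aI*bR - aR*bI
					conj[re] += a[re] * b[re] + a[im] * b[im];
					conj[im] += a[im] * b[re] - a[re] * b[im];
				}
			}
			// conj now holds the summed half-spectrum cross power, ready for realInverse().
		}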
@ -94,7 +94,7 @@ public class ClipProcess extends SpectrogramMarkProcess {
		symbolManager.addSymbolOption(StandardSymbolManager.HAS_LINE_AND_LENGTH);
		clipDataBlock.setPamSymbolManager(symbolManager);
		addOutputDataBlock(clipDataBlock);
		clipDelays = new ClipDelays(clipControl);
		clipDelays = new ClipDelays(clipControl, clipDataBlock);
		buoyLocaliserManager = new BuoyLocaliserManager();
		manualAnnotaionHandler = new ManualAnnotationHandler(clipControl, clipDataBlock);
		clipDataBlock.setAnnotationHandler(manualAnnotaionHandler);
@ -505,6 +505,9 @@ public class ClipProcess extends SpectrogramMarkProcess {
//			}
//		}
//	}
		if (rawDataBlock == null) {
			return 0;
		}

		double[][] rawData = null;
		try {
@ -548,6 +551,7 @@ public class ClipProcess extends SpectrogramMarkProcess {
		clipDataUnit.setTriggerDataUnit(dataUnit);
		clipDataUnit.setFrequency(dataUnit.getFrequency());
		lastClipDataUnit = clipDataUnit;
		clipDataUnit.setParentDataBlock(clipDataBlock);
		if (bearingLocaliser != null) {
			localiseClip(clipDataUnit, bearingLocaliser, hydrophoneMap);
		}
@ -1,11 +1,19 @@
package clipgenerator.localisation;

import java.util.ArrayList;

import Array.ArrayManager;
import Array.SnapshotGeometry;
import Localiser.algorithms.Correlations;
import PamguardMVC.PamDataUnit;
import PamguardMVC.PamProcess;
import PamguardMVC.PamRawDataBlock;
import PamguardMVC.toad.GenericTOADCalculator;
import clipgenerator.ClipControl;
import clipgenerator.ClipDataBlock;
import clipgenerator.ClipDataUnit;
import fftManager.Complex;
import group3dlocaliser.algorithm.toadbase.TOADInformation;

/**
 * Class for calculating clip delays when clip has
@ -19,84 +27,119 @@ public class ClipDelays {

	private ClipControl clipControl;

	private SpectrogramCorrelator spectrogramCorrelator = new SpectrogramCorrelator();
//	private SpectrogramCorrelator spectrogramCorrelator = new SpectrogramCorrelator();

	public ClipDelays(ClipControl clipControl) {
//	private Correlations correlations;
	private GenericTOADCalculator toadCalculator;

	public ClipDelays(ClipControl clipControl, ClipDataBlock clipDataBlock) {
		this.clipControl = clipControl;
//		correlations = new Correlations();
		toadCalculator = new GenericTOADCalculator(clipControl);
		toadCalculator.setFftDataOrganiser(new ClipFFTOrganiser(clipControl, clipDataBlock));
	}

	/**
	 * Calculate all the delays from a clip.
	 * Calculate all the delays from a clip. Use the better, more standardised functions.
	 * @param clipDataUnit
	 * @return an array of delays between the different channels.
	 */
	public double[] getDelays(ClipDataUnit clipDataUnit) {
		int nChan = PamUtils.PamUtils.getNumChannels(clipDataUnit.getChannelBitmap());
		if (nChan <= 1) {
		double sampleRate = clipControl.getClipDataBlock().getSampleRate();
		SnapshotGeometry geometry = ArrayManager.getArrayManager().getSnapshotGeometry(clipDataUnit.getHydrophoneBitmap(), clipDataUnit.getTimeMilliseconds());
		ArrayList<PamDataUnit> clips = new ArrayList(1);
		clips.add(clipDataUnit);
		TOADInformation toadInfo = toadCalculator.getTOADInformation(clips, sampleRate, clipDataUnit.getChannelBitmap(), geometry);
		if (toadInfo == null) {
			return null;
		}
		/**
		 * Check the hydrophone spacings to get the maximum delay
		 */
		int hydrophoneMap = clipDataUnit.getChannelBitmap();
		try {
			PamRawDataBlock rawDataBlock = clipDataUnit.getParentDataBlock().getRawSourceDataBlock();
			if (rawDataBlock != null) {
				hydrophoneMap = rawDataBlock.getChannelListManager().channelIndexesToPhones(hydrophoneMap);
		double[][] secs = toadInfo.getToadSeconds();
		int nChan = secs.length;
		int nDelay = nChan*(nChan-1)/2;
		double[] delays = new double[nDelay];
		for (int i = 0, k = 0; i < nChan; i++) {
			for (int j = i+1; j < nChan; j++, k++) {
				delays[k] = secs[i][j]*sampleRate;
			}
		}
		catch (Exception e) { }
		float sampleRate = clipControl.getClipProcess().getSampleRate();
		double[] maxDelays = spectrogramCorrelator.correlations.getMaxDelays(sampleRate,
				hydrophoneMap, clipDataUnit.getTimeMilliseconds());

		int fftLength = 512;
		int fftHop = fftLength/2;
		StashedSpectrum[] stashedSpectra = new StashedSpectrum[nChan];
		for (int i = 0; i < nChan; i++) {
			stashedSpectra[i] = new StashedSpectrum(i, clipDataUnit.generateComplexSpectrogram(i, fftLength, fftHop));
		}
		int nDelays = ((nChan-1) * nChan) / 2;
		double[] delays = new double[nDelays];
		int iDelay = 0;

		/**
		 * Limit bearing analysis to frequency range of the block.
		 */
		int[] analBins = {0, fftLength/2};
		double[] fRange = clipDataUnit.getFrequency();
		if (fRange != null) for (int i = 0; i < 2; i++) {
			analBins[i] = (int) Math.round(fRange[i] * fftLength / sampleRate);
			analBins[i] = Math.max(0, Math.min(fftLength/2, analBins[i]));
		}
		if (analBins[1] == 0) analBins[1] = fftLength/2;


		for (int i = 0; i < nChan; i++) {
			for (int j = i+1; j < nChan; j++) {
				double[] d = spectrogramCorrelator.getDelay(stashedSpectra[i].specData, stashedSpectra[j].specData,
						maxDelays[iDelay], analBins);
				if (d == null) {
					return null;
				}
				delays[iDelay++] = d[0];
//				System.out.println(String.format("Clip delay channels %d and %d = %3.2f samples, height %3.2f",
//						i, j, d[0], d[1]));
			}
		}

		return delays;
	}

	private class StashedSpectrum {
		Complex[][] specData;
		int iChan;
		public StashedSpectrum(int iChan, Complex[][] specData) {
			super();
			this.iChan = iChan;
			this.specData = specData;
		}
//	/**
//	 * Calculate all the delays from a clip.
//	 * @param clipDataUnit
//	 * @return an array of delays between the different channels.
//	 */
//	@Deprecated
//	public double[] getDelays(ClipDataUnit clipDataUnit) {
//		int nChan = PamUtils.PamUtils.getNumChannels(clipDataUnit.getChannelBitmap());
//		if (nChan <= 1) {
//			return null;
//		}
//		/**
//		 * Check the hydrophone spacings to get the maximum delay
//		 */
//		int hydrophoneMap = clipDataUnit.getChannelBitmap();
//		try {
//			PamRawDataBlock rawDataBlock = clipDataUnit.getParentDataBlock().getRawSourceDataBlock();
//			if (rawDataBlock != null) {
//				hydrophoneMap = rawDataBlock.getChannelListManager().channelIndexesToPhones(hydrophoneMap);
//			}
//		}
//		catch (Exception e) { }
//		float sampleRate = clipControl.getClipProcess().getSampleRate();
//		double[] maxDelays = spectrogramCorrelator.correlations.getMaxDelays(sampleRate,
//				hydrophoneMap, clipDataUnit.getTimeMilliseconds());
//
//		int fftLength = 512;
//		int fftHop = fftLength/2;
//		StashedSpectrum[] stashedSpectra = new StashedSpectrum[nChan];
//		for (int i = 0; i < nChan; i++) {
//			stashedSpectra[i] = new StashedSpectrum(i, clipDataUnit.generateComplexSpectrogram(i, fftLength, fftHop));
//		}
//		int nDelays = ((nChan-1) * nChan) / 2;
//		double[] delays = new double[nDelays];
//		int iDelay = 0;
//
//		/**
//		 * Limit bearing analysis to frequency range of the block.
//		 */
//		int[] analBins = {0, fftLength/2};
//		double[] fRange = clipDataUnit.getFrequency();
//		if (fRange != null) for (int i = 0; i < 2; i++) {
//			analBins[i] = (int) Math.round(fRange[i] * fftLength / sampleRate);
//			analBins[i] = Math.max(0, Math.min(fftLength/2, analBins[i]));
//		}
//		if (analBins[1] == 0) analBins[1] = fftLength/2;
//
//
//		for (int i = 0; i < nChan; i++) {
//			for (int j = i+1; j < nChan; j++) {
//				double[] d = spectrogramCorrelator.getDelay(stashedSpectra[i].specData, stashedSpectra[j].specData,
//						maxDelays[iDelay], analBins);
//				if (d == null) {
//					return null;
//				}
//				delays[iDelay++] = d[0];
////				System.out.println(String.format("Clip delay channels %d and %d = %3.2f samples, height %3.2f",
////						i, j, d[0], d[1]));
//			}
//		}
//
//
//		return delays;
//	}

	}

//	private class StashedSpectrum {
//		Complex[][] specData;
//		int iChan;
//		public StashedSpectrum(int iChan, Complex[][] specData) {
//			super();
//			this.iChan = iChan;
//			this.specData = specData;
//		}
//
//	}

}
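The new getDelays above hands the whole measurement to GenericTOADCalculator and then just converts the pairwise TOAD matrix (in seconds) into the flat, samples-based array the rest of the clip localisation expects. A stand-alone sketch of that conversion, with illustrative names, taking the channel pairs in the same (0,1), (0,2), ..., (1,2), ... order as the loops above:

    // Flatten an nChan x nChan matrix of time differences (seconds) into
    // nChan*(nChan-1)/2 pairwise delays expressed in samples.
    static double[] flattenToads(double[][] toadSeconds, double sampleRate) {
        int nChan = toadSeconds.length;
        double[] delays = new double[nChan * (nChan - 1) / 2];
        for (int i = 0, k = 0; i < nChan; i++) {
            for (int j = i + 1; j < nChan; j++, k++) {
                delays[k] = toadSeconds[i][j] * sampleRate;   // seconds -> samples
            }
        }
        return delays;
    }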
55
src/clipgenerator/localisation/ClipFFTOrganiser.java
Normal file
@ -0,0 +1,55 @@
package clipgenerator.localisation;

import clipgenerator.ClipControl;
import clipgenerator.ClipDataBlock;
import fftManager.fftorganiser.FFTDataOrganiser;
import fftManager.fftorganiser.FFTInputTypes;

public class ClipFFTOrganiser extends FFTDataOrganiser {

	public ClipFFTOrganiser(ClipControl clipControl, ClipDataBlock clipDataBlock) {
		super(clipControl);
		setInput(clipDataBlock, FFTInputTypes.RAWDataHolder);
		setFftLength(512);
		setFftHop(256);
	}

//	@Override
//	public FFTDataList createFFTDataList(PamDataUnit pamDataUnit, double sampleRate, int channelMap)
//			throws FFTDataException {
//		ClipDataUnit clip = (ClipDataUnit) pamDataUnit;
//		double[][] wave = clip.getRawData();
//		int nChan = wave.length;
//		if (wave == null || wave.length == 0) {
//			return null;
//		}
//		int samples = wave[0].length;
//		int fftLength = getFftLength();
//		int fftHop = getFftHop();
//		int nFFT = (samples - (fftLength-fftHop)) / fftHop;
//		if (nFFT <= 0) {
//			return null;
//		}
//		ComplexArray[] specData = new ComplexArray[nFFT];
//		double[] waveBit = new double[fftLength];
////		double[] winFunc = getWindowFunc(fftLength);
//		Complex[] complexOutput = Complex.allocateComplexArray(fftLength/2);
//		int wPos = 0;
//		int m = FastFFT.log2(fftLength);
//		for (int i = 0; i < nFFT; i++) {
//			wPos = i*fftHop;
//			for (int j = 0; j < fftLength; j++) {
////				waveBit[j] = wave[j+wPos]*winFunc[j];
//				waveBit[j] = wave[j+wPos]; // no windowing for this since used in cross correlation.
//			}
//			specData[i] = fastFFT.rfft(waveBit, fftLength);
////			fastFFT.rfft(waveBit, complexOutput, m);
////			for (int j = 0; j < fftLength/2; j++) {
////				specData[i][j] = complexOutput[j].clone();
////			}
//		}
//		return specData;
//
//	}

}
@ -1,130 +0,0 @@
package clipgenerator.localisation;

import Localiser.algorithms.Correlations;
import fftManager.Complex;
import fftManager.FastFFT;

/**
 * Class with some functions for calculating cross correlations
 * between lumps of spectrogram.
 * @author Doug Gillespie
 *
 */
public class SpectrogramCorrelator {

	protected Correlations correlations = new Correlations();

	private FastFFT fastFFT = new FastFFT();

	public SpectrogramCorrelator() {
		// TODO Auto-generated constructor stub
	}

	/**
	 * Find the maxima in the cross correlation function from
	 * two lumps of spectrogram data. Note that the spectrogram
	 * data will only go to half the original FFT length.
	 * @param specData1 block of complex spectrogram data
	 * @param specData2 second block of complex spectrogram data
	 * @return cross correlation position and value.
	 */
	public double[] getDelay(Complex[][] specData1, Complex[][] specData2) {
		if (specData1 == null || specData1.length == 0) {
			return null;
		}
		return getDelay(specData1, specData2, specData1[0].length);
	}
	/**
	 * Find the maxima in the cross correlation function from
	 * two lumps of spectrogram data. Note that the spectrogram
	 * data will only go to half the original FFT length.
	 * @param specData1 block of complex spectrogram data
	 * @param specData2 second block of complex spectrogram data
	 * @param maxDelay maximum possible delay between channels.
	 * @return cross correlation position and value.
	 */
	public double[] getDelay(Complex[][] specData1, Complex[][] specData2, double maxDelay) {
		if (specData1 == null || specData1[0] == null) {
			return null;
		}
		int[] fBins = {0, specData1[0].length};
		return getDelay(specData1, specData2, maxDelay, fBins);
	}
	/**
	 * Find the maxima in the cross correlation function from
	 * two lumps of spectrogram data. Note that the spectrogram
	 * data will only go to half the original FFT length.
	 * @param specData1 block of complex spectrogram data
	 * @param specData2 second block of complex spectrogram data
	 * @param analBins frequency bin range for the analysis
	 * @param maxDelay maximum possible delay between channels.
	 * @return cross correlation position and value.
	 */
	public double[] getDelay(Complex[][] specData1, Complex[][] specData2, double maxDelay, int[] analBins) {

		if (specData1 == null || specData2 == null) {
			return null;
		}
		if (specData1.length != specData2.length) {
			return null;
		}
		if (specData1.length == 0 || specData1[0].length != specData2[0].length) {
			return null;
		}

		int nSlice = specData1.length;
		int halfFFT = specData1[0].length;
		int fftLength = halfFFT * 2;

		if (analBins == null) {
			int[] ab = {0, halfFFT};
			analBins = ab;
		}

		Complex[] conData = Complex.allocateComplexArray(halfFFT*2);
		Complex c1, c2;
		double scale1 = 0, scale2 = 0;
		// (a + ib)*(c-id) = a*c + b*d + i(b*c-a*d)
		for (int iSlice = 0; iSlice < nSlice; iSlice++) {
//		for (int iSlice = nSlice/2; iSlice < nSlice; iSlice++) {
			scale1 = scale2 = 0;
			for (int i = 0; i < halfFFT; i++) {
				c1 = specData1[iSlice][i];
				c2 = specData2[iSlice][i];
				scale1 += c1.magsq();
				scale2 += c2.magsq();
			}
			scale1 = Math.sqrt(scale1*scale2)*2;
			for (int i = analBins[0]; i < analBins[1]; i++) {
				c1 = specData1[iSlice][i];
				c2 = specData2[iSlice][i];
				conData[i].real += (c1.real*c2.real + c1.imag*c2.imag)/scale1;
				conData[i].imag += (c1.imag*c2.real - c1.real*c2.imag)/scale1;
//				realData2[iSlice][i] = c1.magsq();
			}
//			break;
		}
		// now fill in the back half of the fft data prior to taking the inverse fft
		conData[0].assign(0,0);
		for (int i = 0; i < halfFFT-1; i++) {
			conData[fftLength-i-1].real = conData[i].real;
			conData[fftLength-i-1].imag = -conData[i].imag;
		}
		// now take the ifft to get the cross correlation function.
		int m = FastFFT.log2(fftLength);
		fastFFT.ifft(conData, m);

		double xCorrResult[] = correlations.getInterpolatedPeak(conData, 1, maxDelay);

		return xCorrResult;
//
//		double[] crossX = new double[fftLength];
//		for (int i = 0; i < halfFFT; i++) {
//			crossX[i+halfFFT] = conData[i].real;
//			crossX[i] = conData[i+halfFFT].real;
//		}
//
//		return crossX;
	}

}
@ -208,6 +208,30 @@ public class FastFFT {
		ifft( x, n, false);
	}

	/**
	 * Inverse transform of what was real data. Will automatically
	 * assume a second half complex conj of the first half. This can
	 * be used to transform back data used in cross correlations rather
	 * than trying to fill in the second half of the conjugate, which is not
	 * really possible with the missing middle point of the FFT.
	 * @param x Complex input data, which originally came from real data.
	 * @return real data array 2x the length of the Complex input (i.e. same length as
	 * the double array within the complex input).
	 */
	public synchronized double[] realInverse(ComplexArray x) {
		double[] data = x.getData().clone();
		int n = data.length; // i.e. 2* the number of complex numbers.
		/*
		 * Check the transform has been created with the right fft length
		 */
		if (doubleFFT_1D == null || transformSize != n) {
			doubleFFT_1D = new DoubleFFT_1D(transformSize = n);
		}

		doubleFFT_1D.realInverse(data, true);
		return data;
	}

	/**
	 * Inverse FFT for Complex data.
	 * <br> IFFT is performed 'in place' so data are overwritten
@ -17,6 +17,35 @@ public class FftTest {
	 * @param args
	 */
	public static void main(String[] args) {
		new FftTest().conjugateTest();
	}

	private void conjugateTest() {
		// check exactly how it does complex conj of real data.
		FastFFT fastFFT = new FastFFT();
		int n = 8;
		double[] data = new double[n];
		double totSq = 0;
		for (int i = 0; i < n; i++) {
			data[i] = Math.random();
			totSq += Math.pow(data[i],2);
		}
		ComplexArray halfDat = fastFFT.rfft(data, n);
		double fTot = 0;
		for (int i = 0; i < halfDat.length(); i++) {
			fTot += halfDat.magsq(i);
		}
		System.out.printf("Total %3.3f, %3.3f ratio %3.3f\n", totSq, fTot, fTot/totSq);

		double[] fullDat = data.clone();
		ComplexArray other = fastFFT.rfftFull(fullDat, n);
		double[] reverse = fastFFT.realInverse(halfDat);

		System.out.println(halfDat.toString());
		System.out.println(other.toString());

	}
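The printed ratio in conjugateTest is easy to sanity check. Assuming the forward transform is unnormalised, as in JTransforms, Parseval's relation for a length-$N$ real signal is

$$\sum_{n=0}^{N-1} x[n]^2 \;=\; \frac{1}{N}\sum_{k=0}^{N-1} \lvert X[k]\rvert^2 ,$$

and because the spectrum of real data is conjugate symmetric, the sum over the $N/2$ stored bins is roughly half of the full-spectrum sum. The ratio fTot/totSq should therefore come out near $N/2$ (about 4 for the n = 8 test above), give or take the DC and Nyquist terms that have no mirror image. The same missing negative-frequency half is where the factor of 2 in the various sqrt(scale1*scale2)*2 expressions elsewhere in this commit comes from.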
	private void speedTest() {
		int nTrial = 100000;
		int fftLen = 2000;
		long startTime, endTime;
@ -34,18 +63,20 @@ public class FftTest {

		Filter filter = new IirfFilter(0, sampleRate, filterParams);

		try {
			if (args.length > 0) {
				nTrial = Integer.valueOf(args[0]);
			}
			if (args.length > 1) {
				fftLen = Integer.valueOf(args[1]);
			}
		}
		catch(NumberFormatException e) {
			e.printStackTrace();
			return;
		}
//		try {
//			if (args.length > 0) {
//				nTrial = Integer.valueOf(args[0]);
//			}
//			if (args.length > 1) {
//				fftLen = Integer.valueOf(args[1]);
//			}
//		}
		nTrial = 10;
		fftLen = 512;
//		catch(NumberFormatException e) {
//			e.printStackTrace();
//			return;
//		}

		int m = PamUtils.log2(fftLen);

@ -141,9 +141,9 @@ public class FFTDataOrganiser {
	 * @see FFTInputTypes
	 */
	public FFTDataList createFFTDataList(PamDataUnit pamDataUnit, double sampleRate, int channelMap) throws FFTDataException {
		if (rawOrFFTData == null) {
			return null;
		}
//		if (rawOrFFTData == null) {
//			return null;
//		}

//		if (PamController.getInstance().getRunMode() == PamController.RUN_PAMVIEW) {
//			checkOfflineDataLoad(rawOrFFTData, pamDataUnit.getTimeMilliseconds(), pamDataUnit.getEndTimeInMilliseconds());
@ -19,5 +19,16 @@ public class IshmaelDataUnit extends PamDataUnit {
		this.ishmaelData = ishmaelData;
	}

	@Override
	public double[] getFrequency() {
		if (ishmaelData.lofreq == 0 && ishmaelData.hifreq == 0) {
			return null;
		}
		double[] freq = new double[2];
		freq[0] = ishmaelData.lofreq;
		freq[1] = ishmaelData.hifreq;
		return freq;
	}


}
@ -184,8 +184,8 @@ public class WhistleDelays {
	}

	private void clear(int fftLength) {
		if (complexData == null || complexData.length() != fftLength) {
			complexData = new ComplexArray(fftLength);
		if (complexData == null || complexData.length() != fftLength/2) {
			complexData = new ComplexArray(fftLength/2);
			this.fftLength = fftLength;
		}
		else {
@ -216,21 +216,21 @@ public class WhistleDelays {
	}

	private double getDelay(double maxDelay) {
		int i2;
		for (int i = 0; i < fftLength / 2; i++) {
			i2 = fftLength - 1 - i;
			complexData.setReal(i2, complexData.getReal(i));
			complexData.setImag(i2, -complexData.getImag(i));
		}
		fft.ifft(complexData, fftLength);
		double scale = Math.sqrt(scale1*scale2)*2;
		ComplexArray bearingData = complexData;
//		double maxInd = -getPeakPos(bearingData);
		double[] corrPeak = correlations.getInterpolatedPeak(complexData, scale, maxDelay);
//		int i2;
//		for (int i = 0; i < fftLength / 2; i++) {
//			i2 = fftLength - 1 - i;
//			complexData.setReal(i2, complexData.getReal(i));
//			complexData.setImag(i2, -complexData.getImag(i));
//		}
//		fft.ifft(complexData, fftLength);
//		ComplexArray bearingData = complexData;
//		double[] corrPeak = correlations.getInterpolatedPeak(complexData, scale, maxDelay);
		// swap to use the more efficient inverse FFT that assumes the conjugate half. Faster and more accurate.
		double scale = Math.sqrt(scale1*scale2)*2/fftLength;
		double[] xCorr = fft.realInverse(complexData);
		double[] corrPeak = correlations.getInterpolatedPeak(xCorr, scale, maxDelay);
		return corrPeak[0];


//		return maxInd;
	}

	private double getPeakPos(Complex[] bearingData) {