Merge from DG (#150)

* Localization output

* Update localiser output

* Start effort management system

* Start of Effort plotting

Start of effort plotting on the map. Framework for using Effort data in other areas (such as Tethys output).

* Logger forms update

Effort and Symbol selectors working with Logger forms. Also functions to add, edit and delete form rows in Viewer mode.

* Update LoggerFormGraphics.java

Add correct symbol management to forms graphics.

* Effort lines on map

Sort of working OK in real time mode.

* Working effort system

Currently only for the map, but seems to work OK.

* Update Tethys to latest nilus schema

* Raven importer

Start of a system for a Raven importer. Not quite working yet.

* Raven import

Basic functionality working. Not nice to use though.

* Tethys Localization work

Abstracting out the writing of localization objects and document header information so that individual localisers can have fine-scale control over these.

* updated Nilus

A few updates around track and target motion measures.

* FX Plot for raven data

Also sorted out symbols a bit and improved symbol selector in Generic plots.

* Update spectrogram mark bearing display

Remove the 90-angle bit

* Raven extra columns

Logging of data from additional Raven table columns
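
For context, Raven selection tables are tab-separated text files. Below is a minimal sketch of how the standard and extra columns are separated; the standard column names match those used by RavenFileReader, while the example header line and the two extra column names ("Species", "Quality") are invented for illustration.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class RavenHeaderSketch {

        // Standard Raven selection table columns expected by the importer.
        static final String[] STANDARD_COLUMNS = {"Selection", "View", "Channel",
                "Begin Time (s)", "End Time (s)", "Low Freq (Hz)", "High Freq (Hz)"};

        public static void main(String[] args) {
            // Raven selection tables are tab separated; this (invented) header has two extra columns.
            String header = String.join("\t", "Selection", "View", "Channel",
                    "Begin Time (s)", "End Time (s)", "Low Freq (Hz)", "High Freq (Hz)",
                    "Species", "Quality");
            List<String> columnNames = Arrays.asList(header.split("\t"));
            List<String> standard = Arrays.asList(STANDARD_COLUMNS);
            ArrayList<String> extraColumns = new ArrayList<>();
            for (String name : columnNames) {
                if (!standard.contains(name)) {
                    extraColumns.add(name); // anything not in the standard set is an extra column
                }
            }
            System.out.println("Extra columns: " + extraColumns); // prints [Species, Quality]
        }
    }

Extra columns found this way are type-checked and added to the database table so their values are logged alongside the standard detection data (see RavenFileReader and RavenLogging below).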
Commit 34ba7ebceb (parent cb1b28423e) by Douglas Gillespie, 2024-08-23 09:15:40 +01:00, committed by GitHub.
10 changed files with 339 additions and 39 deletions

File: org.eclipse.jdt.core.prefs (Eclipse compiler settings)

@ -11,9 +11,9 @@ org.eclipse.jdt.core.codeComplete.staticFinalFieldPrefixes=
org.eclipse.jdt.core.codeComplete.staticFinalFieldSuffixes=
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate
org.eclipse.jdt.core.compiler.codegen.targetPlatform=17
org.eclipse.jdt.core.compiler.codegen.targetPlatform=19
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=17
org.eclipse.jdt.core.compiler.compliance=19
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
@ -23,4 +23,4 @@ org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning
org.eclipse.jdt.core.compiler.release=enabled
org.eclipse.jdt.core.compiler.source=17
org.eclipse.jdt.core.compiler.source=19

File: EmptyTableDefinition.java

@ -402,10 +402,10 @@ public class EmptyTableDefinition implements Cloneable {
protected EmptyTableDefinition clone() {
try {
EmptyTableDefinition clone = (EmptyTableDefinition) super.clone();
clone.pamTableItems = new ArrayList<>();
for (PamTableItem tableItem : this.pamTableItems) {
clone.pamTableItems.add(tableItem.clone());
}
// clone.pamTableItems = new ArrayList<>();
// for (PamTableItem tableItem : this.pamTableItems) {
// clone.pamTableItems.add(tableItem.clone());
// }
return clone;
} catch (CloneNotSupportedException e) {
e.printStackTrace();

File: RavenColumnInfo.java (new file)

@ -0,0 +1,35 @@
package ravendata;
import java.io.Serializable;
/**
* Information on a Raven table non standard column.
* @author dg50
*
*/
public class RavenColumnInfo implements Serializable {
private static final long serialVersionUID = 1L;
public int ravenTableIndex;
public String name;
public int maxStrLength;
public Integer sqlType;
public RavenColumnInfo(int ravenTableIndex, String name) {
super();
this.ravenTableIndex = ravenTableIndex;
this.name = name;
}
public RavenColumnInfo(String name, int maxStrLength, Integer sqlType) {
super();
this.name = name;
this.maxStrLength = maxStrLength;
this.sqlType = sqlType;
}
}

File: RavenControl.java

@ -9,16 +9,11 @@ import java.util.ArrayList;
import javax.swing.JMenuItem;
import Acquisition.AcquisitionControl;
import PamController.InputStoreInfo;
import PamController.PamConfiguration;
import PamController.PamControlledUnit;
import PamController.PamControlledUnitSettings;
import PamController.PamController;
import PamController.PamSettingManager;
import PamController.PamSettings;
import PamView.dialog.warn.WarnOnce;
import pamViewFX.PamSettingsMenuPane;
import ravendata.swing.RavenImportDialog;
/**
@ -40,6 +35,9 @@ public class RavenControl extends PamControlledUnit implements PamSettings {
this.ravenProcess = new RavenProcess(this);
addPamProcess(ravenProcess);
PamSettingManager.getInstance().registerSettings(this);
if (isViewer()) {
ravenProcess.getRavenLogging().addExtraColumns(ravenParameters.getExtraColumns());
}
}
public RavenControl(PamConfiguration pamConfiguration, String unitType, String unitName) {
@ -77,11 +75,29 @@ public class RavenControl extends PamControlledUnit implements PamSettings {
e.printStackTrace();
return;
}
sortExtraColumns(fileReader, ravenData);
if (ravenData != null) {
ravenProcess.createPAMGuardData(fileReader, ravenData);
}
}
/**
* Check some information on column types.
* @param fileReader
* @param ravenData
*/
private void sortExtraColumns(RavenFileReader fileReader, ArrayList<RavenDataRow> ravenData) {
/*
* primarily need to add the extra columns to the SQL interface to make sure it's going to
* save the additional data.
* Also put them into the module parameters.
*/
ravenParameters.setExtraColumns(fileReader.getExtraColumns());
ravenProcess.getRavenLogging().addExtraColumns(fileReader.getExtraColumns());
}
@Override

File: RavenDataRow.java

@ -1,5 +1,8 @@
package ravendata;
import java.util.ArrayList;
import java.util.HashMap;
public class RavenDataRow {
private int iRow;
@ -11,15 +14,16 @@ public class RavenDataRow {
private double endT;
private double f1;
private double f2;
private int[] dataIndexes;
// private int[] dataIndexes;
private boolean unpackOK;
private HashMap<String, String> extraData = new HashMap<>();
// data on a row of raven data from a table.
public RavenDataRow(int iRow, String[] data, int[] dataIndexes) {
public RavenDataRow(RavenFileReader ravenReader, int iRow, String[] data) {
this.iRow = iRow;
this.data = data;
this.dataIndexes = dataIndexes;
unpackOK = unpackRow(dataIndexes);
// this.dataIndexes = dataIndexes;
unpackOK = unpackRow(ravenReader);
}
/**
@ -74,10 +78,12 @@ public class RavenDataRow {
/**
* Unpack the row into more useful columns using the column indexes.
* @param mainIndexes
* @param ravenReader
* @return
*/
private boolean unpackRow(int[] mainIndexes) {
private boolean unpackRow(RavenFileReader ravenReader) {
int[] mainIndexes = ravenReader.getMainIndexes();
ArrayList<RavenColumnInfo> extraColumns = ravenReader.getExtraColumns();
try {
selection = getInteger(mainIndexes[0]);
view = getString(mainIndexes[1]);
@ -86,6 +92,18 @@ public class RavenDataRow {
endT = getDouble(mainIndexes[4]);
f1 = getDouble(mainIndexes[5]);
f2 = getDouble(mainIndexes[6]);
// and add all the extra data into a HashMap
if (extraColumns == null) {
return true;
}
for (RavenColumnInfo col : extraColumns) {
String data = getString(col.ravenTableIndex);
if (data != null) {
col.maxStrLength = Math.max(col.maxStrLength, data.length());
extraData.put(col.name, data);
}
}
}
catch (Exception e) {
return false;
@ -93,6 +111,18 @@ public class RavenDataRow {
return true;
}
/**
* Get data from the extras map.
* @param name column name.
* @return
*/
public String getExtraData(String name) {
if (extraData == null) {
return null;
}
return extraData.get(name);
}
/**
* @return the selection
*/
@ -142,17 +172,11 @@ public class RavenDataRow {
return f2;
}
/**
* @return the dataIndexes
*/
protected int[] getDataIndexes() {
return dataIndexes;
}
/**
* @return the unpackOK
*/
protected boolean isUnpackOK() {
public boolean isUnpackOK() {
return unpackOK;
}
@ -165,4 +189,8 @@ public class RavenDataRow {
return this.channel == oth.channel && this.beginT == oth.beginT && this.endT == oth.endT && this.f1 == oth.f1 && this.f2 == oth.f2;
}
public HashMap<String, String> getExtraData() {
return extraData;
}
}

File: RavenDataUnit.java

@ -1,5 +1,9 @@
package ravendata;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.Set;
import PamDetection.PamDetection;
import PamguardMVC.AcousticDataUnit;
import PamguardMVC.DataUnitBaseData;
@ -7,6 +11,8 @@ import PamguardMVC.PamDataUnit;
public class RavenDataUnit extends PamDataUnit implements AcousticDataUnit, PamDetection {
private HashMap<String, String> extraData;
public RavenDataUnit(long timeMilliseconds, int channelMap, long durationMillis, double f1, double f2) {
super(timeMilliseconds);
setChannelBitmap(channelMap);
@ -15,6 +21,28 @@ public class RavenDataUnit extends PamDataUnit implements AcousticDataUnit, PamD
setFrequency(freq);
}
public HashMap<String, String> getExtraData() {
return extraData;
}
public void setExtraData(HashMap<String, String> extraData) {
this.extraData = extraData;
}
@Override
public String getSummaryString() {
String base = super.getSummaryString();
if (extraData == null) {
return base;
}
Set<Entry<String, String>> entries = extraData.entrySet();
for (Entry<String, String> e : entries) {
base += String.format("<br>%s: %s", e.getKey(), e.getValue());
}
return base;
}
}

File: RavenFileReader.java

@ -5,6 +5,7 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Types;
import java.util.ArrayList;
public class RavenFileReader {
@ -13,6 +14,7 @@ public class RavenFileReader {
private File ravenFile;
private ArrayList<String> columnNames;
private BufferedReader fileReader;
private ArrayList<RavenColumnInfo> extraColumns;
private static String[] standardColumns = {"Selection", "View", "Channel", "Begin Time (s)", "End Time (s)", "Low Freq (Hz)", "High Freq (Hz)"};
// private static String[] otherColumns = {"Selection", "View", "Channel", "Begin Time (s)", "End Time (s)", "end", "Low Freq (Hz)", "High Freq (Hz)",
@ -37,6 +39,14 @@ private int columnErrors;
}
}
public ArrayList<RavenColumnInfo> getExtraColumns() {
return extraColumns;
}
public int[] getMainIndexes() {
return mainIndexes;
}
/**
* open the file, read the file header and get a list of column names.
* @throws IOException
@ -64,17 +74,107 @@ private int columnErrors;
break;
}
String[] split = aLine.split("\t");
RavenDataRow aRow = new RavenDataRow(iRow++, split, mainIndexes);
RavenDataRow aRow = new RavenDataRow(this, iRow++, split);
allData.add(aRow);
}
checkExtraColumns(allData);
return allData;
}
private void checkExtraColumns(ArrayList<RavenDataRow> allData) {
if (extraColumns == null) {
return;
}
for (int i = 0; i < extraColumns.size(); i++) {
RavenColumnInfo col = extraColumns.get(i);
boolean allNull = isAllNull(col.name, allData);
if (allNull) {
continue;
}
boolean allInt = isAllInt(col.name, allData);
if (allInt) {
col.sqlType = Types.INTEGER;
continue;
}
boolean allDouble = isAllDouble(col.name, allData);
if (allDouble) {
col.sqlType = Types.DOUBLE;
continue;
}
// otherwise keep as string
col.sqlType = Types.CHAR;
}
}
/**
* Is data in every column null ?
* @param colName
* @param allData
* @return
*/
private boolean isAllNull(String colName, ArrayList<RavenDataRow> allData) {
for (RavenDataRow aRow : allData) {
String xData = aRow.getExtraData(colName);
if (xData != null && xData.length() > 0) {
return false;
}
}
return true;
}
/**
* Is data in every column either integer or null ?
* @param colName
* @param allData
* @return
*/
private boolean isAllInt(String colName, ArrayList<RavenDataRow> allData) {
for (RavenDataRow aRow : allData) {
String xData = aRow.getExtraData(colName);
if (xData == null || xData.length() == 0) {
continue;
}
try {
int val = Integer.valueOf(xData);
}
catch (NumberFormatException e) {
return false;
}
}
return true;
}
/**
* Is data in every column either double or null ?
* @param colName
* @param allData
* @return
*/
private boolean isAllDouble(String colName, ArrayList<RavenDataRow> allData) {
for (RavenDataRow aRow : allData) {
String xData = aRow.getExtraData(colName);
if (xData == null || xData.length() == 0) {
continue;
}
try {
double val = Double.valueOf(xData);
}
catch (NumberFormatException e) {
return false;
}
}
return true;
}
/**
* Check we have the correct standard columns.
*/
private int checkColumnNames() {
mainIndexes = new int[standardColumns.length];
boolean[] isStandard = new boolean[columnNames.size()];
columnErrors = 0;
for (int i = 0; i < standardColumns.length; i++) {
mainIndexes[i] = columnNames.indexOf(standardColumns[i]);
@ -82,7 +182,18 @@ private int columnErrors;
System.out.printf("Raven error: Unable to find column \"%s\" in data table\n", standardColumns[i]);
columnErrors++;
}
else {
isStandard[mainIndexes[i]] = true;
}
}
// now find any extra columns hidden away in there.
extraColumns = new ArrayList<>();
for (int i = 0; i < columnNames.size(); i++) {
if (isStandard[i] == false) {
extraColumns.add(new RavenColumnInfo(i, columnNames.get(i)));
}
}
return columnErrors;
}

File: RavenLogging.java

@ -1,18 +1,22 @@
package ravendata;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import PamDetection.AcousticSQLLogging;
import PamguardMVC.PamDataBlock;
import PamguardMVC.PamDataUnit;
import generalDatabase.DBControlUnit;
import generalDatabase.PamTableDefinition;
import generalDatabase.PamTableItem;
import generalDatabase.SQLLogging;
import generalDatabase.SQLTypes;
public class RavenLogging extends AcousticSQLLogging {
private RavenDataBlock ravenDataBlock;
private RavenControl ravenControl;
private PamTableItem f1;
private ArrayList<RavenColumnInfo> usedExtraColumns;
private PamTableItem[] extraItems;
public RavenLogging(RavenControl ravenControl, RavenDataBlock pamDataBlock) {
super(pamDataBlock, ravenControl.getUnitName());
@ -21,9 +25,60 @@ public class RavenLogging extends AcousticSQLLogging {
}
@Override
protected PamDataUnit createDataUnit(SQLTypes sqlTypes, long timeMilliseconds, int chanMap, long duration,
protected RavenDataUnit createDataUnit(SQLTypes sqlTypes, long timeMilliseconds, int chanMap, long duration,
double[] f) {
return new RavenDataUnit(timeMilliseconds, chanMap, duration, f[0], f[1]);
RavenDataUnit ravenDataUnit = new RavenDataUnit(timeMilliseconds, chanMap, duration, f[0], f[1]);
if (extraItems == null || usedExtraColumns == null) {
return ravenDataUnit;
}
HashMap<String, String> extraData = new HashMap<>();
for (int i = 0; i < usedExtraColumns.size(); i++) {
String data = extraItems[i].getDeblankedStringValue();
if (data != null && data.length() > 0) {
extraData.put(usedExtraColumns.get(i).name, data);
}
}
ravenDataUnit.setExtraData(extraData);
return ravenDataUnit;
}
public void addExtraColumns(ArrayList<RavenColumnInfo> extraColumns) {
this.usedExtraColumns = new ArrayList<RavenColumnInfo>();
PamTableDefinition baseTable = getBaseTableDefinition();
for (int i = 0; i < extraColumns.size(); i++) {
if (extraColumns.get(i).sqlType == null) {
continue;
}
usedExtraColumns.add(extraColumns.get(i));
}
extraItems = new PamTableItem[usedExtraColumns.size()];
for (int i = 0; i < usedExtraColumns.size(); i++) {
RavenColumnInfo col = usedExtraColumns.get(i);
extraItems[i] = new PamTableItem(col.name, Types.CHAR, Math.max(col.maxStrLength,30));
baseTable.addTableItem(extraItems[i]);
}
setTableDefinition(baseTable);
DBControlUnit dbc = DBControlUnit.findDatabaseControl();
if (dbc != null) {
dbc.getDbProcess().checkTable(baseTable);
}
}
@Override
public void setTableData(SQLTypes sqlTypes, PamDataUnit pamDataUnit) {
super.setTableData(sqlTypes, pamDataUnit);
// and do the extras
RavenDataUnit ravenDataUnit = (RavenDataUnit) pamDataUnit;
HashMap<String, String> extraData = ravenDataUnit.getExtraData();
if (extraData == null || extraItems == null || usedExtraColumns == null) {
return;
}
for (int i = 0; i < usedExtraColumns.size(); i++) {
String data = extraData.get(usedExtraColumns.get(i).name);
extraItems[i].setValue(data);
}
}

File: RavenParameters.java

@ -1,10 +1,26 @@
package ravendata;
import java.io.Serializable;
import java.util.ArrayList;
public class RavenParameters implements Serializable, Cloneable {
public static final long serialVersionUID = 1L;
public String importFile;
private ArrayList<RavenColumnInfo> extraColumns;
public ArrayList<RavenColumnInfo> getExtraColumns() {
if (extraColumns == null) {
extraColumns = new ArrayList<>();
}
return extraColumns;
}
public void setExtraColumns(ArrayList<RavenColumnInfo> extraColumns) {
this.extraColumns = extraColumns;
}
}

File: RavenProcess.java

@ -43,14 +43,17 @@ public class RavenProcess extends PamProcess {
@Override
public void pamStart() {
// TODO Auto-generated method stub
}
@Override
public void pamStop() {
// TODO Auto-generated method stub
}
public RavenLogging getRavenLogging() {
return ravenLogging;
}
protected void createPAMGuardData(RavenFileReader fileReader, ArrayList<RavenDataRow> ravenData) {
/**
* Need to find the acquisition module and then get detailed times of every file, not just
@ -84,6 +87,12 @@ public class RavenProcess extends PamProcess {
ravenDataBlock.clearAll();
ravenLogging.deleteData(0, System.currentTimeMillis()*2);
/**
* Had to add an offset for some messed up Raven data. May or may not have to include
* this as an option in future releases.
*/
long offsetMillis = 0;//2843100;
RavenDataRow prevRow = null;
for (RavenDataRow ravenRow : ravenData) {
if (ravenRow.equals(prevRow) == false) {
@ -91,17 +100,19 @@ public class RavenProcess extends PamProcess {
* A lot of Raven data appear twice, with different view values.
* No need to import both. so only doing this if they are different.
*/
int fileInd = getTimeIndex(ravenRow.getBeginT()*1000, absTime);
double ravenStart = ravenRow.getBeginT()*1000 + offsetMillis;
int fileInd = getTimeIndex(ravenStart, absTime);
if (fileInd == absTime.length) {
String msg = String.format("Data at time %6.4f is beyond the end of available sound file data", ravenRow.getBeginT());
WarnOnce.showWarning("Error importing RAVEN data", msg, WarnOnce.WARNING_MESSAGE);
break;
}
long fileStart = fileStarts[fileInd];
long absStart = fileStart + (long) (ravenRow.getBeginT()*1000.)-absTime[fileInd];
long absStart = fileStart + (long) (ravenStart)-absTime[fileInd];
long duration = (long) ((ravenRow.getEndT()-ravenRow.getBeginT())*1000.);
int chanMap = 1<<(ravenRow.getChannel()-1);
RavenDataUnit rdu = new RavenDataUnit(absStart, chanMap, duration, ravenRow.getF1(), ravenRow.getF2());
rdu.setExtraData(ravenRow.getExtraData());
getRavenDataBlock().addPamData(rdu);
ravenLogging.logData(DBControlUnit.findConnection(), rdu);
}
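
The time conversion in the hunk above can be hard to follow in isolation, so here is a minimal, self-contained sketch of the same mapping: a Raven "Begin Time (s)" value, measured from the start of the concatenated recordings, is converted to an absolute millisecond timestamp using the sound-file start times. The arrays and the findFileIndex helper are hypothetical stand-ins for the offsets and start times PAMGuard obtains from the acquisition module; this illustrates the approach rather than reproducing the project's code.

    public class RavenTimeSketch {

        /** Cumulative millisecond offset of each sound file within the Raven recording sequence (example values). */
        static long[] absTime = {0, 600_000, 1_200_000};
        /** Absolute (epoch) start time in milliseconds of each sound file (example values). */
        static long[] fileStarts = {1_700_000_000_000L, 1_700_000_600_000L, 1_700_001_200_000L};

        /** Index of the file containing the given Raven time, or absTime.length if beyond the end. */
        static int findFileIndex(double ravenMillis) {
            int i = 0;
            while (i < absTime.length && absTime[i] <= ravenMillis) {
                i++;
            }
            return i == 0 ? absTime.length : i - 1;
        }

        /** Convert a Raven "Begin Time (s)" value into an absolute millisecond timestamp. */
        static long toAbsoluteMillis(double beginTimeSeconds) {
            double ravenMillis = beginTimeSeconds * 1000.;
            int fileInd = findFileIndex(ravenMillis);
            if (fileInd >= absTime.length) {
                throw new IllegalArgumentException("Time is beyond the end of available sound file data");
            }
            // offset into that file, added to the file's absolute start time
            return fileStarts[fileInd] + (long) ravenMillis - absTime[fileInd];
        }

        public static void main(String[] args) {
            // a selection 725.5 s into the sequence falls within the second file
            System.out.println(toAbsoluteMillis(725.5));
        }
    }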