diff --git a/WEB-INF/classes/com/cohort/util/Calendar2.java b/WEB-INF/classes/com/cohort/util/Calendar2.java
index 40b21cca..c7ec582d 100644
--- a/WEB-INF/classes/com/cohort/util/Calendar2.java
+++ b/WEB-INF/classes/com/cohort/util/Calendar2.java
@@ -6,17 +6,12 @@
import com.cohort.array.DoubleArray;
import com.cohort.array.PrimitiveArray;
-import com.cohort.array.StringArray;
-import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Calendar;
-import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
-import java.util.Locale;
-import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.TimeZone;
diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java
index aa22ed13..3e3a1450 100644
--- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java
+++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java
@@ -5820,856 +5820,6 @@ public void readNDNc(String fullName, String loadVariableNames[],
}
}
-
- /**
- * This reads and flattens a group of variables which share dimensions
- * from a multidimensional .nc file. (A new alternative to readNDNc().)
- * One difference between using this and readNcCF: this doesn't require/expect
- * that the file follows the nc CF DSG MA standard.
- *
- * This does not unpack the values or convert to standardMissingValues.
- *
- * For strings, this always calls String2.trimEnd(s)
- *
- * @param fullName This may be a local file name, an "http:" address of a
- * .nc file, an .ncml file (which must end with ".ncml"), or an opendap url.
- *
- * If the fullName is an http address, the name needs to start with "http://"
- * or "https://" (upper or lower case) and the server needs to support "byte ranges"
- * (see ucar.nc2.NetcdfFile documentation).
- * But this is very slow, so not recommended.
- * @param loadVarNames
- * If loadVarNames is specified, those variables will be loaded.
- * If loadVarNames isn't specified, this method reads vars which use
- * the specified loadDimNames and scalar vars.
- *
- * If a specified var isn't in the file, there won't be a column
- * in the results table for it and it isn't an error.
- * @param loadDimNames If loadVarNames is specified, this is ignored.
- * If loadDimNames is used, all variables using any of these dimensions
- * (and dimension-less variables) will be loaded, plus all scalar vars.
- * Don't include string-length dimensions.
- * Just include the last treatDimensionsAs dimension (if any).
- * Almost always, there will be 1+ variables which use all of these dimensions.
- * If a given dimension isn't in the file, it is removed from the list.
- * If loadDimNames isn't specified (or size=0), this method finds the var which uses
- * the most dimensions, and uses them for loadDimNames.
- * So if you want to get just the scalar vars, request a nonexistent
- * dimension (e.g., ZZTOP).
- * @param treatDimensionsAs Lists of dimension names that
- * should be treated as another dimension (the last in each list).
- * Within a list, all dimensions that are in the file must be the same length.
- * E.g. "Lat,Lon,Time" says to treat Lat and Lon as if they were Time.
- * @param getMetadata if true, global and variable metadata is read
- * @param standardizeWhat see Attributes.unpackVariable's standardizeWhat
- * @param removeMVRows This removes any block of rows at the
- * end of a group where all the values are missing_value, _FillValue,
- * or the CoHort ...Array native missing value (or char=#32 for CharArrays).
- * This is for the CF DSG Multidimensional Array file type and similar files.
- * If true, this does the proper test and so always loads all the
- * max dim variables, so it may take extra time.
- * @param conVars the names of the constraint variables. May be null.
- * It is up to this method how much they will be used.
- * Currently, the constraints are just used for *quick* tests to see if the
- * file has no matching data.
- * If a conVar isn't in the loadVarNames (provided or derived),
- * then the constraint isn't used.
- * If standardizeWhat != 0, the constraints are applied to the unpacked variables.
- * @param conOps the operators for the constraints.
- * All ERDDAP ops are supported. May be null.
- * @param conVals the values of the constraints. May be null.
- * @throws Exception if unexpected trouble.
- * But if none of the specified loadVarNames are present
- * or a requested dimension's size=0,
- * it is not an error and it returns an empty table.
- */
- public void readMultidimNc(String fullName,
- StringArray loadVarNames,
- StringArray loadDimNames,
- String treatDimensionsAs[][], //will be null if not used
- boolean getMetadata, //before 2016-11-29, this had a boolean trimStrings parameter, now it always trimEnd's all strings
- int standardizeWhat,
- boolean removeMVRows,
- StringArray conVars, StringArray conOps, StringArray conVals) throws Exception {
-
- //clear the table
- clear();
- if (loadVarNames == null)
- loadVarNames = new StringArray();
- if (loadDimNames == null)
- loadDimNames = new StringArray();
- if (standardizeWhat != 0 || removeMVRows)
- getMetadata = true;
- String msg = " Table.readMultidimNc " + fullName +
- "\n loadVars=" + loadVarNames;
- long time = System.currentTimeMillis();
- String warningInMethod = "Table.readMultidimNc read " + fullName + ":\n";
- boolean haveConstraints =
- conVars != null && conVars.size() > 0 &&
- conOps != null && conOps.size() == conVars.size() &&
- conVals != null && conVals.size() == conVars.size();
- if (treatDimensionsAs == null || treatDimensionsAs.length == 0)
- treatDimensionsAs = null;
- int nd0 = treatDimensionsAs == null? 0 : treatDimensionsAs.length;
- if (nd0 > 0) {
- for (int d0 = 0; d0 < nd0; d0++) {
- if (treatDimensionsAs[d0] == null)
- throw new RuntimeException(warningInMethod +
- "treatDimensionAs[" + d0 + "] is null!");
- else if (treatDimensionsAs[d0].length < 2)
- throw new RuntimeException(warningInMethod +
- "treatDimensionAs[" + d0 + "].length=" +
- treatDimensionsAs[d0].length + " must be >1: " +
- String2.toCSSVString(treatDimensionsAs[d0]));
- if (debugMode)
- msg +=" treatDimensionsAs[" + d0 + "]=" + String2.toCSSVString(treatDimensionsAs[d0]);
- }
- }
-
- //read the file
- Attributes gridMappingAtts = null;
- NetcdfFile ncFile = NcHelper.openFile(fullName);
- try {
-
- //load the global metadata
- if (getMetadata)
- NcHelper.getGroupAttributes(ncFile.getRootGroup(), globalAttributes());
-
- //treatDimensionsAs
- Dimension tDimsAs[][] = null;
- if (nd0 > 0) {
- tDimsAs = new Dimension[nd0][];
- for (int d0 = 0; d0 < nd0; d0++) {
- int nd1 = treatDimensionsAs[d0].length;
- tDimsAs[d0] = new Dimension[nd1];
- int tDimsSize = -1;
- for (int d1 = 0; d1 < nd1; d1++) {
- tDimsAs[d0][d1] = ncFile.findDimension(treatDimensionsAs[d0][d1]);
- if (tDimsAs[d0][d1] == null) {
- msg = warningInMethod +
- "treatDimensionAs[" + d0 + "][" + d1 + "]=" + treatDimensionsAs[d0][d1] +
- " isn't in the file.";
- if (d1 == nd1-1) //the 'to' dim must be in the file
- throw new RuntimeException(msg);
- if (debugMode) String2.log(msg);
- continue;
- }
- if (tDimsSize < 0)
- tDimsSize = tDimsAs[d0][d1].getLength();
- else Test.ensureEqual(tDimsAs[d0][d1].getLength(), tDimsSize,
- warningInMethod +
- "All of the treatDimensionsAs dimensions (" +
- String2.toCSSVString(treatDimensionsAs[d0]) +
- ") must be the same length ([" + d0 + "][" + d1 + "]).");
- }
- }
- }
-
- //In order to be able to tell the difference between multiDimensional
- //char vars and String vars,
- //I need to know which dims are non-String-length dimensions.
- //This isn't perfect / fool-proof.
- //(It is done this way because it easier/more reliable to generate this list
- //than the list of string dimensions.)
- HashSet<Dimension> notStringLengthDims = new HashSet();
- List<Variable> allVars = ncFile.getVariables();
- int nAllVars = allVars.size();
- for (int v = 0; v < nAllVars; v++) {
- Variable tVar = allVars.get(v);
- List<Dimension> tDims = tVar.getDimensions(); //won't be null
- int tnDims = tDims.size();
- //here, assume the last dim of any multiDim char var
- //is the string length dimension, so skip it
- if (tVar.getDataType() == DataType.CHAR)
- tnDims--;
- for (int d = 0; d < tnDims; d++)
- notStringLengthDims.add(tDims.get(d));
- }
-
- // *** first half: make loadVars
- ArrayList<Variable> loadVars = new ArrayList(); //which we will actually load
- ArrayList<Dimension> loadDims = new ArrayList(); //which we actually need
- if (loadVarNames.size() > 0) {
- //loadVarNames was specified
-
- //gather the loadVars and loadDims (not including the aliases)
- loadDimNames.clear();
- for (int v = 0; v < loadVarNames.size(); v++) {
- Variable var = ncFile.findVariable(loadVarNames.get(v));
- if (var == null) {
- loadVarNames.remove(v--); //var not in file, so don't try to load it
- } else {
- loadVars.add(var);
- List<Dimension> tDims = var.getDimensions(); //won't be null
- boolean isCharArray = var.getDataType() == DataType.CHAR &&
- tDims.size() > 0 &&
- !notStringLengthDims.contains(tDims.get(tDims.size() - 1));
- int ntDims = tDims.size() - (isCharArray? 1 : 0);
- for (int d = 0; d < ntDims; d++) {
- Dimension tDim = tDims.get(d);
- if (loadDims.indexOf(tDim) < 0) { //not yet in the list
- for (int d0 = 0; d0 < nd0; d0++) {
- if (String2.indexOfObject(tDimsAs[d0], tDim) >= 0) {
- //convert to the 'as' dimension
- tDim = tDimsAs[d0][tDimsAs[d0].length - 1];
- break;
- }
- }
- if (loadDims.indexOf(tDim) < 0) { //possibly different tDim not yet in the list
- loadDims.add(tDim);
- loadDimNames.add(tDim.getName());
- }
- }
- }
- }
- }
- if (loadVars.size() == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table because none of the requested variables are in the file. " +
- "time=" + (System.currentTimeMillis() - time));
- return;
- }
-
- } else {
- //loadVarNames wasn't specified
-
- if (loadDimNames.size() == 0) {
- //loadDimNames wasn't specified either
-
- //find var(s) that use the most dimensions
- try {
- Variable tVars[] = NcHelper.findMaxDVariables(ncFile, ""); //throws Exception if no vars with dimensions
-
- //gather loadDims from the first of those vars
- //(so it won't include aliases)
- Variable tVar = tVars[0];
- List<Dimension> tDims = tVar.getDimensions(); //won't be null
- boolean isCharArray = tVar.getDataType() == DataType.CHAR &&
- tDims.size() > 0 &&
- !notStringLengthDims.contains(tDims.get(tDims.size() - 1));
- int ntDims = tDims.size() - (isCharArray? 1 : 0);
- for (int d = 0; d < ntDims; d++) {
- Dimension dim = tDims.get(d);
- loadDims.add(dim);
- loadDimNames.add(dim.getName());
- }
- } catch (Exception e) {
- //FUTURE: read all static variables
- String2.log("Table.readMultidimNc caught: " + e.toString());
- }
-
- } else {
- //loadDimNames was specified (it doesn't include aliases)
- //gather the specified loadDims
- for (int d = 0; d < loadDimNames.size(); d++) {
- String dimName = loadDimNames.get(d);
- Dimension dim = ncFile.findDimension(dimName);
- if (dim == null) {
- String2.log("Removing dimName=" + dimName + ": it isn't in the file.");
- loadDimNames.remove(d--);
- } else {
- loadDims.add(dim);
- }
- }
- if (loadDimNames.size() == 0)
- String2.log("None of the requested loadDimNames is in the file.");
- }
-
- //now, loadDims is known, but loadVars isn't
- //find vars that use any subset of loadDims (and no others)
- //including scalar vars
- boolean dimUsed[] = new boolean[loadDims.size()];
- LOADVARS_V:
- for (int v = 0; v < nAllVars; v++) {
- Variable var = allVars.get(v);
- List<Dimension> tDims = var.getDimensions(); //won't be null
- boolean isCharArray = var.getDataType() == DataType.CHAR &&
- tDims.size() > 0 &&
- !notStringLengthDims.contains(tDims.get(tDims.size() - 1));
- int ntDims = tDims.size() - (isCharArray? 1 : 0);
- for (int d = 0; d < ntDims; d++) {
- Dimension tDim = tDims.get(d);
- int whichDim = loadDims.indexOf(tDim);
- if (whichDim < 0) {
- //is it one of the aliases?
- for (int d0 = 0; d0 < nd0; d0++) {
- if (String2.indexOfObject(tDimsAs[d0], tDim) >= 0) {
- //change to the 'as' dimension?
- whichDim = loadDims.indexOf(tDimsAs[d0][tDimsAs[d0].length - 1]);
- if (whichDim >= 0)
- break;
- }
- }
- }
- if (whichDim < 0)
- continue LOADVARS_V;
- dimUsed[whichDim] = true;
- }
- loadVars.add(var);
- loadVarNames.add(var.getFullName());
- }
- if (loadVars.size() == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table because there are no scalar variables " +
- "and no variables in the file use any of these dimensions: " +
- loadDimNames + ". " +
- "time=" + (System.currentTimeMillis() - time));
- return;
- }
-
- //remove unused dimensions
- for (int d = loadDims.size() - 1; d >= 0; d--) { //backwards since may delete
- if (!dimUsed[d])
- loadDims.remove(d);
- }
- if (loadDims.size() == 0)
- String2.log("After analysis, loadDims.size is now 0!");
- }
-
- //loadVars is known and only uses loadDims
- //loadDims is known and only has dims used by loadVars
- if (debugMode) String2.log(
- ">> loadVars=" + loadVarNames +
- "\n>> loadDims=" + loadDimNames);
- int nLoadVars = loadVars.size();
-
-
- // *** quick reject file? (by testing constraints on small (scalar and 1D) vars)
-//FUTURE: this could be a little smarter: maintain separate keep bitsets for each 1D var
-//(and for scalars) so that the constraints are cumulative for each dimension.
- PrimitiveArray knownPAs[] = new PrimitiveArray[nLoadVars]; //read vars will be cached here
- Attributes knownAtts[] = new Attributes[nLoadVars]; //read var's atts will be cached here
- if (haveConstraints) {
- int nCons = conVars.size();
-
- //go through the load vars looking for 0D or 1D vars that have constraints
- for (int v = 0; v < nLoadVars; v++) {
- //is there at least 1 constraint of this var?
- String varName = loadVarNames.get(v);
- int con1 = conVars.indexOf(varName);
- if (con1 < 0)
- continue;
-
- //is this a 0D or 1D var?
- Variable tVar = loadVars.get(v);
- List<Dimension> tDims = tVar.getDimensions(); //won't be null
- boolean isCharArray = tVar.getDataType() == DataType.CHAR &&
- tDims.size() > 0 &&
- !notStringLengthDims.contains(tDims.get(tDims.size() - 1));
- int ntDims = tDims.size() - (isCharArray? 1 : 0);
- if (ntDims > 1)
- continue;
-
- //read info
- PrimitiveArray pa = NcHelper.getPrimitiveArray(tVar, isCharArray);
- if (pa instanceof StringArray t)
- t.trimEndAll();
- Attributes atts = new Attributes();
- if (getMetadata)
- NcHelper.getVariableAttributes(tVar, atts);
- pa = atts.standardizeVariable(standardizeWhat, varName, pa);
- knownPAs[v] = pa;
- knownAtts[v] = atts;
-
- //test constraints
- BitSet keep = new BitSet();
- keep.set(0, pa.size());
- for (int con = con1; con < nCons; con++) {
- if (!conVars.get(con).equals(varName))
- continue;
- if (pa.applyConstraint(false, //less precise, so more likely to pass the test
- keep, conOps.get(con), conVals.get(con)) == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table because var=" + varName +
- " failed its constraints, including " +
- conOps.get(con) + conVals.get(con) +
- ". time=" + (System.currentTimeMillis() - time) + "ms");
- return;
- }
- }
- }
- }
-
-
- // *** second half: load the loadVars
-
- //find vars with all of the loadDims
- //If loadDims size=0, this finds scalar vars
- BitSet loaded = new BitSet(nLoadVars); //all false
- int shape[] = new int[loadDims.size()];
- ALL_DIMS_V:
- for (int v = 0; v < nLoadVars; v++) {
- Variable tVar = loadVars.get(v);
- List<Dimension> tDims = tVar.getDimensions(); //won't be null
- boolean isCharArray = tVar.getDataType() == DataType.CHAR &&
- tDims.size() > 0 &&
- !notStringLengthDims.contains(tDims.get(tDims.size() - 1));
- int ntDims = tDims.size() - (isCharArray? 1 : 0);
- if (ntDims != loadDims.size())
- continue;
- if (nColumns() == 0) {
- //first var with all dims: set loadDims to be in that order
- for (int d = 0; d < ntDims; d++) {
- Dimension dim = tDims.get(d);
- for (int d0 = 0; d0 < nd0; d0++) {
- if (String2.indexOfObject(tDimsAs[d0], dim) >= 0) {
- //convert to the 'as' dimension
- dim = tDimsAs[d0][tDimsAs[d0].length - 1];
- break;
- }
- }
- loadDims.set(d, dim); //perhaps change loadDims to different order
- loadDimNames.set(d, dim.getName());
- shape[d] = dim.getLength();
- if (shape[d] == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table because dim=" + dim.getName() +
- "'s length=0! " +
- "time=" + (System.currentTimeMillis() - time));
- return;
- }
- }
- } else {
- //subsequent vars
- //ensure same order of same dims. If not, read the var in next section.
- for (int d = 0; d < ntDims; d++) {
- Dimension dim = tDims.get(d);
- for (int d0 = 0; d0 < nd0; d0++) {
- if (String2.indexOfObject(tDimsAs[d0], dim) >= 0) {
- //convert to the 'as' dimension
- dim = tDimsAs[d0][tDimsAs[d0].length - 1];
- break;
- }
- }
- if (!loadDims.get(d).equals(dim))
- continue ALL_DIMS_V;
- }
- }
- //yes, load this var, it has all of the dimensions in the expected order
- PrimitiveArray pa = knownPAs[v]; //v is loadVars v
- Attributes atts = knownAtts[v]; //v is loadVars v
- knownPAs[v] = null;
- knownAtts[v] = null;
- if (pa == null) {
- //String2.log(">> tVar=" + tVar.getFullName() + " isCharArray=" + isCharArray);
- pa = NcHelper.getPrimitiveArray(tVar, isCharArray);
- if (pa instanceof StringArray t)
- t.trimEndAll();
- atts = new Attributes();
- if (getMetadata)
- NcHelper.getVariableAttributes(tVar, atts);
- pa = atts.standardizeVariable(standardizeWhat, tVar.getFullName(), pa);
- }
- loaded.set(v);
- addColumn(nColumns(), tVar.getFullName(), pa, atts);
-
- //does this var point to the pseudo-data var with CF grid_mapping (projection) information?
- if (getMetadata &&
- gridMappingAtts == null) {
- gridMappingAtts = NcHelper.getGridMappingAtts(ncFile,
- atts.getString("grid_mapping"));
- if (gridMappingAtts != null)
- globalAttributes.add(gridMappingAtts);
- }
- }
- if (debugMode) String2.log(Math2.memoryString() + "\n" +
- ">> this table after load varsWithAllDims:\n" +
- dataToString(5));
-
- //if loadDims size is 0, we're done because all scalars have been read
- if (loadDims.size() == 0) {
- if (haveConstraints) {
- BitSet keep = new BitSet();
- keep.set(0, nRows()); //should be just 1 row, all true
- int nAfter = tryToApplyConstraints(-1, conVars, conOps, conVals, keep);
- if (nAfter == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table after applying constraints to scalars. " +
- "time=" + (System.currentTimeMillis() - time));
- clear();
- return;
- } //else: no need to justKeep() because there is 1 row and it is valid
- }
- return; //empty table if no scalars
- }
-
- //make a table with index columns for all indices
- if (nColumns() == 0) {
- //no vars have all loadDims
- if (debugMode) String2.log(Math2.memoryString() + "\n" +
- ">> no vars have all loadDims");
- for (int d = 0; d < loadDims.size(); d++) {
- Dimension dim = loadDims.get(d);
- shape[d] = dim.getLength();
- if (shape[d] == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table because dim=" + dim.getName() +
- "'s length=0! " +
- "time=" + (System.currentTimeMillis() - time));
- return;
- }
- }
- }
- Table allIndicesTable = new Table();
- allIndicesTable.addIndexColumns(shape);
- if (debugMode) String2.log(Math2.memoryString() + "\n" +
- ">> allIndicesTable=" +
- allIndicesTable.dataToString(5));
-
-
- //*** removeMVRows
- if (removeMVRows && nColumns() > 0) {
- //ensure all vars that use all loadDims are loaded
- int onCols = nColumns();
- REMOVE_MV_V:
- for (int v = 0; v < nAllVars; v++) {
- Variable tVar = allVars.get(v);
- if (findColumnNumber(tVar.getFullName()) >= 0) //already in the table
- continue;
- List<Dimension> tDims = tVar.getDimensions(); //won't be null
- boolean isCharArray = tVar.getDataType() == DataType.CHAR &&
- tDims.size() > 0 &&
- !notStringLengthDims.contains(tDims.get(tDims.size() - 1));
- int ntDims = tDims.size() - (isCharArray? 1 : 0);
- if (ntDims != loadDims.size())
- continue;
- //ensure same order of same dims
- for (int d = 0; d < ntDims; d++) {
- Dimension dim = tDims.get(d);
- for (int d0 = 0; d0 < nd0; d0++) {
- if (String2.indexOfObject(tDimsAs[d0], dim) >= 0) {
- //convert to the 'as' dimension
- dim = tDimsAs[d0][tDimsAs[d0].length - 1];
- break;
- }
- }
- if (!loadDims.get(d).equals(dim))
- continue REMOVE_MV_V;
- }
- //yes, load this var TEMPORARILY, it has all of the dimensions in the expected order
- //don't use knownPAs here: different vars and different v's.
- PrimitiveArray pa = NcHelper.getPrimitiveArray(tVar, isCharArray);
- //FUTURE: be smarter? just trim values that are STRING_LENGTH long?
- if (pa instanceof StringArray t)
- t.trimEndAll();
- Attributes atts = new Attributes();
- NcHelper.getVariableAttributes(tVar, atts); //needed for removeMVRows
- pa = atts.standardizeVariable(standardizeWhat, tVar.getFullName(), pa);
- addColumn(nColumns(), tVar.getFullName(), pa, atts);
- }
-
- //move all the allIndices columns into the main table
- int nLoadDims = loadDims.size();
- for (int d = 0; d < nLoadDims; d++)
- addColumn(d, allIndicesTable.getColumnName(d),
- allIndicesTable.getColumn(d),
- allIndicesTable.columnAttributes(d));
- int nColumns = nColumns(); //including indicesColumns
- int onRows = nRows();
-
- //gather the missing_value and _FillValue values for each column
- boolean isDouble[] = new boolean[nColumns];
- boolean isULong[] = new boolean[nColumns];
- boolean isLong[] = new boolean[nColumns];
- boolean isChar[] = new boolean[nColumns];
- double doubleMvs[] = new double[nColumns];
- double doubleFvs[] = new double[nColumns];
- BigInteger ulongMvs[] = new BigInteger[nColumns];
- BigInteger ulongFvs[] = new BigInteger[nColumns];
- long longMvs[] = new long[nColumns];
- long longFvs[] = new long[nColumns];
- for (int c = nLoadDims; c < nColumns; c++) {
- PrimitiveArray pa = columns.get(c);
- isDouble[c] = pa instanceof FloatArray ||
- pa instanceof DoubleArray;
- isULong[c] = pa instanceof ULongArray;
- isLong[c] = pa.isIntegerType() && !(pa instanceof ULongArray);
- isChar[c] = pa instanceof CharArray;
- if (isDouble[c]) {
- doubleMvs[c] = columnAttributes(c).getDouble("missing_value");
- doubleFvs[c] = columnAttributes(c).getDouble("_FillValue");
- } else if (isULong[c]) {
- ulongMvs[c] = columnAttributes(c).getULong("missing_value");
- ulongFvs[c] = columnAttributes(c).getULong("_FillValue");
- } else if (isLong[c]) {
- longMvs[c] = columnAttributes(c).getLong("missing_value");
- longFvs[c] = columnAttributes(c).getLong("_FillValue");
- }
- }
-
- //walk backwards. Work within each cycle of the last dim.
- BitSet keep = new BitSet();
- keep.set(0, onRows); //all true
- PrimitiveArray lastDimCol = columns.get(nLoadDims - 1);
- for (int row = onRows - 1; row >= 0; row--) {
- boolean hasData = false;
- for (int c = nLoadDims; c < nColumns; c++) {
- //if (debugMode && row > onRows - 200)
- // String2.log(">> row=" + row + " col=" + c + " " +
- // (isDouble[c]) + " " + (isLong[c]) + " " + (isChar[c]) + " " +
- // " val=" + columns.get(c).getString(row));
- if (isDouble[c]) {
- double d = columns.get(c).getDouble(row);
- if (Double.isNaN(d) ||
- Math2.almostEqual(5, d, doubleMvs[c]) ||
- Math2.almostEqual(5, d, doubleFvs[c])) {
- } else {
- hasData = true;
- break;
- }
- } else if (isULong[c]) {
- BigInteger ul = columns.get(c).getULong(row);
- if (ul.equals(ULongArray.MAX_VALUE) || //trouble: should test maxIsMV
- ul.equals(ulongMvs[c]) ||
- ul.equals(ulongFvs[c])) {
- } else {
- hasData = true;
- break;
- }
- } else if (isLong[c]) {
- long tl = columns.get(c).getLong(row);
- if (tl == Long.MAX_VALUE || //trouble: should test maxIsMV
- tl == longMvs[c] ||
- tl == longFvs[c]) {
- } else {
- hasData = true;
- break;
- }
- } else if (isChar[c]) {
- int tc = columns.get(c).getInt(row);
- if (tc == 0 ||
- tc == 32 ||
- tc == Integer.MAX_VALUE) { //trouble: should test maxIsMV
- } else {
- hasData = true;
- break;
- }
- } else {
- //nc allows strings to be 0-terminated or padded with spaces, so always trimEnd
- String s = String2.trimEnd(columns.get(c).getString(row));
- if (s.length() > 0) {
- hasData = true;
- break;
- }
- }
- }
- if (hasData) {
- //jump to next group
- while (lastDimCol.getInt(row) > 0)
- row--; //the loop's row-- will get to next group
- } else {
- keep.clear(row);
- }
- }
- if (debugMode) { String2.log(">> removeMVRows nRows before=" + onRows +
- " after=" + keep.cardinality());
- //one time debugging:
- if (false) {
- PrimitiveArray pa = getColumn(nLoadDims);
- for (int row = 0; row < onRows; row++) {
- if (keep.get(row) && pa.getDouble(row) == -99999)
- String2.log(">> remaining row with mv:\n" + //in debugMode
- dataToString(row-1, row+2));
- }
- }
- }
-
- //remove index columns and data columns just added for the MV testing
- removeColumns(0, nLoadDims);
- removeColumns(onCols, nColumns());
-
- //apply constraints
- if (haveConstraints) {
- tryToApplyConstraints(-1, conVars, conOps, conVals, keep);
- if (debugMode) String2.log(
- ">> removeMVRows + constraints nRows before=" + onRows +
- " after=" + keep.cardinality());
- }
-
- //just keep
- justKeep(keep);
- allIndicesTable.justKeep(keep);
- if (nRows() == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table after removeMVRows and applying constraints. " +
- "time=" + (System.currentTimeMillis() - time));
- clear();
- return;
- }
-
- } else if (haveConstraints && nColumns() > 0) {
- //apply constraints to vars that have all loadDims
- int onRows = nRows();
- BitSet keep = new BitSet();
- keep.set(0, onRows); //all true
- int nAfter = tryToApplyConstraints(-1, conVars, conOps, conVals, keep);
- if (nAfter == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table after applying constraints. " +
- "time=" + (System.currentTimeMillis() - time));
- clear();
- return;
- }
- if (debugMode)
- String2.log(Math2.memoryString() + "\n" +
- ">> after bigVar constraints, justKeep nRows before=" +
- onRows + " after=" + nAfter);
- justKeep(keep);
- allIndicesTable.justKeep(keep);
- }
-
- //read all of the other variables:
- //repeatedly, read batches of vars with same dimensions, and JOIN to main table
-//FUTURE: it would be better if this looked for groups of vars that are constrained FIRST.
- while (loaded.cardinality() < nLoadVars) {
- Table lut = new Table(); //look up table which will be JOINed into main table
- List<Dimension> cDims = null;
- int ncDims = -1;
-
- BATCH_V:
- for (int v = 0; v < nLoadVars; v++) {
- if (loaded.get(v))
- continue;
- //if (debugMode) {
- // String2.log(">> v=" + v + " cDims==null?" + (cDims==null) +
- // " lut: nCols=" + lut.nColumns() + " nRows=" + lut.nRows());
- // String2.log(">> lut=" + lut.dataToString(5));
- //}
-
- //look for an unloaded var (and other vars with same dimensions)
- Variable tVar = loadVars.get(v);
- List<Dimension> tDims = tVar.getDimensions(); //won't be null
- boolean isCharArray = tVar.getDataType() == DataType.CHAR &&
- tDims.size() > 0 &&
- !notStringLengthDims.contains(tDims.get(tDims.size() - 1));
- int ntDims = tDims.size() - (isCharArray? 1 : 0);
- if (cDims == null) {
- //this is the first variable found in this loop
- cDims = tDims;
- ncDims = ntDims;
- int cShape[] = new int[ncDims];
- for (int d = 0; d < ncDims; d++) {
- //which dim is it in loadDims?
- Dimension cDim = cDims.get(d);
- for (int d0 = 0; d0 < nd0; d0++) {
- if (String2.indexOfObject(tDimsAs[d0], cDim) >= 0) {
- //convert to the 'as' dimension
- cDim = tDimsAs[d0][tDimsAs[d0].length - 1];
- break;
- }
- }
- cShape[d] = cDim.getLength();
- int whichDim = loadDims.indexOf(cDim);
- //insert that index in main table
- addColumn(d, "_index_" + whichDim,
- allIndicesTable.getColumn(whichDim)); //will throw error if whichDim=-1
- }
-
- //insert index columns in lut
- if (ncDims == 0) {
- //if scalar vars, make key columns with 0's
- //in lut
- lut.addColumn(0, "_scalar_", new IntArray(new int[]{0}),
- new Attributes());
- //and in main table
- IntArray ia = new IntArray(nRows(), false);
- ia.addN(nRows(), 0);
- //String2.log("nRows=" + nRows() + " ia.size=" + ia.size());
- addColumn(0, "_scalar_", ia, new Attributes());
- } else {
- lut.addIndexColumns(cShape);
- }
-
- } else {
- //does this var have the exact same dimensions, in same order?
- if (ntDims != ncDims)
- continue BATCH_V;
- for (int d = 0; d < ncDims; d++) {
- Dimension dim = tDims.get(d);
- for (int d0 = 0; d0 < nd0; d0++) {
- if (String2.indexOfObject(tDimsAs[d0], dim) >= 0) {
- //convert to the 'as' dimension
- dim = tDimsAs[d0][tDimsAs[d0].length - 1];
- break;
- }
- }
- if (!cDims.get(d).equals(dim))
- continue BATCH_V;
- }
- }
-
- //read this var into lut
- PrimitiveArray pa = knownPAs[v]; //v is loadVars v
- Attributes atts = knownAtts[v];
- knownPAs[v] = null;
- knownAtts[v] = null;
- if (pa == null) {
- pa = NcHelper.getPrimitiveArray(tVar, isCharArray);
- if (pa instanceof StringArray t)
- t.trimEndAll();
- atts = new Attributes();
- if (getMetadata)
- NcHelper.getVariableAttributes(tVar, atts);
- pa = atts.standardizeVariable(standardizeWhat, loadVarNames.get(v), pa);
- }
- loaded.set(v);
- lut.addColumn(lut.nColumns(), loadVarNames.get(v),
- pa, atts);
- }
-
- //apply constraints to lut
- if (haveConstraints) {
- int onLutRows = lut.nRows();
- BitSet keep = new BitSet();
- keep.set(0, onLutRows); //all true
- int nAfter = lut.tryToApplyConstraints(-1, conVars, conOps, conVals, keep);
- if (nAfter == 0) {
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table after applying constraints to lut. " +
- "time=" + (System.currentTimeMillis() - time));
- clear();
- return;
- }
- lut.justKeep(keep);
- if (debugMode)
- String2.log(Math2.memoryString() + "\n" +
- ">> after lut constraints, justKeep lut.nRows before=" +
- onLutRows + " after=" + nAfter);
- }
-
- //JOIN lut into main table
- //if (debugMode) String2.log(">> lut=\n" + lut.dataToString(5));
- int nMatchingCols = Math.max(1, ncDims); //even scalars have 1 matching column
- BitSet keep = join(nMatchingCols, 0, "", lut); //"" = mvKey not needed
- //remove the index columns from the main table
- removeColumns(0, nMatchingCols);
- //if (debugMode) String2.log(">> this table after join:\n" + dataToString(5));
-
- //remove unmatched rows
- int tnRows = keep.cardinality();
- if (tnRows == 0) {
- clear();
- if (verbose) String2.log(warningInMethod +
- "Returning an empty table after a join. " +
- "time=" + (System.currentTimeMillis() - time));
- return;
- }
- justKeep(keep);
- allIndicesTable.justKeep(keep);
- }
- //and Bob's your uncle! we have all of the data
-
- //this will be either the order that was requested, or their order in the file
- reorderColumns(loadVarNames, false); //discardOthers=false, should be irrelevant
-
- decodeCharsAndStrings();
- convertToUnsignedPAs();
-
- if (reallyVerbose)
- String2.log(msg +
- " finished. nRows=" + nRows() + " nCols=" + nColumns() +
- " time=" + (System.currentTimeMillis() - time) + "ms");
- } finally {
- try {if (ncFile != null) ncFile.close(); } catch (Exception e9) {}
- }
- }
-
/**
* This standardizes every column. See Attributes.unpackVariable for details.
*/
diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TableFromMultidimNcFile.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TableFromMultidimNcFile.java
new file mode 100644
index 00000000..06ff9686
--- /dev/null
+++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TableFromMultidimNcFile.java
@@ -0,0 +1,991 @@
+package gov.noaa.pfel.coastwatch.pointdata;
+
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.HashSet;
+import java.util.List;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import com.cohort.array.Attributes;
+import com.cohort.array.CharArray;
+import com.cohort.array.DoubleArray;
+import com.cohort.array.FloatArray;
+import com.cohort.array.IntArray;
+import com.cohort.array.PrimitiveArray;
+import com.cohort.array.StringArray;
+import com.cohort.array.ULongArray;
+import com.cohort.util.Math2;
+import com.cohort.util.String2;
+import com.cohort.util.Test;
+
+import gov.noaa.pfel.coastwatch.griddata.NcHelper;
+import ucar.ma2.DataType;
+import ucar.nc2.Dimension;
+import ucar.nc2.NetcdfFile;
+import ucar.nc2.Variable;
+
+public class TableFromMultidimNcFile {
+
+ private Table table;
+ private VarData cachedVarData[];
+ private boolean haveConstraints;
+ private String warningInMethod;
+ private HashSet<Dimension> notStringLengthDims;
+ private Dimension tDimsAs[][];
+ private NetcdfFile ncFile;
+ private StringArray loadVarNames;
+ private int standardizeWhat;
+ private Attributes gridMappingAtts = null;
+
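+ /**
+ * VarData bundles one variable's dimensions, data array, and attributes.
+ * Loaded results are cached in cachedVarData (indexed by the var's position
+ * in loadVarNames) so each variable is read from the file at most once.
+ */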
+ private class VarData {
+ public PrimitiveArray pa;
+ public Attributes atts;
+ public List<Dimension> dims;
+ public int nDims;
+ public boolean isCharArray;
+
+ private void loadDims(TableFromMultidimNcFile tableMultidim, Variable tVar) {
+ dims = tVar.getDimensions();
+ isCharArray = tVar.getDataType() == DataType.CHAR &&
+ dims.size() > 0 &&
+ !tableMultidim.notStringLengthDims.contains(dims.get(dims.size() - 1));
+ nDims = dims.size() - (isCharArray ? 1 : 0);
+ }
+
+ private void loadArrayAndAttributes(TableFromMultidimNcFile tableMultidim, Variable tVar) throws Exception {
+ pa = NcHelper.getPrimitiveArray(tVar, isCharArray);
+ if (pa instanceof StringArray t) {
+ t.trimEndAll();
+ }
+ atts = new Attributes();
+ NcHelper.getVariableAttributes(tVar, atts);
+ pa = atts.standardizeVariable(tableMultidim.standardizeWhat, tVar.getFullName(), pa);
+ }
+
+ public static VarData dimsFromVariable(TableFromMultidimNcFile tableMultidim, Variable tVar) {
+ VarData data = tableMultidim.new VarData();
+ data.loadDims(tableMultidim, tVar);
+ return data;
+ }
+
+ public static VarData fromVariableIfDimsMatch(TableFromMultidimNcFile tableMultidim, Variable tVar, VarData other,
+ int nd0) throws Exception {
+ int index = tableMultidim.loadVarNames.indexOf(tVar.getFullName());
+ if (index > -1 && tableMultidim.cachedVarData[index] != null) {
+ VarData data = tableMultidim.cachedVarData[index];
+ if (!tableMultidim.doDimsMatch(nd0, data.nDims, data.dims, other.nDims, other.dims)) {
+ return null;
+ }
+ return data;
+ }
+ VarData data = tableMultidim.new VarData();
+ data.loadDims(tableMultidim, tVar);
+
+ if (!tableMultidim.doDimsMatch(nd0, data.nDims, data.dims, other.nDims, other.dims)) {
+ return null;
+ }
+ data.loadArrayAndAttributes(tableMultidim, tVar);
+ if (index > -1) {
+ tableMultidim.cachedVarData[index] = data;
+ }
+ return data;
+ }
+
+ public static VarData fromVariable(TableFromMultidimNcFile tableMultidim, Variable tVar) throws Exception {
+ int index = tableMultidim.loadVarNames.indexOf(tVar.getFullName());
+ if (index > -1 && tableMultidim.cachedVarData[index] != null) {
+ return tableMultidim.cachedVarData[index];
+ }
+ VarData data = tableMultidim.new VarData();
+ data.loadDims(tableMultidim, tVar);
+ data.loadArrayAndAttributes(tableMultidim, tVar);
+ if (index > -1) {
+ tableMultidim.cachedVarData[index] = data;
+ }
+ return data;
+ }
+ }
+
+ public TableFromMultidimNcFile(Table table) {
+ this.table = table;
+ }
+
+ /**
+ * This reads and flattens a group of variables which share dimensions
+ * from a multidimensional .nc file. (A new alternative to readNDNc().)
+ * One difference between using this and readNcCF: this doesn't require/expect
+ * that the file follows the nc CF DSG MA standard.
+ *
+ * This does not unpack the values or convert to standardMissingValues.
+ *
+ * For strings, this always calls String2.trimEnd(s)
+ *
+ * @param fullName This may be a local file name, an "http:" address of
+ * a
+ * .nc file, an .ncml file (which must end with
+ * ".ncml"), or an opendap url.
+ *
+ * If the fullName is an http address, the name needs
+ * to start with "http://"
+ * or "https://" (upper or lower case) and the server
+ * needs to support "byte ranges"
+ * (see ucar.nc2.NetcdfFile documentation).
+ * But this is very slow, so not recommended.
+ * @param loadVarNames
+ * If loadVarNames is specified, those variables will
+ * be loaded.
+ * If loadVarNames isn't specified, this method reads
+ * vars which use
+ * the specified loadDimNames and scalar vars.
+ *
+ * If a specified var isn't in the file, there won't be
+ * a column
+ * in the results table for it and it isn't an error.
+ * @param loadDimNames If loadVarNames is specified, this is ignored.
+ * If loadDimNames is used, all variables using any of
+ * these dimensions
+ * (and dimension-less variables) will be loaded, plus
+ * all scalar vars.
+ * Don't include string-length dimensions.
+ * Just include the last treatDimensionsAs dimension
+ * (if any).
+ * Almost always, there will be 1+ variables which use
+ * all of these dimensions.
+ * If a given dimension isn't in the file, it is
+ * removed from the list.
+ * If loadDimNames isn't specified (or size=0), this
+ * method finds the var which uses
+ * the most dimensions, and uses them for loadDimNames.
+ * So if you want to get just the scalar vars, request
+ * a nonexistent
+ * dimension (e.g., ZZTOP).
+ * @param treatDimensionsAs Lists of dimension names that
+ * should be treated as another dimension (the last in
+ * each list).
+ * Within a list, all dimensions that are in the file
+ * must be the same length.
+ * E.g. "Lat,Lon,Time" says to treat Lat and Lon as if
+ * they were Time.
+ * @param getMetadata if true, global and variable metadata is read
+ * @param standardizeWhat see Attributes.unpackVariable's standardizeWhat
+ * @param removeMVRows This removes any block of rows at the
+ * end of a group where all the values are
+ * missing_value, _FillValue,
+ * or the CoHort ...Array native missing value (or
+ * char=#32 for CharArrays).
+ * This is for the CF DSG Multidimensional Array file
+ * type and similar files.
+ * If true, this does the proper test and so always
+ * loads all the
+ * max dim variables, so it may take extra time.
+ * @param conVars the names of the constraint variables. May be null.
+ * It is up to this method how much they will be used.
+ * Currently, the constraints are just used for *quick*
+ * tests to see if the
+ * file has no matching data.
+ * If a conVar isn't in the loadVarNames (provided or
+ * derived),
+ * then the constraint isn't used.
+ * If standardizeWhat != 0, the constraints are applied
+ * to the unpacked variables.
+ * @param conOps the operators for the constraints.
+ * All ERDDAP ops are supported. May be null.
+ * @param conVals the values of the constraints. May be null.
+ * @throws Exception if unexpected trouble.
+ * But if none of the specified loadVarNames are present
+ * or a requested dimension's size=0,
+ * it is not an error and it returns an empty table.
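+ *
+ * A minimal usage sketch (the file name and variable names here are
+ * hypothetical, not taken from this repository):
+ * <pre>
+ * Table table = new Table();
+ * new TableFromMultidimNcFile(table).readMultidimNc(
+ * "/data/example.nc", // fullName
+ * StringArray.fromCSV("time, lat, lon, temp"), // loadVarNames
+ * null, // loadDimNames (ignored since loadVarNames is given)
+ * null, // treatDimensionsAs
+ * true, // getMetadata
+ * 0, // standardizeWhat
+ * false, // removeMVRows
+ * null, null, null); // conVars, conOps, conVals: no constraints
+ * </pre>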
+ */
+ public void readMultidimNc(String fullName,
+ StringArray loadVarNames,
+ StringArray loadDimNames,
+ String treatDimensionsAs[][], // will be null if not used
+ boolean getMetadata, // before 2016-11-29, this had a boolean trimStrings parameter, now it always
+ // trimEnd's all strings
+ int standardizeWhat,
+ boolean removeMVRows,
+ StringArray conVars, StringArray conOps, StringArray conVals) throws Exception {
+
+ // clear the table
+ this.table.clear();
+ if (loadVarNames == null)
+ loadVarNames = new StringArray();
+ if (loadDimNames == null)
+ loadDimNames = new StringArray();
+ if (standardizeWhat != 0 || removeMVRows)
+ getMetadata = true;
+ warningInMethod = "TableFromMultidimNcFile.readMultidimNc read " + fullName + ":\n";
+ haveConstraints = conVars != null && conVars.size() > 0 &&
+ conOps != null && conOps.size() == conVars.size() &&
+ conVals != null && conVals.size() == conVars.size();
+ if (treatDimensionsAs == null || treatDimensionsAs.length == 0)
+ treatDimensionsAs = null;
+ int nd0 = treatDimensionsAs == null ? 0 : treatDimensionsAs.length;
+ if (nd0 > 0) {
+ validateTreatDimensionsAs(treatDimensionsAs, nd0);
+ }
+ this.loadVarNames = loadVarNames;
+ this.standardizeWhat = standardizeWhat;
+
+ // read the file
+ ncFile = NcHelper.openFile(fullName);
+
+ try {
+ // load the global metadata
+ if (getMetadata)
+ NcHelper.getGroupAttributes(ncFile.getRootGroup(), this.table.globalAttributes());
+
+ tDimsAs = processTreatDimensionsAs(treatDimensionsAs, nd0);
+
+ List<Variable> allVars = ncFile.getVariables();
+ int nAllVars = allVars.size();
+
+ notStringLengthDims = findNonStringLengthDims(allVars, nAllVars);
+
+ // *** first half: make loadVars
+ ArrayList<Variable> loadVars = new ArrayList<>(); // which we will actually load
+ ArrayList<Dimension> loadDims = new ArrayList<>(); // which we actually need
+ loadVars(loadVarNames, loadDimNames, loadVars, loadDims, nd0, nAllVars, allVars);
+
+ // loadVars is known and only uses loadDims
+ // loadDims is known and only has dims used by loadVars
+ // if (debugMode) String2.log(
+ // ">> loadVars=" + loadVarNames +
+ // "\n>> loadDims=" + loadDimNames);
+ int nLoadVars = loadVars.size();
+
+ List<Pair<VarData, BitSet>> varToKeep = new ArrayList<>();
+
+ // *** quick reject file? (by testing constraints on small (scalar and 1D) vars)
+ // maintain separate keep bitsets for each 1D var
+ // (and for scalars) so that the constraints are cumulative for each dimension.
+ this.cachedVarData = new VarData[nLoadVars];
+ if (haveConstraints) {
+ int nCons = conVars.size();
+
+ // go through the load vars looking for 0D or 1D vars that have constraints
+ for (int v = 0; v < nLoadVars; v++) {
+ // is there at least 1 constraint of this var?
+ String varName = loadVarNames.get(v);
+ int con1 = conVars.indexOf(varName);
+ if (con1 < 0)
+ continue;
+
+ // is this a 0D or 1D var?
+ Variable tVar = loadVars.get(v);
+ VarData data = new VarData();
+ data.loadDims(this, tVar);
+ if (data.nDims > 1) {
+ continue;
+ }
+ data.loadArrayAndAttributes(this, tVar);
+ cachedVarData[v] = data;
+
+ BitSet keep = getKeepForVar(data, nd0, varToKeep);
+ // test constraints
+ for (int con = con1; con < nCons; con++) {
+ if (!conVars.get(con).equals(varName))
+ continue;
+ if (data.pa.applyConstraint(false, // less precise, so more likely to pass the test
+ keep, conOps.get(con), conVals.get(con)) == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table because var=" + varName +
+ // " failed its constraints, including " +
+ // conOps.get(con) + conVals.get(con) +
+ // ". time=" + (System.currentTimeMillis() - time) + "ms");
+ return;
+ }
+ }
+ }
+ }
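+ // Note: getKeepForVar hands out one shared keep BitSet per dimension
+ // signature, so constraints on several small vars with the same dims
+ // accumulate in the same bitset rather than being tested in isolation.
+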
+ // *** second half: load the loadVars
+
+ // find vars with all of the loadDims
+ // If loadDims size=0, this finds scalar vars
+ BitSet loaded = new BitSet(nLoadVars); // all false
+ int shape[] = new int[loadDims.size()];
+ // find first var with all of the load dims
+ VarData firstVar = null;
+ for (int v = 0; v < nLoadVars; v++) {
+ Variable tVar = loadVars.get(v);
+ VarData candidate = new VarData();
+ candidate.loadDims(this, tVar);
+ if (candidate.nDims != loadDims.size()) {
+ continue; // doesn't use all of the loadDims; leave firstVar null
+ }
+ firstVar = candidate;
+ if (this.table.nColumns() == 0) {
+ // first var with all dims: set loadDims to be in that order
+ for (int d = 0; d < firstVar.nDims; d++) {
+ Dimension dim = firstVar.dims.get(d);
+ dim = convertDimension(nd0, dim);
+ loadDims.set(d, dim); // perhaps change loadDims to different order
+ loadDimNames.set(d, dim.getName());
+ shape[d] = dim.getLength();
+ if (shape[d] == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table because dim=" + dim.getName() +
+ // "'s length=0! " +
+ // "time=" + (System.currentTimeMillis() - time));
+ return;
+ }
+ }
+ }
+ if (cachedVarData[v] != null) {
+ firstVar = cachedVarData[v];
+ } else {
+ // yes, load this var, it has all of the dimensions in the expected order
+ firstVar.loadArrayAndAttributes(this, tVar);
+ // knownPAs[v] = null;
+ // knownAtts[v] = null;
+ cachedVarData[v] = firstVar;
+ }
+ addColumnToTable(getMetadata, loaded, firstVar, v, tVar, this.table);
+ break;
+ }
+ if (firstVar != null) {
+ loadDimMatchedVars(loadVarNames, standardizeWhat, nd0, loadVars, loadDims, nLoadVars, loaded,
+ this.table, firstVar, getMetadata);
+ }
+ // if (debugMode) String2.log(Math2.memoryString() + "\n" +
+ // ">> this table after load varsWithAllDims:\n" +
+ // dataToString(5));
+
+ // if loadDims size is 0, we're done because all scalars have been read
+ if (loadDims.size() == 0) {
+ if (haveConstraints) {
+ BitSet keep = new BitSet();
+ keep.set(0, this.table.nRows()); // should be just 1 row, all true
+ int nAfter = this.table.tryToApplyConstraints(-1, conVars, conOps, conVals, keep);
+ if (nAfter == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table after applying constraints to scalars. " +
+ // "time=" + (System.currentTimeMillis() - time));
+ this.table.clear();
+ return;
+ } // else: no need to justKeep() because there is 1 row and it is valid
+ }
+ return; // empty table if no scalars
+ }
+
+ // make a table with index columns for all indices
+ if (this.table.nColumns() == 0) {
+ // no vars have all loadDims
+ // if (debugMode) String2.log(Math2.memoryString() + "\n" +
+ // ">> no vars have all loadDims");
+ for (int d = 0; d < loadDims.size(); d++) {
+ Dimension dim = loadDims.get(d);
+ shape[d] = dim.getLength();
+ if (shape[d] == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table because dim=" + dim.getName() +
+ // "'s length=0! " +
+ // "time=" + (System.currentTimeMillis() - time));
+ return;
+ }
+ }
+ }
+ Table allIndicesTable = new Table();
+ allIndicesTable.addIndexColumns(shape);
+ // if (debugMode) String2.log(Math2.memoryString() + "\n" +
+ // ">> allIndicesTable=" +
+ // allIndicesTable.dataToString(5));
+
+ // apply constraints
+ int onRows = this.table.nRows();
+ BitSet keep;
+ if (firstVar != null) {
+ keep = getKeepForVar(firstVar, nd0, varToKeep);
+ } else {
+ keep = new BitSet();
+ }
+ keep.set(0, onRows); // all true
+ // *** removeMVRows
+ if (removeMVRows && this.table.nColumns() > 0) {
+ // ensure all vars that use all loadDims are loaded
+ int onCols = this.table.nColumns();
+ for (int v = 0; v < nAllVars; v++) {
+ Variable tVar = allVars.get(v);
+ if (this.table.findColumnNumber(tVar.getFullName()) >= 0) {
+ continue;
+ } // already in the table
+ VarData data = new VarData();
+ data.loadDims(this, tVar);
+ if (!doDimsMatch(nd0, data.nDims, data.dims, loadDims.size(), loadDims)) {
+ continue;
+ }
+ // yes, load this var TEMPORARILY, it has all of the dimensions in the expected
+ // order
+ // don't use knownPAs here: different vars and different v's.
+ data.loadArrayAndAttributes(this, tVar);
+ this.table.addColumn(this.table.nColumns(), tVar.getFullName(), data.pa, data.atts);
+ }
+
+ // move all the allIndices columns into the main table
+ int nLoadDims = loadDims.size();
+ for (int d = 0; d < nLoadDims; d++)
+ this.table.addColumn(d, allIndicesTable.getColumnName(d),
+ allIndicesTable.getColumn(d),
+ allIndicesTable.columnAttributes(d));
+ int nColumns = this.table.nColumns(); // including indicesColumns
+ if (onRows != this.table.nRows()) {
+ throw new Exception("Row count mismatch, can't do contraints first");
+ }
+
+ // gather the missing_value and _FillValue values for each column
+ boolean isDouble[] = new boolean[nColumns];
+ boolean isULong[] = new boolean[nColumns];
+ boolean isLong[] = new boolean[nColumns];
+ boolean isChar[] = new boolean[nColumns];
+ double doubleMvs[] = new double[nColumns];
+ double doubleFvs[] = new double[nColumns];
+ BigInteger ulongMvs[] = new BigInteger[nColumns];
+ BigInteger ulongFvs[] = new BigInteger[nColumns];
+ long longMvs[] = new long[nColumns];
+ long longFvs[] = new long[nColumns];
+ for (int c = nLoadDims; c < nColumns; c++) {
+ PrimitiveArray pa = this.table.columns.get(c);
+ isDouble[c] = pa instanceof FloatArray ||
+ pa instanceof DoubleArray;
+ isULong[c] = pa instanceof ULongArray;
+ isLong[c] = pa.isIntegerType() && !(pa instanceof ULongArray);
+ isChar[c] = pa instanceof CharArray;
+ if (isDouble[c]) {
+ doubleMvs[c] = this.table.columnAttributes(c).getDouble("missing_value");
+ doubleFvs[c] = this.table.columnAttributes(c).getDouble("_FillValue");
+ } else if (isULong[c]) {
+ ulongMvs[c] = this.table.columnAttributes(c).getULong("missing_value");
+ ulongFvs[c] = this.table.columnAttributes(c).getULong("_FillValue");
+ } else if (isLong[c]) {
+ longMvs[c] = this.table.columnAttributes(c).getLong("missing_value");
+ longFvs[c] = this.table.columnAttributes(c).getLong("_FillValue");
+ }
+ }
+
+ // walk backwards. Work within each cycle of the last dim.
+
+ PrimitiveArray lastDimCol = this.table.columns.get(nLoadDims - 1);
+ for (int row = onRows - 1; row >= 0; row--) {
+ boolean hasData = false;
+ for (int c = nLoadDims; c < nColumns; c++) {
+ // if (debugMode && row > onRows - 200)
+ // String2.log(">> row=" + row + " col=" + c + " " +
+ // (isDouble[c]) + " " + (isLong[c]) + " " + (isChar[c]) + " " +
+ // " val=" + columns.get(c).getString(row));
+ if (isDouble[c]) {
+ double d = this.table.columns.get(c).getDouble(row);
+ if (Double.isNaN(d) ||
+ Math2.almostEqual(5, d, doubleMvs[c]) ||
+ Math2.almostEqual(5, d, doubleFvs[c])) {
+ } else {
+ hasData = true;
+ break;
+ }
+ } else if (isULong[c]) {
+ BigInteger ul = this.table.columns.get(c).getULong(row);
+ if (ul.equals(ULongArray.MAX_VALUE) || // trouble: should test maxIsMV
+ ul.equals(ulongMvs[c]) ||
+ ul.equals(ulongFvs[c])) {
+ } else {
+ hasData = true;
+ break;
+ }
+ } else if (isLong[c]) {
+ long tl = this.table.columns.get(c).getLong(row);
+ if (tl == Long.MAX_VALUE || // trouble: should test maxIsMV
+ tl == longMvs[c] ||
+ tl == longFvs[c]) {
+ } else {
+ hasData = true;
+ break;
+ }
+ } else if (isChar[c]) {
+ int tc = this.table.columns.get(c).getInt(row);
+ if (tc == 0 ||
+ tc == 32 ||
+ tc == Integer.MAX_VALUE) { // trouble: should test maxIsMV
+ } else {
+ hasData = true;
+ break;
+ }
+ } else {
+ // nc allows strings to be 0-terminated or padded with spaces, so always trimEnd
+ String s = String2.trimEnd(this.table.columns.get(c).getString(row));
+ if (s.length() > 0) {
+ hasData = true;
+ break;
+ }
+ }
+ }
+ if (hasData) {
+ // jump to next group
+ while (lastDimCol.getInt(row) > 0)
+ row--; // the loop's row-- will get to next group
+ } else {
+ keep.clear(row);
+ }
+ }
+ // if (debugMode) { String2.log(">> removeMVRows nRows before=" + onRows +
+ // " after=" + keep.cardinality());
+ // //one time debugging:
+ // if (false) {
+ // PrimitiveArray pa = getColumn(nLoadDims);
+ // for (int row = 0; row < onRows; row++) {
+ // if (keep.get(row) && pa.getDouble(row) == -99999)
+ // String2.log(">> remaining row with mv:\n" + //in debugMode
+ // dataToString(row-1, row+2));
+ // }
+ // }
+ // }
+
+ // remove index columns and data columns just added for the MV testing
+ this.table.removeColumns(0, nLoadDims);
+ this.table.removeColumns(onCols, this.table.nColumns());
+ }
+
+ if ((haveConstraints || removeMVRows) && this.table.nColumns() > 0) {
+ // apply constraints to vars that have all loadDims
+ this.table.tryToApplyConstraints(-1, conVars, conOps, conVals, keep);
+ // if (debugMode) String2.log(
+ // ">> removeMVRows + constraints nRows before=" + onRows +
+ // " after=" + keep.cardinality());
+
+ // if (debugMode)
+ // String2.log(Math2.memoryString() + "\n" +
+ // ">> after bigVar constraints, justKeep nRows before=" +
+ // onRows + " after=" + nRows());
+ if (keep.cardinality() == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table after removeMVRows/constraints. " +
+ // "time=" + (System.currentTimeMillis() - time));
+ this.table.clear();
+ return;
+ }
+ this.table.justKeep(keep);
+ allIndicesTable.justKeep(keep);
+ }
+ // read all of the other variables:
+ // repeatedly, read batches of vars with same dimensions, and JOIN to main table
+ // Load constrained variables first.
+ if (haveConstraints) {
+ for (int v = 0; v < nLoadVars; v++) {
+ if (loaded.get(v)) {
+ continue;
+ }
+ Table lut = new Table(); // look up table which will be JOINed into main table
+ // is there at least 1 constraint of this var?
+ String varName = loadVarNames.get(v);
+ int con1 = conVars.indexOf(varName);
+ if (con1 < 0) {
+ continue;
+ }
+ Variable tVar = loadVars.get(v);
+ VarData data = VarData.fromVariable(this, tVar);
+ addVarAndIndicies(nd0, loadDims, loaded, allIndicesTable, lut, getMetadata, data, v, tVar);
+ loadDimMatchedVars(loadVarNames, standardizeWhat, nd0, loadVars, loadDims, nLoadVars, loaded,
+ lut, data, getMetadata);
+
+ // If we ran constraints on this var earlier, load it.
+ BitSet lutkeep = getKeepForVar(data, nd0, varToKeep);
+ // If we've already applied the constraints, use that previous bitset
+ if (lutkeep.cardinality() == data.pa.size()) {
+ int nAfter = lut.tryToApplyConstraints(-1, conVars, conOps, conVals, lutkeep);
+ if (nAfter == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table after applying constraints to lut. " +
+ // "time=" + (System.currentTimeMillis() - time));
+ this.table.clear();
+ return;
+ }
+ }
+ lut.justKeep(lutkeep);
+ // if (debugMode)
+ // String2.log(Math2.memoryString() + "\n" +
+ // ">> after lut constraints, justKeep lut.nRows before=" +
+ // onLutRows + " after=" + nAfter);
+
+ joinLutToTable(lut, data, allIndicesTable);
+ }
+ }
+ while (loaded.cardinality() < nLoadVars) {
+ Table lut = new Table(); // look up table which will be JOINed into main table
+ VarData varData = findVarToLoad(loadVarNames, standardizeWhat, nd0, loadVars, loadDims, nLoadVars, loaded,
+ allIndicesTable, lut, getMetadata);
+
+ loadDimMatchedVars(loadVarNames, standardizeWhat, nd0, loadVars, loadDims, nLoadVars, loaded,
+ lut, varData, getMetadata);
+
+ // all constraints checked above so we just need to join this data in.
+ joinLutToTable(lut, varData, allIndicesTable);
+ }
+ // and Bob's your uncle! we have all of the data
+
+ // this will be either the order that was requested, or their order in the file
+ this.table.reorderColumns(loadVarNames, false); // discardOthers=false, should be irrelevant
+
+ this.table.decodeCharsAndStrings();
+ this.table.convertToUnsignedPAs();
+
+ // if (reallyVerbose)
+ // String2.log(msg +
+ // " finished. nRows=" + nRows() + " nCols=" + nColumns() +
+ // " time=" + (System.currentTimeMillis() - time) + "ms");
+ } finally {
+ try {
+ if (ncFile != null)
+ ncFile.close();
+ } catch (Exception e9) {
+ }
+ }
+ }
+
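+ /**
+ * JOINs lut into the main table on the index (or _scalar_) key columns,
+ * then removes the key columns from the main table and discards unmatched
+ * rows from both the main table and allIndicesTable.
+ */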
+ private void joinLutToTable(Table lut, VarData varData, Table allIndicesTable) {
+ // JOIN lut into main table
+ // if (debugMode) String2.log(">> lut=\n" + lut.dataToString(5));
+ int nMatchingCols = Math.max(1, varData.nDims); // even scalars have 1 matching column
+ BitSet lutkeep = this.table.join(nMatchingCols, 0, "", lut); // "" = mvKey not needed
+ // remove the index columns from the main table
+ this.table.removeColumns(0, nMatchingCols);
+ // if (debugMode) String2.log(">> this table after join:\n" + dataToString(5));
+
+ // remove unmatched rows
+ int tnRows = lutkeep.cardinality();
+ if (tnRows == 0) {
+ this.table.clear();
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table after a join. " +
+ // "time=" + (System.currentTimeMillis() - time));
+ return;
+ }
+ this.table.justKeep(lutkeep);
+ allIndicesTable.justKeep(lutkeep);
+ }
+
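+ /**
+ * Returns the keep BitSet shared by all vars whose dimensions match data's,
+ * creating an all-true BitSet (sized to data.pa) and registering it in
+ * varToKeep if none exists yet.
+ */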
+ private BitSet getKeepForVar(VarData data, int nd0, List<Pair<VarData, BitSet>> varToKeep) {
+ for (int i = 0; i < varToKeep.size(); i++) {
+ VarData inList = varToKeep.get(i).getLeft();
+ if (doDimsMatch(nd0, data.nDims, data.dims, inList.nDims, inList.dims)) {
+ return varToKeep.get(i).getRight();
+ }
+ }
+ BitSet keep = new BitSet();
+ keep.set(0, data.pa.size());
+ varToKeep.add(Pair.of(data, keep));
+ return keep;
+ }
+
+ private void addColumnToTable(boolean getMetadata, BitSet loaded, VarData varData,
+ int v, Variable tVar, Table table) {
+ loaded.set(v);
+ table.addColumn(table.nColumns(), tVar.getFullName(), varData.pa, varData.atts);
+ // does this var point to the pseudo-data var with CF grid_mapping (projection)
+ // information?
+ if (getMetadata &&
+ gridMappingAtts == null) {
+ gridMappingAtts = NcHelper.getGridMappingAtts(ncFile,
+ varData.atts.getString("grid_mapping"));
+ if (gridMappingAtts != null) {
+ table.globalAttributes.add(gridMappingAtts);
+ }
+ }
+ }
+
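+ /**
+ * If dim is in one of the treatDimensionsAs lists, returns that list's last
+ * ('as') dimension; otherwise returns dim unchanged.
+ */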
+ private Dimension convertDimension(int nd0, Dimension dim) {
+ for (int d0 = 0; d0 < nd0; d0++) {
+ if (String2.indexOfObject(tDimsAs[d0], dim) >= 0) {
+ // convert to the 'as' dimension
+ dim = tDimsAs[d0][tDimsAs[d0].length - 1];
+ break;
+ }
+ }
+ return dim;
+ }
+
+ private VarData findVarToLoad(StringArray loadVarNames, int standardizeWhat, int nd0, ArrayList<Variable> loadVars,
+ ArrayList<Dimension> loadDims, int nLoadVars, BitSet loaded, Table allIndicesTable, Table lut,
+ boolean getMetadata) throws Exception {
+ VarData varData = null;
+ for (int v = 0; v < nLoadVars; v++) {
+ if (loaded.get(v))
+ continue;
+ // if (debugMode) {
+ // String2.log(">> v=" + v + " cDims==null?" + (cDims==null) +
+ // " lut: nCols=" + lut.nColumns() + " nRows=" + lut.nRows());
+ // String2.log(">> lut=" + lut.dataToString(5));
+ // }
+
+ // look for an unloaded var (and other vars with same dimensions)
+ Variable tVar = loadVars.get(v);
+
+ varData = VarData.fromVariable(this, tVar);
+
+ addVarAndIndices(nd0, loadDims, loaded, allIndicesTable, lut, getMetadata, varData, v, tVar);
+ return varData;
+ }
+ return varData;
+ }
+
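+ /**
+ * Inserts the _index_ columns for this var's dimensions (or a _scalar_
+ * column of 0's if it has no dimensions) into the main table and the lut,
+ * then reads this var's data into the lut.
+ */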
+ private void addVarAndIndices(int nd0, ArrayList<Dimension> loadDims, BitSet loaded, Table allIndicesTable,
+ Table lut,
+ boolean getMetadata, VarData varData, int v, Variable tVar) {
+ int cShape[] = new int[varData.nDims];
+ for (int d = 0; d < varData.nDims; d++) {
+ // which dim is it in loadDims?
+ Dimension cDim = varData.dims.get(d);
+ cDim = convertDimension(nd0, cDim);
+ cShape[d] = cDim.getLength();
+ int whichDim = loadDims.indexOf(cDim);
+ // insert that index in main table
+ this.table.addColumn(d, "_index_" + whichDim,
+ allIndicesTable.getColumn(whichDim)); // will throw error if whichDim=-1
+ }
+
+ // insert index columns in lut
+ if (varData.nDims == 0) {
+ // if scalar vars, make key columns with 0's
+ // in lut
+ lut.addColumn(0, "_scalar_", new IntArray(new int[] { 0 }),
+ new Attributes());
+ // and in main table
+ IntArray ia = new IntArray(this.table.nRows(), false);
+ ia.addN(this.table.nRows(), 0);
+ // String2.log("nRows=" + nRows() + " ia.size=" + ia.size());
+ this.table.addColumn(0, "_scalar_", ia, new Attributes());
+ } else {
+ lut.addIndexColumns(cShape);
+ }
+
+ // read this var into lut
+ // knownPAs[v] = null;
+ // knownAtts[v] = null;
+ addColumnToTable(getMetadata, loaded, varData, v, tVar, lut);
+ }
+
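+ /**
+ * Reads each not-yet-loaded var whose dimensions match matchDims (see
+ * doDimsMatch) and adds it as a column in table.
+ */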
+ private void loadDimMatchedVars(StringArray loadVarNames, int standardizeWhat, int nd0, ArrayList<Variable> loadVars,
+ ArrayList<Dimension> loadDims, int nLoadVars, BitSet loaded, Table table,
+ VarData matchDims, boolean getMetadata) throws Exception {
+ // extra check on loaded?? verify this isn't a problem
+ for (int v = 0; v < nLoadVars; v++) {
+ if (loaded.get(v))
+ continue;
+ // if (debugMode) {
+ // String2.log(">> v=" + v + " cDims==null?" + (cDims==null) +
+ // " lut: nCols=" + lut.nColumns() + " nRows=" + lut.nRows());
+ // String2.log(">> lut=" + lut.dataToString(5));
+ // }
+
+ // look for an unloaded var (and other vars with same dimensions)
+ Variable tVar = loadVars.get(v);
+ VarData data = VarData.fromVariableIfDimsMatch(this, tVar, matchDims, nd0);
+ if (data == null) {
+ continue;
+ }
+ // read this var into lut
+ addColumnToTable(getMetadata, loaded, data, v, tVar, table);
+ }
+ }
+
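+ /**
+ * Returns true if the var's dimensions (after treatDimensionsAs
+ * conversion) are exactly the same as cDims, in the same order.
+ */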
+ private boolean doDimsMatch(int nd0, int ntDims, List<Dimension> tDims, int ncDims, List<Dimension> cDims) {
+ // does this var have the exact same dimensions, in same order?
+ if (ntDims != ncDims) {
+ return false;
+ }
+ for (int d = 0; d < ncDims; d++) {
+ Dimension dim = tDims.get(d);
+ dim = convertDimension(nd0, dim);
+ if (!cDims.get(d).equals(dim)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
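+ /**
+ * Returns the set of dimensions used by the variables in non-string-length
+ * positions (the last dim of a multidimensional char var is assumed to be
+ * its string length dimension).
+ */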
+ private HashSet<Dimension> findNonStringLengthDims(List<Variable> allVars, int nAllVars) {
+ HashSet notStringLengthDims = new HashSet<>();
+ for (int v = 0; v < nAllVars; v++) {
+ Variable tVar = allVars.get(v);
+ List<Dimension> tDims = tVar.getDimensions(); // won't be null
+ int tnDims = tDims.size();
+ // here, assume the last dim of any multiDim char var
+ // is the string length dimension, so skip it
+ if (tVar.getDataType() == DataType.CHAR)
+ tnDims--;
+ for (int d = 0; d < tnDims; d++)
+ notStringLengthDims.add(tDims.get(d));
+ }
+ return notStringLengthDims;
+ }
+
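+ /**
+ * Converts the treatDimensionsAs names (e.g., "Lat,Lon,Time" = treat Lat
+ * and Lon as Time) into the file's Dimension objects, ensuring that the
+ * dimensions in each list are the same length and that the last ('to')
+ * dimension is in the file.
+ */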
+ private Dimension[][] processTreatDimensionsAs(String[][] treatDimensionsAs, int nd0) {
+ String msg;
+ Dimension[][] tDimsAs = null;
+ if (nd0 > 0) {
+ tDimsAs = new Dimension[nd0][];
+ for (int d0 = 0; d0 < nd0; d0++) {
+ int nd1 = treatDimensionsAs[d0].length;
+ tDimsAs[d0] = new Dimension[nd1];
+ int tDimsSize = -1;
+ for (int d1 = 0; d1 < nd1; d1++) {
+ tDimsAs[d0][d1] = ncFile.findDimension(treatDimensionsAs[d0][d1]);
+ if (tDimsAs[d0][d1] == null) {
+ msg = warningInMethod +
+ "treatDimensionAs[" + d0 + "][" + d1 + "]=" + treatDimensionsAs[d0][d1] +
+ " isn't in the file.";
+ if (d1 == nd1 - 1) // the 'to' dim must be in the file
+ throw new RuntimeException(msg);
+ // if (debugMode) String2.log(msg);
+ continue;
+ }
+ if (tDimsSize < 0)
+ tDimsSize = tDimsAs[d0][d1].getLength();
+ else
+ Test.ensureEqual(tDimsAs[d0][d1].getLength(), tDimsSize,
+ warningInMethod +
+ "All of the treatDimensionsAs dimensions (" +
+ String2.toCSSVString(treatDimensionsAs[d0]) +
+ ") must be the same length ([" + d0 + "][" + d1 + "]).");
+ }
+ }
+ }
+ return tDimsAs;
+ }
+
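+ /**
+ * Throws a RuntimeException if any of the treatDimensionsAs lists is null
+ * or has fewer than 2 dimension names.
+ */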
+ private void validateTreatDimensionsAs(String[][] treatDimensionsAs, int nd0) {
+ for (int d0 = 0; d0 < nd0; d0++) {
+ if (treatDimensionsAs[d0] == null)
+ throw new RuntimeException(warningInMethod +
+ "treatDimensionAs[" + d0 + "] is null!");
+ else if (treatDimensionsAs[d0].length < 2)
+ throw new RuntimeException(warningInMethod +
+ "treatDimensionAs[" + d0 + "].length=" +
+ treatDimensionsAs[d0].length + " must be >1: " +
+ String2.toCSSVString(treatDimensionsAs[d0]));
+ // if (debugMode)
+ // msg +=" treatDimensionsAs[" + d0 + "]=" +
+ // String2.toCSSVString(treatDimensionsAs[d0]);
+ }
+ }
+
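+ /**
+ * Gathers the variables and dimensions to load: from loadVarNames if
+ * specified; otherwise from loadDimNames (or, if that is empty, from the
+ * var with the most dimensions), plus all vars (including scalars) that
+ * use only those dimensions.
+ */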
+ private void loadVars(StringArray loadVarNames, StringArray loadDimNames, ArrayList<Variable> loadVars,
+ ArrayList<Dimension> loadDims, int nd0, int nAllVars, List<Variable> allVars) {
+ if (loadVarNames.size() > 0) {
+ // loadVarNames was specified
+
+ // gather the loadVars and loadDims (not including the aliases)
+ loadDimNames.clear();
+ for (int v = 0; v < loadVarNames.size(); v++) {
+ Variable var = ncFile.findVariable(loadVarNames.get(v));
+ if (var == null) {
+ loadVarNames.remove(v--); // var not in file, so don't try to load it
+ } else {
+ loadVars.add(var);
+ VarData data = VarData.dimsFromVariable(this, var);
+ for (int d = 0; d < data.nDims; d++) {
+ Dimension tDim = data.dims.get(d);
+ if (loadDims.indexOf(tDim) < 0) { // not yet in the list
+ tDim = convertDimension(nd0, tDim);
+ if (loadDims.indexOf(tDim) < 0) { // possibly different tDim not yet in the list
+ loadDims.add(tDim);
+ loadDimNames.add(tDim.getName());
+ }
+ }
+ }
+ }
+ }
+ if (loadVars.size() == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table because none of the requested variables are in the
+ // file. " +
+ // "time=" + (System.currentTimeMillis() - time));
+ return;
+ }
+
+ } else {
+ // loadVarNames wasn't specified
+
+ if (loadDimNames.size() == 0) {
+ // loadDimNames wasn't specified either
+
+ // find var(s) that use the most dimensions
+ try {
+ Variable tVars[] = NcHelper.findMaxDVariables(ncFile, ""); // throws Exception if no vars with dimensions
+
+ // gather loadDims from the first of those vars
+ // (so it won't include aliases)
+ Variable tVar = tVars[0];
+ VarData data = VarData.dimsFromVariable(this, tVar);
+ for (int d = 0; d < data.nDims; d++) {
+ Dimension dim = data.dims.get(d);
+ loadDims.add(dim);
+ loadDimNames.add(dim.getName());
+ }
+ } catch (Exception e) {
+ // FUTURE: read all static variables
+ String2.log("Table.readMultidimNc caught: " + e.toString());
+ }
+
+ } else {
+ // loadDimNames was specified (it doesn't include aliases)
+ // gather the specified loadDims
+ for (int d = 0; d < loadDimNames.size(); d++) {
+ String dimName = loadDimNames.get(d);
+ Dimension dim = ncFile.findDimension(dimName);
+ if (dim == null) {
+ String2.log("Removing dimName=" + dimName + ": it isn't in the file.");
+ loadDimNames.remove(d--);
+ } else {
+ loadDims.add(dim);
+ }
+ }
+ if (loadDimNames.size() == 0)
+ String2.log("None of the requested loadDimNames is in the file.");
+ }
+
+ // now, loadDims is known, but loadVars isn't
+ // find vars that use any subset of loadDims (and no others)
+ // including scalar vars
+ boolean dimUsed[] = new boolean[loadDims.size()];
+ LOADVARS_V: for (int v = 0; v < nAllVars; v++) {
+ Variable var = allVars.get(v);
+ VarData data = VarData.dimsFromVariable(this, var);
+ for (int d = 0; d < data.nDims; d++) {
+ Dimension tDim = data.dims.get(d);
+ int whichDim = loadDims.indexOf(tDim);
+ if (whichDim < 0) {
+ // is it one of the aliases?
+ for (int d0 = 0; d0 < nd0; d0++) {
+ if (String2.indexOfObject(tDimsAs[d0], tDim) >= 0) {
+ // change to the 'as' dimension?
+ whichDim = loadDims.indexOf(tDimsAs[d0][tDimsAs[d0].length - 1]);
+ if (whichDim >= 0)
+ break;
+ }
+ }
+ }
+ if (whichDim < 0)
+ continue LOADVARS_V;
+ dimUsed[whichDim] = true;
+ }
+ loadVars.add(var);
+ loadVarNames.add(var.getFullName());
+ }
+ if (loadVars.size() == 0) {
+ // if (verbose) String2.log(warningInMethod +
+ // "Returning an empty table because there are no scalar variables " +
+ // "and no variables in the file use any of these dimensions: " +
+ // loadDimNames + ". " +
+ // "time=" + (System.currentTimeMillis() - time));
+ return;
+ }
+
+ // remove unused dimensions
+ for (int d = loadDims.size() - 1; d >= 0; d--) { // backwards since may delete
+ if (!dimUsed[d])
+ loadDims.remove(d);
+ }
+ if (loadDims.size() == 0)
+ String2.log("After analysis, loadDims.size is now 0!");
+ }
+ }
+}
diff --git a/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromFiles.java b/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromFiles.java
index 25a70519..e0120cd8 100644
--- a/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromFiles.java
+++ b/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromFiles.java
@@ -49,123 +49,136 @@
import java.util.HashSet;
import java.util.regex.*;
-/**
- * This class represents a virtual table of data from by aggregating a collection of data files.
- * <p>The presumption is that the entire dataset can be read reasonable quickly
- * (from the local files, unlike remote data) and all variable's min and max info
- * can be gathered (for each file)
- * and cached (facilitating handling constraints in data requests).
- * <p>And file data can be cached and reused because each file has a lastModified
- * time and size which can be used to detect if file is unchanged.
+/**
+ * This class represents a virtual table of data created by aggregating a
+ * collection of data files.
+ *
+ * <p>The presumption is that the entire dataset can be read reasonably quickly
+ * (from the local files, unlike remote data) and that each variable's min and
+ * max info can be gathered (for each file)
+ * and cached (facilitating the handling of constraints in data requests).
+ *
+ * <p>And file data can be cached and reused because each file has a lastModified
+ * time and size, which can be used to detect whether a file is unchanged.
*
- * @author Bob Simons (was bob.simons@noaa.gov, now BobSimons2.00@gmail.com) 2008-04-12
+ * @author Bob Simons (was bob.simons@noaa.gov, now BobSimons2.00@gmail.com)
+ * 2008-04-12
*/
-public abstract class EDDTableFromFiles extends EDDTable implements WatchUpdateHandler{
+public abstract class EDDTableFromFiles extends EDDTable implements WatchUpdateHandler {
public final static String MF_FIRST = "first", MF_LAST = "last";
public static int suggestedUpdateEveryNMillis = 10000;
+
public static int suggestUpdateEveryNMillis(String tFileDir) {
- return String2.isTrulyRemote(tFileDir)? 0 : suggestedUpdateEveryNMillis;
+ return String2.isTrulyRemote(tFileDir) ? 0 : suggestedUpdateEveryNMillis;
}
- /** Don't set this to true here. Some test methods set this to true temporarily. */
+
+ /**
+ * Don't set this to true here. Some test methods set this to true temporarily.
+ */
protected static boolean testQuickRestart = false;
- //set by constructor
+ // set by constructor
protected String fileDir;
protected String fileNameRegex;
protected boolean recursive;
protected String pathRegex;
- protected String metadataFrom;
- protected String preExtractRegex, postExtractRegex, extractRegex,
- columnNameForExtract; // will be "" if not in use
+ protected String metadataFrom;
+ protected String preExtractRegex, postExtractRegex, extractRegex,
+ columnNameForExtract; // will be "" if not in use
protected Pattern preExtractPattern, postExtractPattern,
- extractPattern; //will be null if not in use
- protected String sortedColumnSourceName; //may be "", won't be null
+ extractPattern; // will be null if not in use
+ protected String sortedColumnSourceName; // may be "", won't be null
/**
- * filesAreLocal true if files are on a local hard drive or false if files are remote.
- * 1) A failure when reading a local file, causes file to be marked as bad and dataset reloaded;
- * but a remote failure doesn't.
- * 2) For remote files, the bad file list is rechecked every time dataset is reloaded.
+ * filesAreLocal is true if the files are on a local hard drive, or false if
+ * the files are remote.
+ * 1) A failure when reading a local file causes the file to be marked as bad
+ * and the dataset reloaded,
+ * but a remote failure doesn't.
+ * 2) For remote files, the bad file list is rechecked every time the dataset
+ * is reloaded.
*/
protected boolean filesAreLocal;
- protected String charset; //may be null or ""
- protected String skipHeaderToRegex; //may be null or ""
- protected String skipLinesRegex; //may be null or ""
- protected int columnNamesRow = 1, firstDataRow = 2;
+ protected String charset; // may be null or ""
+ protected String skipHeaderToRegex; // may be null or ""
+ protected String skipLinesRegex; // may be null or ""
+ protected int columnNamesRow = 1, firstDataRow = 2;
protected String columnSeparator = "";
- //for ColumnarAscii only: the startColumn and stopColumn of each
- // dataVariable on each line of the file (0..)
- protected int startColumn[], stopColumn[];
- protected boolean removeMVRows = true; //just used by EDDTableFromMultidimNcFiles
+ // for ColumnarAscii only: the startColumn and stopColumn of each
+ // dataVariable on each line of the file (0..)
+ protected int startColumn[], stopColumn[];
+ protected boolean removeMVRows = true; // just used by EDDTableFromMultidimNcFiles
final static String TREAT_DIMENSIONS_AS = "treatDimensionsAs";
- protected String treatDimensionsAs[][] = null; //just used by EDDTableFromMultidimNcFiles
+ protected String treatDimensionsAs[][] = null; // just used by EDDTableFromMultidimNcFiles
protected int standardizeWhat = Integer.MAX_VALUE; // <0 or MAX_VALUE=not specified by user. >=0 is validated value.
- public int nThreads = 4; //???
+ public int nThreads = 4; // ???
- //source info
+ // source info
protected StringArray sourceDataNames;
protected StringArray safeSourceDataNames;
protected String sourceDataTypes[];
- protected HashMap<String, HashSet<String>> scriptNeedsColumns = new HashMap(); //
+ protected HashMap<String, HashSet<String>> scriptNeedsColumns = new HashMap(); //
- //arrays to hold expected source add_offset, fillValue, missingValue, scale_factor, units
- protected double expectedAddOffset[];
- protected double expectedFillValue[];
+ // arrays to hold expected source add_offset, fillValue, missingValue,
+ // scale_factor, units
+ protected double expectedAddOffset[];
+ protected double expectedFillValue[];
protected double expectedMissingValue[];
- protected double expectedScaleFactor[];
+ protected double expectedScaleFactor[];
protected String expectedUnits[];
- //arrays to hold addAttributes mv info for NEC dv columns
- // so source min max can be determined (skipping missing values)
+ // arrays to hold addAttributes mv info for NEC dv columns
+ // so source min max can be determined (skipping missing values)
protected double addAttFillValue[];
protected double addAttMissingValue[];
/** Columns in the File Table */
- protected final static int
- FT_DIR_INDEX_COL=0, //useful that it is #0 (tFileTable uses same positions)
- FT_FILE_LIST_COL=1, //useful that it is #1
- FT_LAST_MOD_COL=2,
- FT_SIZE_COL=3,
- FT_SORTED_SPACING_COL=4;
- //then 3 cols for each dataVariable: sourceName + _min_|_max_|_hasNaN starting at dv0
+ protected final static int FT_DIR_INDEX_COL = 0, // useful that it is #0 (tFileTable uses same positions)
+ FT_FILE_LIST_COL = 1, // useful that it is #1
+ FT_LAST_MOD_COL = 2,
+ FT_SIZE_COL = 3,
+ FT_SORTED_SPACING_COL = 4;
+ // then 3 cols for each dataVariable: sourceName + _min_|_max_|_hasNaN starting
+ // at dv0
protected final static int dv0 = 5;
- int fileTableSortColumns[]; //null if not active
- boolean fileTableSortAscending[]; //size matches fileTableSortcolumns, all true
-
+ int fileTableSortColumns[]; // null if not active
+ boolean fileTableSortAscending[]; // size matches fileTableSortColumns, all true
protected int sortedDVI = -1;
- protected String filesChanged = "";
+ protected String filesChanged = "";
protected int extractedColNameIndex = -1;
- protected long cumNNotRead = 0; //either don't have matching data or do ('distinct' and 1 value matches)
- protected long cumNReadHaveMatch = 0, cumNReadNoMatch = 0; //read the data file to look for matching data
+ protected long cumNNotRead = 0; // either don't have matching data or do ('distinct' and 1 value matches)
+ protected long cumNReadHaveMatch = 0, cumNReadNoMatch = 0; // read the data file to look for matching data
protected WatchDirectory watchDirectory;
- //dirTable and fileTable inMemory (default=false)
+ // dirTable and fileTable inMemory (default=false)
protected boolean fileTableInMemory = false;
- protected Table dirTable; //one column with dir names
+ protected Table dirTable; // one column with dir names
protected Table fileTable;
- //things just for EDDTableFromHttpGet
- public final static String HTTP_GET_REQUIRED_VARIABLES = "httpGetRequiredVariables";
+ // things just for EDDTableFromHttpGet
+ public final static String HTTP_GET_REQUIRED_VARIABLES = "httpGetRequiredVariables";
public final static String HTTP_GET_DIRECTORY_STRUCTURE = "httpGetDirectoryStructure";
- public final static String HTTP_GET_KEYS = "httpGetKeys";
+ public final static String HTTP_GET_KEYS = "httpGetKeys";
- protected String[] httpGetRequiredVariableNames; //e.g., stationID, time
- protected String[] httpGetRequiredVariableTypes; //e.g., String, double
+ protected String[] httpGetRequiredVariableNames; // e.g., stationID, time
+ protected String[] httpGetRequiredVariableTypes; // e.g., String, double
protected HashSet<String> httpGetKeys = new HashSet();
- //this has the parsed httpGetDirectoryStructure specification
- //with 1 item per directory and the last item being for the file names
- protected StringArray httpGetDirectoryStructureColumnNames; //(i) has a var sourceName or ""
- protected IntArray httpGetDirectoryStructureNs; //(i) has the number of Calendar items, or -1
- protected IntArray httpGetDirectoryStructureCalendars; //(i) has the e.g., Calendar.MONTH, or -1
+ // this has the parsed httpGetDirectoryStructure specification
+ // with 1 item per directory and the last item being for the file names
+ protected StringArray httpGetDirectoryStructureColumnNames; // (i) has a var sourceName or ""
+ protected IntArray httpGetDirectoryStructureNs; // (i) has the number of Calendar items, or -1
+ protected IntArray httpGetDirectoryStructureCalendars; // (i) has the e.g., Calendar.MONTH, or -1
- protected String cacheFromUrl = null; //null if inactive
- protected long cacheMaxSizeB = -1; //cache threshold size in B, <=0 = copy the entire dataset
- protected String cachePartialPathRegex = null; //null if inactive
+ protected String cacheFromUrl = null; // null if inactive
+ protected long cacheMaxSizeB = -1; // cache threshold size in B, <=0 = copy the entire dataset
+ protected String cachePartialPathRegex = null; // null if inactive
/** When threshold size is reached, prune the cache to fraction*threshold. */
- protected double cacheFraction = FileVisitorDNLS.PRUNE_CACHE_DEFAULT_FRACTION;
+ protected double cacheFraction = FileVisitorDNLS.PRUNE_CACHE_DEFAULT_FRACTION;
/**
* This returns the default value for standardizeWhat for this subclass.
@@ -173,29 +186,30 @@ public static int suggestUpdateEveryNMillis(String tFileDir) {
* The default was chosen to mimic the subclass' behavior from
* before support for standardizeWhat options was added.
*/
- abstract int defaultStandardizeWhat();
-
-
-
+ abstract int defaultStandardizeWhat();
/**
- * This constructs an EDDTableFromFiles based on the information in an .xml file.
+ * This constructs an EDDTableFromFiles based on the information in an .xml
+ * file.
*
- * @param erddap if known in this context, else null
- * @param xmlReader with the <erddapDatasets><dataset type="[subclassName]">
- * having just been read.
+ * @param erddap if known in this context, else null
+ * @param xmlReader with the <erddapDatasets><dataset
+ * type="[subclassName]">
+ * having just been read.
* @return an EDDTableFromFiles.
- * When this returns, xmlReader will have just read <erddapDatasets></dataset> .
+ * When this returns, xmlReader will have just read
+ * <erddapDatasets></dataset> .
* @throws Throwable if trouble
*/
- public static EDDTableFromFiles fromXml(Erddap erddap,
- SimpleXMLReader xmlReader) throws Throwable {
-
- //data to be obtained (or not)
- if (verbose) String2.log("\n*** constructing EDDTableFromFiles(xmlReader)...");
- boolean tIsLocal = false; //not actually used
- String tDatasetID = xmlReader.attributeValue("datasetID");
- String tType = xmlReader.attributeValue("type");
+ public static EDDTableFromFiles fromXml(Erddap erddap,
+ SimpleXMLReader xmlReader) throws Throwable {
+
+ // data to be obtained (or not)
+ if (verbose)
+ String2.log("\n*** constructing EDDTableFromFiles(xmlReader)...");
+ boolean tIsLocal = false; // not actually used
+ String tDatasetID = xmlReader.attributeValue("datasetID");
+ String tType = xmlReader.attributeValue("type");
Attributes tGlobalAttributes = null;
ArrayList tDataVariables = new ArrayList();
int tReloadEveryNMinutes = Integer.MAX_VALUE;
@@ -212,29 +226,29 @@ public static EDDTableFromFiles fromXml(Erddap erddap,
boolean tRecursive = false;
String tPathRegex = ".*";
boolean tAccessibleViaFiles = EDStatic.defaultAccessibleViaFiles;
- String tMetadataFrom = MF_LAST;
+ String tMetadataFrom = MF_LAST;
String tPreExtractRegex = "", tPostExtractRegex = "", tExtractRegex = "";
String tColumnNameForExtract = "";
String tSortedColumnSourceName = "";
String tSortFilesBySourceNames = "";
- boolean tRemoveMVRows = true; //used by EDDTableFromMultidimNcFiles
- int tStandardizeWhat = Integer.MAX_VALUE; //not specified by user
+ boolean tRemoveMVRows = true; // used by EDDTableFromMultidimNcFiles
+ int tStandardizeWhat = Integer.MAX_VALUE; // not specified by user
String tSpecialMode = "";
String tCharset = null;
String tSkipHeaderToRegex = "";
String tSkipLinesRegex = "";
- int tColumnNamesRow = 1, tFirstDataRow = 2; //relevant for ASCII files only
- String tColumnSeparator = ""; //relevant for ASCII files only
+ int tColumnNamesRow = 1, tFirstDataRow = 2; // relevant for ASCII files only
+ String tColumnSeparator = ""; // relevant for ASCII files only
boolean tSourceNeedsExpandedFP_EQ = true;
String tDefaultDataQuery = null;
String tDefaultGraphQuery = null;
String tAddVariablesWhere = null;
- int tNThreads = -1; //unspecified
+ int tNThreads = -1; // unspecified
String tCacheFromUrl = null;
int tCacheSizeGB = -1;
String tCachePartialPathRegex = null;
- //process the tags
+ // process the tags
String startOfTags = xmlReader.allTags();
int startOfTagsN = xmlReader.stackSize();
int startOfTagsLength = startOfTags.length();
@@ -242,347 +256,390 @@ public static EDDTableFromFiles fromXml(Erddap erddap,
xmlReader.nextTag();
String tags = xmlReader.allTags();
String content = xmlReader.content();
- //if (reallyVerbose) String2.log(" tags=" + tags + content);
- if (xmlReader.stackSize() == startOfTagsN)
- break; //the </dataset> tag
+ // if (reallyVerbose) String2.log(" tags=" + tags + content);
+ if (xmlReader.stackSize() == startOfTagsN)
+ break; // the </dataset> tag
String localTags = tags.substring(startOfTagsLength);
- //try to make the tag names as consistent, descriptive and readable as possible
- if (localTags.equals("<addAttributes>"))
+ // try to make the tag names as consistent, descriptive and readable as possible
+ if (localTags.equals("<addAttributes>"))
tGlobalAttributes = getAttributesFromXml(xmlReader);
- else if (localTags.equals( "<altitudeMetersPerSourceUnit>"))
+ else if (localTags.equals("<altitudeMetersPerSourceUnit>"))
throw new SimpleException(EDVAlt.stopUsingAltitudeMetersPerSourceUnit);
- else if (localTags.equals( "<dataVariable>")) tDataVariables.add(getSDADVariableFromXml(xmlReader));
- else if (localTags.equals( "<accessibleTo>")) {}
- else if (localTags.equals("</accessibleTo>")) tAccessibleTo = content;
- else if (localTags.equals( "<graphsAccessibleTo>")) {}
- else if (localTags.equals("</graphsAccessibleTo>")) tGraphsAccessibleTo = content;
- else if (localTags.equals( "<reloadEveryNMinutes>")) {}
- else if (localTags.equals("</reloadEveryNMinutes>")) tReloadEveryNMinutes = String2.parseInt(content);
- else if (localTags.equals( "<updateEveryNMillis>")) {}
- else if (localTags.equals("</updateEveryNMillis>")) tUpdateEveryNMillis = String2.parseInt(content);
- else if (localTags.equals( "<fileDir>")) {}
- else if (localTags.equals("</fileDir>")) tFileDir = content;
- else if (localTags.equals( "<fileNameRegex>")) {}
- else if (localTags.equals("</fileNameRegex>")) tFileNameRegex = content;
- else if (localTags.equals( "<recursive>")) {}
- else if (localTags.equals("</recursive>")) tRecursive = String2.parseBoolean(content);
- else if (localTags.equals( "<pathRegex>")) {}
- else if (localTags.equals("</pathRegex>")) tPathRegex = content;
- else if (localTags.equals( "<accessibleViaFiles>")) {}
- else if (localTags.equals("</accessibleViaFiles>")) tAccessibleViaFiles = String2.parseBoolean(content);
- else if (localTags.equals( "<metadataFrom>")) {}
- else if (localTags.equals("</metadataFrom>")) tMetadataFrom = content;
- else if (localTags.equals( "<nDimensions>")) {}
- else if (localTags.equals("</nDimensions>")) {} //tNDimensions = String2.parseInt(content);
- else if (localTags.equals( "<preExtractRegex>")) {}
- else if (localTags.equals("</preExtractRegex>")) tPreExtractRegex = content;
- else if (localTags.equals( "<postExtractRegex>")) {}
- else if (localTags.equals("</postExtractRegex>")) tPostExtractRegex = content;
- else if (localTags.equals( "<extractRegex>")) {}
- else if (localTags.equals("</extractRegex>")) tExtractRegex = content;
- else if (localTags.equals( "<columnNameForExtract>")) {}
- else if (localTags.equals("</columnNameForExtract>")) tColumnNameForExtract = content;
- else if (localTags.equals( "<sortedColumnSourceName>")) {}
- else if (localTags.equals("</sortedColumnSourceName>")) tSortedColumnSourceName = content;
- else if (localTags.equals( "<sortFilesBySourceNames>")) {}
- else if (localTags.equals("</sortFilesBySourceNames>")) tSortFilesBySourceNames = content;
- else if (localTags.equals( "<charset>")) {}
- else if (localTags.equals("</charset>")) tCharset = content;
- else if (localTags.equals( "<skipHeaderToRegex>")) {}
- else if (localTags.equals("</skipHeaderToRegex>")) tSkipHeaderToRegex = content;
- else if (localTags.equals( "<skipLinesRegex>")) {}
- else if (localTags.equals("</skipLinesRegex>")) tSkipLinesRegex = content;
- else if (localTags.equals( "<columnNamesRow>")) {}
- else if (localTags.equals("</columnNamesRow>")) tColumnNamesRow = String2.parseInt(content);
- else if (localTags.equals( "<firstDataRow>")) {}
- else if (localTags.equals("</firstDataRow>")) tFirstDataRow = String2.parseInt(content);
- else if (localTags.equals( "<columnSeparator>")) {}
- else if (localTags.equals("</columnSeparator>")) tColumnSeparator = content;
- else if (localTags.equals( "<sourceNeedsExpandedFP_EQ>")) {}
- else if (localTags.equals("</sourceNeedsExpandedFP_EQ>")) tSourceNeedsExpandedFP_EQ = String2.parseBoolean(content);
- else if (localTags.equals( "<specialMode>")) {}
- else if (localTags.equals("</specialMode>")) tSpecialMode = content;
- else if (localTags.equals( "<fileTableInMemory>")) {}
- else if (localTags.equals("</fileTableInMemory>")) tFileTableInMemory = String2.parseBoolean(content);
- else if (localTags.equals( "<onChange>")) {}
- else if (localTags.equals("</onChange>")) tOnChange.add(content);
- else if (localTags.equals( "<fgdcFile>")) {}
- else if (localTags.equals("</fgdcFile>")) tFgdcFile = content;
- else if (localTags.equals( "<iso19115File>")) {}
- else if (localTags.equals("</iso19115File>")) tIso19115File = content;
- else if (localTags.equals( "<sosOfferingPrefix>")) {}
- else if (localTags.equals("</sosOfferingPrefix>")) tSosOfferingPrefix = content;
- else if (localTags.equals( "<defaultDataQuery>")) {}
- else if (localTags.equals("</defaultDataQuery>")) tDefaultDataQuery = content;
- else if (localTags.equals( "<defaultGraphQuery>")) {}
- else if (localTags.equals("</defaultGraphQuery>")) tDefaultGraphQuery = content;
- else if (localTags.equals( "<addVariablesWhere>")) {}
- else if (localTags.equals("</addVariablesWhere>")) tAddVariablesWhere = content;
- else if (localTags.equals( "<isLocal>")) {}
- else if (localTags.equals("</isLocal>")) tIsLocal = String2.parseBoolean(content);
- else if (localTags.equals( "<removeMVRows>")) {}
- else if (localTags.equals("</removeMVRows>")) tRemoveMVRows = String2.parseBoolean(content);
- else if (localTags.equals( "<standardizeWhat>")) {}
- else if (localTags.equals("</standardizeWhat>")) tStandardizeWhat = String2.parseInt(content);
- else if (localTags.equals( "<nThreads>")) {}
- else if (localTags.equals("</nThreads>")) tNThreads = String2.parseInt(content);
- else if (localTags.equals( "<cacheFromUrl>")) {}
- else if (localTags.equals("</cacheFromUrl>")) tCacheFromUrl = content;
- else if (localTags.equals( "<cacheSizeGB>")) {}
- else if (localTags.equals("</cacheSizeGB>")) tCacheSizeGB = String2.parseInt(content);
- else if (localTags.equals( "<cachePartialPathRegex>")) {}
- else if (localTags.equals("</cachePartialPathRegex>")) tCachePartialPathRegex = content;
-
- else xmlReader.unexpectedTagException();
+ else if (localTags.equals("<dataVariable>"))
+ tDataVariables.add(getSDADVariableFromXml(xmlReader));
+ else if (localTags.equals("<accessibleTo>")) {
+ } else if (localTags.equals("</accessibleTo>"))
+ tAccessibleTo = content;
+ else if (localTags.equals("<graphsAccessibleTo>")) {
+ } else if (localTags.equals("</graphsAccessibleTo>"))
+ tGraphsAccessibleTo = content;
+ else if (localTags.equals("<reloadEveryNMinutes>")) {
+ } else if (localTags.equals("</reloadEveryNMinutes>"))
+ tReloadEveryNMinutes = String2.parseInt(content);
+ else if (localTags.equals("<updateEveryNMillis>")) {
+ } else if (localTags.equals("</updateEveryNMillis>"))
+ tUpdateEveryNMillis = String2.parseInt(content);
+ else if (localTags.equals("<fileDir>")) {
+ } else if (localTags.equals("</fileDir>"))
+ tFileDir = content;
+ else if (localTags.equals("<fileNameRegex>")) {
+ } else if (localTags.equals("</fileNameRegex>"))
+ tFileNameRegex = content;
+ else if (localTags.equals("<recursive>")) {
+ } else if (localTags.equals("</recursive>"))
+ tRecursive = String2.parseBoolean(content);
+ else if (localTags.equals("<pathRegex>")) {
+ } else if (localTags.equals("</pathRegex>"))
+ tPathRegex = content;
+ else if (localTags.equals("<accessibleViaFiles>")) {
+ } else if (localTags.equals("</accessibleViaFiles>"))
+ tAccessibleViaFiles = String2.parseBoolean(content);
+ else if (localTags.equals("<metadataFrom>")) {
+ } else if (localTags.equals("</metadataFrom>"))
+ tMetadataFrom = content;
+ else if (localTags.equals("<nDimensions>")) {
+ } else if (localTags.equals("</nDimensions>")) {
+ } // tNDimensions = String2.parseInt(content);
+ else if (localTags.equals("<preExtractRegex>")) {
+ } else if (localTags.equals("</preExtractRegex>"))
+ tPreExtractRegex = content;
+ else if (localTags.equals("<postExtractRegex>")) {
+ } else if (localTags.equals("</postExtractRegex>"))
+ tPostExtractRegex = content;
+ else if (localTags.equals("<extractRegex>")) {
+ } else if (localTags.equals("</extractRegex>"))
+ tExtractRegex = content;
+ else if (localTags.equals("<columnNameForExtract>")) {
+ } else if (localTags.equals("</columnNameForExtract>"))
+ tColumnNameForExtract = content;
+ else if (localTags.equals("<sortedColumnSourceName>")) {
+ } else if (localTags.equals("</sortedColumnSourceName>"))
+ tSortedColumnSourceName = content;
+ else if (localTags.equals("<sortFilesBySourceNames>")) {
+ } else if (localTags.equals("</sortFilesBySourceNames>"))
+ tSortFilesBySourceNames = content;
+ else if (localTags.equals("<charset>")) {
+ } else if (localTags.equals("</charset>"))
+ tCharset = content;
+ else if (localTags.equals("<skipHeaderToRegex>")) {
+ } else if (localTags.equals("</skipHeaderToRegex>"))
+ tSkipHeaderToRegex = content;
+ else if (localTags.equals("<skipLinesRegex>")) {
+ } else if (localTags.equals("</skipLinesRegex>"))
+ tSkipLinesRegex = content;
+ else if (localTags.equals("<columnNamesRow>")) {
+ } else if (localTags.equals("</columnNamesRow>"))
+ tColumnNamesRow = String2.parseInt(content);
+ else if (localTags.equals("<firstDataRow>")) {
+ } else if (localTags.equals("</firstDataRow>"))
+ tFirstDataRow = String2.parseInt(content);
+ else if (localTags.equals("<columnSeparator>")) {
+ } else if (localTags.equals("</columnSeparator>"))
+ tColumnSeparator = content;
+ else if (localTags.equals("<sourceNeedsExpandedFP_EQ>")) {
+ } else if (localTags.equals("</sourceNeedsExpandedFP_EQ>"))
+ tSourceNeedsExpandedFP_EQ = String2.parseBoolean(content);
+ else if (localTags.equals("<specialMode>")) {
+ } else if (localTags.equals("</specialMode>"))
+ tSpecialMode = content;
+ else if (localTags.equals("<fileTableInMemory>")) {
+ } else if (localTags.equals("</fileTableInMemory>"))
+ tFileTableInMemory = String2.parseBoolean(content);
+ else if (localTags.equals("<onChange>")) {
+ } else if (localTags.equals("</onChange>"))
+ tOnChange.add(content);
+ else if (localTags.equals("<fgdcFile>")) {
+ } else if (localTags.equals("</fgdcFile>"))
+ tFgdcFile = content;
+ else if (localTags.equals("<iso19115File>")) {
+ } else if (localTags.equals("</iso19115File>"))
+ tIso19115File = content;
+ else if (localTags.equals("<sosOfferingPrefix>")) {
+ } else if (localTags.equals("</sosOfferingPrefix>"))
+ tSosOfferingPrefix = content;
+ else if (localTags.equals("<defaultDataQuery>")) {
+ } else if (localTags.equals("</defaultDataQuery>"))
+ tDefaultDataQuery = content;
+ else if (localTags.equals("<defaultGraphQuery>")) {
+ } else if (localTags.equals("</defaultGraphQuery>"))
+ tDefaultGraphQuery = content;
+ else if (localTags.equals("<addVariablesWhere>")) {
+ } else if (localTags.equals("</addVariablesWhere>"))
+ tAddVariablesWhere = content;
+ else if (localTags.equals("<isLocal>")) {
+ } else if (localTags.equals("</isLocal>"))
+ tIsLocal = String2.parseBoolean(content);
+ else if (localTags.equals("<removeMVRows>")) {
+ } else if (localTags.equals("</removeMVRows>"))
+ tRemoveMVRows = String2.parseBoolean(content);
+ else if (localTags.equals("<standardizeWhat>")) {
+ } else if (localTags.equals("</standardizeWhat>"))
+ tStandardizeWhat = String2.parseInt(content);
+ else if (localTags.equals("<nThreads>")) {
+ } else if (localTags.equals("</nThreads>"))
+ tNThreads = String2.parseInt(content);
+ else if (localTags.equals("<cacheFromUrl>")) {
+ } else if (localTags.equals("</cacheFromUrl>"))
+ tCacheFromUrl = content;
+ else if (localTags.equals("<cacheSizeGB>")) {
+ } else if (localTags.equals("</cacheSizeGB>"))
+ tCacheSizeGB = String2.parseInt(content);
+ else if (localTags.equals("<cachePartialPathRegex>")) {
+ } else if (localTags.equals("</cachePartialPathRegex>"))
+ tCachePartialPathRegex = content;
+
+ else
+ xmlReader.unexpectedTagException();
}
int ndv = tDataVariables.size();
Object ttDataVariables[][] = new Object[ndv][];
for (int i = 0; i < tDataVariables.size(); i++)
- ttDataVariables[i] = (Object[])tDataVariables.get(i);
+ ttDataVariables[i] = (Object[]) tDataVariables.get(i);
if (tType == null)
tType = "";
if (tType.equals("EDDTableFromAsciiFiles")) {
- return new EDDTableFromAsciiFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
- } else if (tType.equals("EDDTableFromAudioFiles")) {
- return new EDDTableFromAudioFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
+ return new EDDTableFromAsciiFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
+ } else if (tType.equals("EDDTableFromAudioFiles")) {
+ return new EDDTableFromAudioFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
} else if (tType.equals("EDDTableFromAwsXmlFiles")) {
- return new EDDTableFromAwsXmlFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
+ return new EDDTableFromAwsXmlFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
} else if (tType.equals("EDDTableFromColumnarAsciiFiles")) {
- return new EDDTableFromColumnarAsciiFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
- } else if (tType.equals("EDDTableFromHttpGet")) {
- return new EDDTableFromHttpGet(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
+ return new EDDTableFromColumnarAsciiFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
+ } else if (tType.equals("EDDTableFromHttpGet")) {
+ return new EDDTableFromHttpGet(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
} else if (tType.equals("EDDTableFromInvalidCRAFiles")) {
- return new EDDTableFromInvalidCRAFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
- } else if (tType.equals("EDDTableFromJsonlCSVFiles")) {
- return new EDDTableFromJsonlCSVFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
- } else if (tType.equals("EDDTableFromMultidimNcFiles")) {
- return new EDDTableFromMultidimNcFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
- } else if (tType.equals("EDDTableFromNcFiles")) {
- return new EDDTableFromNcFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
+ return new EDDTableFromInvalidCRAFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
+ } else if (tType.equals("EDDTableFromJsonlCSVFiles")) {
+ return new EDDTableFromJsonlCSVFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
+ } else if (tType.equals("EDDTableFromMultidimNcFiles")) {
+ return new EDDTableFromMultidimNcFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
+ } else if (tType.equals("EDDTableFromNcFiles")) {
+ return new EDDTableFromNcFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
} else if (tType.equals("EDDTableFromNcCFFiles")) {
- return new EDDTableFromNcCFFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
- /*} else if (tType.equals("EDDTableFromPostNcFiles")) {
- return new EDDTableFromNcFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
- */
-
- } else if (tType.equals("EDDTableFromNccsvFiles")) {
- return new EDDTableFromNccsvFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
+ return new EDDTableFromNcCFFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
+ /*
+ * } else if (tType.equals("EDDTableFromPostNcFiles")) {
+ * return new EDDTableFromNcFiles(tDatasetID,
+ * tAccessibleTo, tGraphsAccessibleTo,
+ * tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ * tDefaultDataQuery, tDefaultGraphQuery,
+ * tGlobalAttributes,
+ * ttDataVariables,
+ * tReloadEveryNMinutes, tUpdateEveryNMillis,
+ * tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ * tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ * tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ * tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ * tSortedColumnSourceName, tSortFilesBySourceNames,
+ * tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ * tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ * tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ * tAddVariablesWhere);
+ */
+
+ } else if (tType.equals("EDDTableFromNccsvFiles")) {
+ return new EDDTableFromNccsvFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
} else if (tType.equals("EDDTableFromHyraxFiles")) {
String qrName = quickRestartFullFileName(tDatasetID);
- long tCreationTime = System.currentTimeMillis(); //used below
- if (EDStatic.quickRestart &&
- EDStatic.initialLoadDatasets() &&
- File2.isFile(qrName)) {
-
- //quickRestart
- //set creationTimeMillis to time of previous creation, so next time
- //to be reloaded will be same as if ERDDAP hadn't been restarted.
- tCreationTime = File2.getLastModified(qrName); //0 if trouble
+ long tCreationTime = System.currentTimeMillis(); // used below
+ if (EDStatic.quickRestart &&
+ EDStatic.initialLoadDatasets() &&
+ File2.isFile(qrName)) {
+
+ // quickRestart
+ // set creationTimeMillis to time of previous creation, so next time
+ // to be reloaded will be same as if ERDDAP hadn't been restarted.
+ tCreationTime = File2.getLastModified(qrName); // 0 if trouble
if (verbose)
- String2.log(" quickRestart " + tDatasetID + " previous=" +
- Calendar2.millisToIsoStringTZ(tCreationTime));
+ String2.log(" quickRestart " + tDatasetID + " previous=" +
+ Calendar2.millisToIsoStringTZ(tCreationTime));
} else {
- //make downloadFileTasks
+ // make downloadFileTasks
EDDTableFromHyraxFiles.makeDownloadFileTasks(tDatasetID,
- tGlobalAttributes.getString("sourceUrl"),
- tFileNameRegex, tRecursive, tPathRegex);
+ tGlobalAttributes.getString("sourceUrl"),
+ tFileNameRegex, tRecursive, tPathRegex);
- //save quickRestartFile (file's timestamp is all that matters)
+ // save quickRestartFile (file's timestamp is all that matters)
Attributes qrAtts = new Attributes();
qrAtts.add("datasetID", tDatasetID);
File2.makeDirectory(File2.getDirectory(qrName));
@@ -590,21 +647,21 @@ else if (localTags.equals( "")) {}
}
EDDTableFromFiles tEDDTable = new EDDTableFromHyraxFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
tEDDTable.creationTimeMillis = tCreationTime;
return tEDDTable;
@@ -612,283 +669,380 @@ else if (localTags.equals( "")) {}
} else if (tType.equals("EDDTableFromThreddsFiles")) {
String qrName = quickRestartFullFileName(tDatasetID);
- long tCreationTime = System.currentTimeMillis(); //used below
- if (EDStatic.quickRestart &&
- EDStatic.initialLoadDatasets() &&
- File2.isFile(qrName)) {
-
- //quickRestart
- //set creationTimeMillis to time of previous creation, so next time
- //to be reloaded will be same as if ERDDAP hadn't been restarted.
- tCreationTime = File2.getLastModified(qrName); //0 if trouble
+ long tCreationTime = System.currentTimeMillis(); // used below
+ if (EDStatic.quickRestart &&
+ EDStatic.initialLoadDatasets() &&
+ File2.isFile(qrName)) {
+
+ // quickRestart
+ // set creationTimeMillis to time of previous creation, so next time
+ // to be reloaded will be same as if ERDDAP hadn't been restarted.
+ tCreationTime = File2.getLastModified(qrName); // 0 if trouble
if (verbose)
- String2.log(" quickRestart " + tDatasetID + " previous=" +
- Calendar2.millisToIsoStringTZ(tCreationTime));
+ String2.log(" quickRestart " + tDatasetID + " previous=" +
+ Calendar2.millisToIsoStringTZ(tCreationTime));
} else {
- //make downloadFileTasks
+ // make downloadFileTasks
EDDTableFromThreddsFiles.makeDownloadFileTasks(tDatasetID,
- tGlobalAttributes.getString("sourceUrl"),
- tFileNameRegex, tRecursive, tPathRegex, tSpecialMode);
+ tGlobalAttributes.getString("sourceUrl"),
+ tFileNameRegex, tRecursive, tPathRegex, tSpecialMode);
- //save quickRestartFile (file's timestamp is all that matters)
+ // save quickRestartFile (file's timestamp is all that matters)
Attributes qrAtts = new Attributes();
qrAtts.add("datasetID", tDatasetID);
File2.makeDirectory(File2.getDirectory(qrName));
NcHelper.writeAttributesToNc3(qrName, qrAtts);
}
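
// A minimal, self-contained sketch (hypothetical names; plain java.io, not
// ERDDAP's File2/NcHelper API) of the quickRestart idea above: the marker
// file's lastModified time stands in for the dataset's creationTimeMillis,
// so the reload schedule is the same as if ERDDAP hadn't been restarted.
import java.io.File;
import java.io.IOException;

public class QuickRestartSketch {
    static long creationTimeMillis(File marker) throws IOException {
        if (marker.isFile())
            return marker.lastModified();   // previous creation time; 0 if trouble
        marker.getParentFile().mkdirs();
        marker.createNewFile();             // the file's timestamp is all that matters
        return System.currentTimeMillis();
    }

    public static void main(String[] args) throws IOException {
        File marker = new File(System.getProperty("java.io.tmpdir"), "myDatasetID.quickRestart");
        System.out.println("creationTimeMillis=" + creationTimeMillis(marker));
    }
}
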
- EDDTableFromFiles tEDDTable = new EDDTableFromThreddsFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
+ EDDTableFromFiles tEDDTable = new EDDTableFromThreddsFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
tEDDTable.creationTimeMillis = tCreationTime;
return tEDDTable;
} else if (tType.equals("EDDTableFromWFSFiles")) {
- String fileDir = EDStatic.fullCopyDirectory + tDatasetID + "/";
+ String fileDir = EDStatic.fullCopyDirectory + tDatasetID + "/";
String fileName = "data.tsv";
- long tCreationTime = System.currentTimeMillis(); //used below
- if (EDStatic.quickRestart && EDStatic.initialLoadDatasets() &&
- File2.isFile(fileDir + fileName)) {
-
- //quickRestart
- //set creationTimeMillis to time of previous creation, so next time
- //to be reloaded will be same as if ERDDAP hadn't been restarted.
- tCreationTime = File2.getLastModified(fileDir + fileName); //0 if trouble
+ long tCreationTime = System.currentTimeMillis(); // used below
+ if (EDStatic.quickRestart && EDStatic.initialLoadDatasets() &&
+ File2.isFile(fileDir + fileName)) {
+
+ // quickRestart
+ // set creationTimeMillis to time of previous creation, so next time
+ // to be reloaded will be same as if ERDDAP hadn't been restarted.
+ tCreationTime = File2.getLastModified(fileDir + fileName); // 0 if trouble
if (verbose)
- String2.log(" quickRestart " + tDatasetID + " previous=" +
- Calendar2.millisToIsoStringTZ(tCreationTime));
+ String2.log(" quickRestart " + tDatasetID + " previous=" +
+ Calendar2.millisToIsoStringTZ(tCreationTime));
} else {
- //download the file (its timestamp will be *now*)
+ // download the file (its timestamp will be *now*)
File2.makeDirectory(fileDir);
String error = EDDTableFromWFSFiles.downloadData(
- tGlobalAttributes.getString("sourceUrl"),
- tGlobalAttributes.getString("rowElementXPath"),
- fileDir + fileName);
- if (error.length() > 0)
+ tGlobalAttributes.getString("sourceUrl"),
+ tGlobalAttributes.getString("rowElementXPath"),
+ fileDir + fileName);
+ if (error.length() > 0)
String2.log(error);
}
- return new EDDTableFromWFSFiles(tDatasetID,
- tAccessibleTo, tGraphsAccessibleTo,
- tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- tDefaultDataQuery, tDefaultGraphQuery,
- tGlobalAttributes,
- ttDataVariables,
- tReloadEveryNMinutes, tUpdateEveryNMillis,
- fileDir, //force fileDir
- ".*\\.tsv", //force fileNameRegex
- false, //force !recursive,
- ".*", //irrelevant pathRegex
- tMetadataFrom,
- File2.UTF_8,//force charset
- tSkipHeaderToRegex,
- tSkipLinesRegex,
- 1, //force columnNamesRow,
- 3, //force firstDataRow,
- "", //force tColumnSeparator
- "","","","", //force tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- tSortedColumnSourceName, tSortFilesBySourceNames,
- tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- tAddVariablesWhere);
-
- //} else if (tType.equals("EDDTableFrom???Files")) {
- // return new EDDTableFromFiles(tDatasetID,
- // tAccessibleTo, tGraphsAccessibleTo,
- // tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
- // tDefaultDataQuery, tDefaultGraphQuery,
- // tGlobalAttributes,
- // ttDataVariables,
- // tReloadEveryNMinutes, tUpdateEveryNMillis,
- // tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
- // tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
- // tColumnNamesRow, tFirstDataRow, tColumnSeparator,
- // tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
- // tSortedColumnSourceName, tSortFilesBySourceNames,
- // tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
- // tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
- // tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
- // tAddVariablesWhere);
+ return new EDDTableFromWFSFiles(tDatasetID,
+ tAccessibleTo, tGraphsAccessibleTo,
+ tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ tDefaultDataQuery, tDefaultGraphQuery,
+ tGlobalAttributes,
+ ttDataVariables,
+ tReloadEveryNMinutes, tUpdateEveryNMillis,
+ fileDir, // force fileDir
+ ".*\\.tsv", // force fileNameRegex
+ false, // force !recursive,
+ ".*", // irrelevant pathRegex
+ tMetadataFrom,
+ File2.UTF_8, // force charset
+ tSkipHeaderToRegex,
+ tSkipLinesRegex,
+ 1, // force columnNamesRow,
+ 3, // force firstDataRow,
+ "", // force tColumnSeparator
+ "", "", "", "", // force tPreExtractRegex, tPostExtractRegex, tExtractRegex,
+ // tColumnNameForExtract,
+ tSortedColumnSourceName, tSortFilesBySourceNames,
+ tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ tAddVariablesWhere);
+
+ // } else if (tType.equals("EDDTableFrom???Files")) {
+ // return new EDDTableFromFiles(tDatasetID,
+ // tAccessibleTo, tGraphsAccessibleTo,
+ // tOnChange, tFgdcFile, tIso19115File, tSosOfferingPrefix,
+ // tDefaultDataQuery, tDefaultGraphQuery,
+ // tGlobalAttributes,
+ // ttDataVariables,
+ // tReloadEveryNMinutes, tUpdateEveryNMillis,
+ // tFileDir, tFileNameRegex, tRecursive, tPathRegex, tMetadataFrom,
+ // tCharset, tSkipHeaderToRegex, tSkipLinesRegex,
+ // tColumnNamesRow, tFirstDataRow, tColumnSeparator,
+ // tPreExtractRegex, tPostExtractRegex, tExtractRegex, tColumnNameForExtract,
+ // tSortedColumnSourceName, tSortFilesBySourceNames,
+ // tSourceNeedsExpandedFP_EQ, tFileTableInMemory,
+ // tAccessibleViaFiles, tRemoveMVRows, tStandardizeWhat,
+ // tNThreads, tCacheFromUrl, tCacheSizeGB, tCachePartialPathRegex,
+ // tAddVariablesWhere);
} else {
- throw new Exception("type=\"" + tType +
- "\" needs to be added to EDDTableFromFiles.fromXml at end.");
+ throw new Exception("type=\"" + tType +
+ "\" needs to be added to EDDTableFromFiles.fromXml at end.");
}
}
/**
* The constructor.
*
- * @param tClassName e.g., EDDTableFromNcFiles
- * @param tDatasetID is a very short string identifier
- * (recommended: [A-Za-z][A-Za-z0-9_]* )
- * for this dataset. See EDD.datasetID().
- * @param tAccessibleTo is a comma separated list of 0 or more
- * roles which will have access to this dataset.
- *    <br>If null, everyone will have access to this dataset (even if not logged in).
- *    <br>If "", no one will have access to this dataset.
- * @param tOnChange 0 or more actions (starting with http://, https://, or mailto: )
- * to be done whenever the dataset changes significantly
- * @param tFgdcFile This should be the fullname of a file with the FGDC
- * that should be used for this dataset, or "" (to cause ERDDAP not
- * to try to generate FGDC metadata for this dataset), or null (to allow
- * ERDDAP to try to generate FGDC metadata for this dataset).
- * @param tIso19115 This is like tFgdcFile, but for the ISO 19119-2/19139 metadata.
- * @param tAddGlobalAttributes are global attributes which will
- * be added to (and take precedence over) the data source's global attributes.
- * This may be null if you have nothing to add.
- * The combined global attributes must include:
- *    <ul>
- *    <li>"title" - the short (< 80 characters) description of the dataset
- *    <li>"summary" - the longer description of the dataset.
- *    It may have newline characters (usually at <= 72 chars per line).
- *    <li>"institution" - the source of the data
- *    (best if < 50 characters so it fits in a graph's legend).
- *    <li>"infoUrl" - the url with information about this data set
- *    <li>"cdm_data_type" - one of the EDD.CDM_xxx options
- *    </ul>
- * Special case: value="null" causes that item to be removed from combinedGlobalAttributes.
- * Special case: if combinedGlobalAttributes name="license", any instance of value="[standard]"
- * will be converted to the EDStatic.standardLicense.
- * @param tDataVariables is an Object[nDataVariables][3 or 4]:
- *    <br>[0]=String sourceName (the name of the data variable in the dataset source,
- *    without the outer or inner sequence name),
- *    <br>[1]=String destinationName (the name to be presented to the ERDDAP user,
- *    or null to use the sourceName),
- *    <br>[2]=Attributes addAttributes (at ERD, this must have "ioos_category" -
- *    a category from EDV.ioosCategories).
- *    Special case: value="null" causes that item to be removed from combinedAttributes.
- *    <br>[3]=String source dataType (e.g., "int", "float", "String").
- *    Some data sources have ambiguous data types, so it needs to be specified here.
- *    <br>The order of variables you define doesn't have to match the
- *    order in the source.
- * If there is a time variable,
- * either tAddAttributes (read first) or tSourceAttributes must have "units"
- *    which is either
- *    <ul>
- *    <li>a UDUunits string (containing " since ")
- *    describing how to interpret source time values
- *    (which should always be numeric since they are a dimension of a grid)
- *    (e.g., "seconds since 1970-01-01T00:00:00").
- *    <li>a java.time.format.DateTimeFormatter string
- * (which is compatible with java.text.SimpleDateFormat) describing how to interpret
- * string times (e.g., the ISO8601TZ_FORMAT "yyyy-MM-dd'T'HH:mm:ssZ", see
- * https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/time/format/DateTimeFormatter.html or
- * https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/text/SimpleDateFormat.html)).
- *    </ul>
- *
- * @param tReloadEveryNMinutes indicates how often the source should
- * be checked for new data.
- * @param tFileDir the base directory where the files are located.
- * For EDDTableFromHyraxFiles, this is the url of the main .html page,
- * e.g., http://biloxi-bay.ssc.hpc.msstate.edu/dods-bin/nph-dods/WCOS/nmsp/wcos/
- * @param tFileNameRegex the regex which determines which files in
- * the directories are to be read (use .* for all)
- *    <br>You can use .* for all, but it is better to be more specific.
- * For example, .*\.nc will get all files with the extension .nc.
- * @param tRecursive if true, this class will look for files in the
- * fileDir and all subdirectories
- * @param tMetadataFrom this indicates the file to be used
- * to extract source metadata (first/last based on sorted file lastModifiedTime).
- * Valid values are "first", "penultimate", "last".
- * @param tCharset the charset; relevant for ASCII files only
- * @param tColumnNamesRow the number of the row with column names (1..; usually 1, may be 0 (none)); relevant for ASCII files only.
- * @param tDataRow the number of the row with column names (1..; usually 2); relevant for ASCII files only.
- * @param tColumnSeparator the character that separates the columns.
- * Use "" or null to have this method guess. Otherwise,
- * the first character of this string will be used.
- * @param tPreExtractRegex may be "" or null if not needed.
- * If present, this usually begins with "^" to match the beginning of the file name.
- * If present, this is used to remove text from the start of the file name.
- * The removal only occurs if the regex is matched.
- * @param tPostExtractRegex may be "" or null if not needed.
- * If present, this usually ends with "$" to match the beginning of the file name.
- * If present, this is used to remove text from the end of the file name.
- * The removal only occurs if the regex is matched.
- * @param tExtractRegex may be "" or null if not needed.
- * Use ".*" to match the entire file name.
- * If present, this is used after preExtractRegex and postExtractRegex
- * to extract a string from the file name (e.g., stationID).
- * If the regex isn't matched, the entire file name is used (minus preExtract and postExtract).
- * @param tColumnNameForExtract the data column name for the extracted Strings.
- * This column name must be in the tDataVariables list as a source column name
- * (with any data type).
- * @param tSortedColumnSourceName the source name of a timeStamp column or
- * the numeric column that the data files are usually already sorted by
- * within each file (use null or "" for none), e.g., "time".
- * It is ok if not all files are sorted by this column.
- * If present, this can greatly speed up some data requests.
- * @param tSortFilesBySourceNames is a comma(or space)-separated list of source variable names
- * specifying how the internal list of files should be sorted (in ascending order).
- *    <br>It is the minimum value of the specified columns in each file that is used for sorting.
- *    <br>When a data request is filled, data is obtained from the files in this order.
- *    <br>Thus it largely determines the overall order of the data in the response.
- *    <br>If you specify more than one column name,
- *    <br>the second name is used if there is a tie for the first column;
- *    <br>the third is used if there is a tie for the first and second columns; ...
- *    <br>It is optional (the default is fileDir+fileName order).
+ * @param tClassName e.g., EDDTableFromNcFiles
+ * @param tDatasetID is a very short string identifier
+ * (recommended: [A-Za-z][A-Za-z0-9_]* )
+ * for this dataset. See EDD.datasetID().
+ * @param tAccessibleTo is a comma separated list of 0 or more
+ * roles which will have access to this
+ * dataset.
+ *
+ * If null, everyone will have access to this
+ * dataset (even if not logged in).
+ *
+ * If "", no one will have access to this
+ * dataset.
+ * @param tOnChange 0 or more actions (starting with http://,
+ * https://, or mailto: )
+ * to be done whenever the dataset changes
+ * significantly
+ * @param tFgdcFile This should be the fullname of a file with
+ * the FGDC
+ * that should be used for this dataset, or ""
+ * (to cause ERDDAP not
+ * to try to generate FGDC metadata for this
+ * dataset), or null (to allow
+ * ERDDAP to try to generate FGDC metadata for
+ * this dataset).
+ * @param tIso19115File This is like tFgdcFile, but for the ISO
+ * 19115-2/19139 metadata.
+ * @param tAddGlobalAttributes are global attributes which will
+ * be added to (and take precedence over) the
+ * data source's global attributes.
+ * This may be null if you have nothing to add.
+ * The combined global attributes must include:
+ * <ul>
+ * <li>"title" - the short (< 80 characters)
+ * description of the dataset
+ * <li>"summary" - the longer description of
+ * the dataset.
+ * It may have newline characters (usually at
+ * <= 72 chars per line).
+ * <li>"institution" - the source of the data
+ * (best if < 50 characters so it fits in a
+ * graph's legend).
+ * <li>"infoUrl" - the url with information
+ * about this data set
+ * <li>"cdm_data_type" - one of the EDD.CDM_xxx
+ * options
+ * </ul>
+ * Special case: value="null" causes that item
+ * to be removed from combinedGlobalAttributes.
+ * Special case: if combinedGlobalAttributes
+ * name="license", any instance of
+ * value="[standard]"
+ * will be converted to the
+ * EDStatic.standardLicense.
+ * @param tDataVariables is an Object[nDataVariables][3 or 4]:
+ *
+ * [0]=String sourceName (the name of the data
+ * variable in the dataset source,
+ * without the outer or inner sequence name),
+ *
+ * [1]=String destinationName (the name to be
+ * presented to the ERDDAP user,
+ * or null to use the sourceName),
+ *
+ * [2]=Attributes addAttributes (at ERD, this
+ * must have "ioos_category" -
+ * a category from EDV.ioosCategories).
+ * Special case: value="null" causes that item
+ * to be removed from combinedAttributes.
+ *
+ * [3]=String source dataType (e.g., "int",
+ * "float", "String").
+ * Some data sources have ambiguous data types,
+ * so it needs to be specified here.
+ *
+ * The order of variables you define doesn't
+ * have to match the
+ * order in the source.
+ *
+ * If there is a time variable,
+ * either tAddAttributes (read first) or
+ * tSourceAttributes must have "units"
+ * which is either
+ * <ul>
+ * <li>a UDUnits string (containing " since ")
+ * describing how to interpret source time
+ * values
+ * (which should always be numeric since they
+ * are a dimension of a grid)
+ * (e.g., "seconds since 1970-01-01T00:00:00").
+ * <li>a java.time.format.DateTimeFormatter
+ * string
+ * (which is compatible with
+ * java.text.SimpleDateFormat) describing how
+ * to interpret
+ * string times (e.g., the ISO8601TZ_FORMAT
+ * "yyyy-MM-dd'T'HH:mm:ssZ", see
+ * https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/time/format/DateTimeFormatter.html
+ * or
+ * https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/text/SimpleDateFormat.html)).
+ * </ul>
+ *
+ * @param tReloadEveryNMinutes indicates how often the source should
+ * be checked for new data.
+ * @param tFileDir the base directory where the files are
+ * located.
+ * For EDDTableFromHyraxFiles, this is the url
+ * of the main .html page,
+ * e.g.,
+ * http://biloxi-bay.ssc.hpc.msstate.edu/dods-bin/nph-dods/WCOS/nmsp/wcos/
+ * @param tFileNameRegex the regex which determines which files in
+ * the directories are to be read (use .* for
+ * all)
+ *
+ * You can use .* for all, but it is better to
+ * be more specific.
+ * For example, .*\.nc will get all files with
+ * the extension .nc.
+ * @param tRecursive if true, this class will look for files in
+ * the
+ * fileDir and all subdirectories
+ * @param tMetadataFrom this indicates the file to be used
+ * to extract source metadata (first/last based
+ * on sorted file lastModifiedTime).
+ * Valid values are "first", "penultimate",
+ * "last".
+ * @param tCharset the charset; relevant for ASCII files only
+ * @param tColumnNamesRow the number of the row with column names
+ * (1..; usually 1, may be 0 (none)); relevant
+ * for ASCII files only.
+ * @param tFirstDataRow the number of the first row with data
+ * (1..; usually 2); relevant for ASCII files
+ * only.
+ * @param tColumnSeparator the character that separates the columns.
+ * Use "" or null to have this method guess.
+ * Otherwise,
+ * the first character of this string will be
+ * used.
+ * @param tPreExtractRegex may be "" or null if not needed.
+ * If present, this usually begins with "^" to
+ * match the beginning of the file name.
+ * If present, this is used to remove text from
+ * the start of the file name.
+ * The removal only occurs if the regex is
+ * matched.
+ * @param tPostExtractRegex may be "" or null if not needed.
+ * If present, this usually ends with "$" to
+ * match the end of the file name.
+ * If present, this is used to remove text from
+ * the end of the file name.
+ * The removal only occurs if the regex is
+ * matched.
+ * @param tExtractRegex may be "" or null if not needed.
+ * Use ".*" to match the entire file name.
+ * If present, this is used after
+ * preExtractRegex and postExtractRegex
+ * to extract a string from the file name
+ * (e.g., stationID).
+ * If the regex isn't matched, the entire file
+ * name is used (minus preExtract and
+ * postExtract).
+ * @param tColumnNameForExtract the data column name for the extracted
+ * Strings.
+ * This column name must be in the
+ * tDataVariables list as a source column name
+ * (with any data type).
+ * @param tSortedColumnSourceName the source name of a timeStamp column or
+ * the numeric column that the data files are
+ * usually already sorted by
+ * within each file (use null or "" for none),
+ * e.g., "time".
+ * It is ok if not all files are sorted by this
+ * column.
+ * If present, this can greatly speed up some
+ * data requests.
+ * @param tSortFilesBySourceNames is a comma(or space)-separated list of
+ * source variable names
+ * specifying how the internal list of files
+ * should be sorted (in ascending order).
+ *
+ * It is the minimum value of the specified
+ * columns in each file that is used for
+ * sorting.
+ *
+ * When a data request is filled, data is
+ * obtained from the files in this order.
+ *
+ * Thus it largely determines the overall order
+ * of the data in the response.
+ *
+ * If you specify more than one column name,
+ *
+ * the second name is used if there is a tie
+ * for the first column;
+ *
+ * the third is used if there is a tie for the
+ * first and second columns; ...
+ *
+ * It is optional (the default is
+ * fileDir+fileName order).
* @param tSourceNeedsExpandedFP_EQ
* @param tRemoveMVRows
- * @param tStandardizeWhat Use -1 or MAX_VALUE to indicate that you want the
- * subclasses default value.
- * @param tNThreads Use -1 or MAX_VALUE to indicate that you want the
- * default ERDDAP value from datasets.xml.
+ * @param tStandardizeWhat Use -1 or MAX_VALUE to indicate that you
+ * want the
+ * subclass's default value.
+ * @param tNThreads Use -1 or MAX_VALUE to indicate that you
+ * want the
+ * default ERDDAP value from datasets.xml.
* @throws Throwable if trouble
*/
- public EDDTableFromFiles(String tClassName, String tDatasetID,
- String tAccessibleTo, String tGraphsAccessibleTo,
- StringArray tOnChange, String tFgdcFile, String tIso19115File,
- String tSosOfferingPrefix,
- String tDefaultDataQuery, String tDefaultGraphQuery,
- Attributes tAddGlobalAttributes,
- Object[][] tDataVariables,
- int tReloadEveryNMinutes, int tUpdateEveryNMillis,
- String tFileDir, String tFileNameRegex, boolean tRecursive, String tPathRegex,
- String tMetadataFrom, String tCharset,
- String tSkipHeaderToRegex, String tSkipLinesRegex,
- int tColumnNamesRow, int tFirstDataRow, String tColumnSeparator,
- String tPreExtractRegex, String tPostExtractRegex, String tExtractRegex,
- String tColumnNameForExtract,
- String tSortedColumnSourceName, String tSortFilesBySourceNames,
- boolean tSourceNeedsExpandedFP_EQ, boolean tFileTableInMemory,
- boolean tAccessibleViaFiles, boolean tRemoveMVRows,
- int tStandardizeWhat, int tNThreads,
- String tCacheFromUrl, int tCacheSizeGB, String tCachePartialPathRegex,
- String tAddVariablesWhere)
- throws Throwable {
-
- if (verbose) String2.log(
- "\n*** constructing EDDTableFromFiles " + tDatasetID);
+ public EDDTableFromFiles(String tClassName, String tDatasetID,
+ String tAccessibleTo, String tGraphsAccessibleTo,
+ StringArray tOnChange, String tFgdcFile, String tIso19115File,
+ String tSosOfferingPrefix,
+ String tDefaultDataQuery, String tDefaultGraphQuery,
+ Attributes tAddGlobalAttributes,
+ Object[][] tDataVariables,
+ int tReloadEveryNMinutes, int tUpdateEveryNMillis,
+ String tFileDir, String tFileNameRegex, boolean tRecursive, String tPathRegex,
+ String tMetadataFrom, String tCharset,
+ String tSkipHeaderToRegex, String tSkipLinesRegex,
+ int tColumnNamesRow, int tFirstDataRow, String tColumnSeparator,
+ String tPreExtractRegex, String tPostExtractRegex, String tExtractRegex,
+ String tColumnNameForExtract,
+ String tSortedColumnSourceName, String tSortFilesBySourceNames,
+ boolean tSourceNeedsExpandedFP_EQ, boolean tFileTableInMemory,
+ boolean tAccessibleViaFiles, boolean tRemoveMVRows,
+ int tStandardizeWhat, int tNThreads,
+ String tCacheFromUrl, int tCacheSizeGB, String tCachePartialPathRegex,
+ String tAddVariablesWhere)
+ throws Throwable {
+
+ if (verbose)
+ String2.log(
+ "\n*** constructing EDDTableFromFiles " + tDatasetID);
long constructionStartMillis = System.currentTimeMillis();
- String errorInMethod = "Error in EDDTableFromFiles(" +
- tDatasetID + ") constructor:\n";
-
- //save some of the parameters
+ String errorInMethod = "Error in EDDTableFromFiles(" +
+ tDatasetID + ") constructor:\n";
+
+ // save some of the parameters
className = tClassName;
- datasetID = tDatasetID;
-
- //ensure valid for creation of datasetInfo files below
- if (!String2.isFileNameSafe(datasetID))
- throw new IllegalArgumentException(errorInMethod +
- "datasetID=" + datasetID + " isn't fileNameSafe.");
- File2.makeDirectory(datasetDir()); //based on datasetID
- String dirTableFileName = datasetDir() + DIR_TABLE_FILENAME;
+ datasetID = tDatasetID;
+
+ // ensure valid for creation of datasetInfo files below
+ if (!String2.isFileNameSafe(datasetID))
+ throw new IllegalArgumentException(errorInMethod +
+ "datasetID=" + datasetID + " isn't fileNameSafe.");
+ File2.makeDirectory(datasetDir()); // based on datasetID
+ String dirTableFileName = datasetDir() + DIR_TABLE_FILENAME;
String fileTableFileName = datasetDir() + FILE_TABLE_FILENAME;
setAccessibleTo(tAccessibleTo);
@@ -908,7 +1062,7 @@ public EDDTableFromFiles(String tClassName, String tDatasetID,
fileDir = File2.addSlash(tFileDir);
fileNameRegex = tFileNameRegex;
recursive = tRecursive;
- pathRegex = tPathRegex == null || tPathRegex.length() == 0? ".*": tPathRegex;
+ pathRegex = tPathRegex == null || tPathRegex.length() == 0 ? ".*" : tPathRegex;
metadataFrom = tMetadataFrom;
charset = tCharset;
skipHeaderToRegex = tSkipHeaderToRegex;
@@ -916,10 +1070,10 @@ public EDDTableFromFiles(String tClassName, String tDatasetID,
columnNamesRow = tColumnNamesRow;
firstDataRow = tFirstDataRow;
columnSeparator = tColumnSeparator;
- standardizeWhat = tStandardizeWhat < 0 || tStandardizeWhat == Integer.MAX_VALUE?
- defaultStandardizeWhat() : tStandardizeWhat;
+ standardizeWhat = tStandardizeWhat < 0 || tStandardizeWhat == Integer.MAX_VALUE ? defaultStandardizeWhat()
+ : tStandardizeWhat;
accessibleViaFiles = EDStatic.filesActive && tAccessibleViaFiles;
- nThreads = tNThreads;
+ nThreads = tNThreads;
preExtractRegex = tPreExtractRegex;
postExtractRegex = tPostExtractRegex;
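
// A small sketch of the "-1 or Integer.MAX_VALUE means: use the default"
// convention applied above to standardizeWhat (and, per the javadoc, to
// nThreads). orDefault is a hypothetical helper, not ERDDAP code.
public class DefaultingSketch {
    static int orDefault(int requested, int dflt) {
        return requested < 0 || requested == Integer.MAX_VALUE ? dflt : requested;
    }

    public static void main(String[] args) {
        System.out.println(orDefault(-1, 4));                 // 4: caller wants the default
        System.out.println(orDefault(Integer.MAX_VALUE, 4));  // 4: caller wants the default
        System.out.println(orDefault(8, 4));                  // 8: explicit value wins
    }
}
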
@@ -929,20 +1083,21 @@ public EDDTableFromFiles(String tClassName, String tDatasetID,
sortedColumnSourceName = tSortedColumnSourceName;
int ndv = tDataVariables.length;
+ removeMVRows = tRemoveMVRows;
+
if (String2.isSomething(tCacheFromUrl) && !String2.isRemote(tCacheFromUrl))
throw new IllegalArgumentException(errorInMethod + "'cacheFromUrl' must be a URL.");
tCacheFromUrl = File2.addSlash(tCacheFromUrl);
- cacheFromUrl = String2.isRemote(tCacheFromUrl)? tCacheFromUrl : null;
- cacheMaxSizeB = tCacheSizeGB <= 0 || tCacheSizeGB == Integer.MAX_VALUE? -1 : //<=0 = copy all
- tCacheSizeGB * Math2.BytesPerGB;
- cachePartialPathRegex = String2.isSomething(tCachePartialPathRegex)?
- tCachePartialPathRegex : null;
+ cacheFromUrl = String2.isRemote(tCacheFromUrl) ? tCacheFromUrl : null;
+ cacheMaxSizeB = tCacheSizeGB <= 0 || tCacheSizeGB == Integer.MAX_VALUE ? -1 : // <=0 = copy all
+ tCacheSizeGB * Math2.BytesPerGB;
+ cachePartialPathRegex = String2.isSomething(tCachePartialPathRegex) ? tCachePartialPathRegex : null;
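
// A sketch of the cacheSizeGB handling above: <=0 or MAX_VALUE is stored as
// -1 (meaning "copy all", no size cap), otherwise GB are converted to bytes
// with long arithmetic. BYTES_PER_GB is a stand-in for Math2.BytesPerGB and
// assumes binary GB.
public class CacheSizeSketch {
    static final long BYTES_PER_GB = 1024L * 1024L * 1024L;

    static long cacheMaxSizeB(int cacheSizeGB) {
        return cacheSizeGB <= 0 || cacheSizeGB == Integer.MAX_VALUE
                ? -1                            // <=0 = copy all
                : cacheSizeGB * BYTES_PER_GB;
    }

    public static void main(String[] args) {
        System.out.println(cacheMaxSizeB(0));   // -1
        System.out.println(cacheMaxSizeB(10));  // 10737418240
    }
}
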
- //class-specific things
+ // class-specific things
if (className.equals("EDDTableFromHttpGet")) {
- setHttpGetRequiredVariableNames( tAddGlobalAttributes.getString(HTTP_GET_REQUIRED_VARIABLES));
- setHttpGetDirectoryStructure( tAddGlobalAttributes.getString(HTTP_GET_DIRECTORY_STRUCTURE));
- setHttpGetKeys( tAddGlobalAttributes.getString(HTTP_GET_KEYS));
+ setHttpGetRequiredVariableNames(tAddGlobalAttributes.getString(HTTP_GET_REQUIRED_VARIABLES));
+ setHttpGetDirectoryStructure(tAddGlobalAttributes.getString(HTTP_GET_DIRECTORY_STRUCTURE));
+ setHttpGetKeys(tAddGlobalAttributes.getString(HTTP_GET_KEYS));
tAddGlobalAttributes.remove(HTTP_GET_KEYS);
} else if (className.equals("EDDTableFromMultidimNcFiles")) {
@@ -953,8 +1108,9 @@ public EDDTableFromFiles(String tClassName, String tDatasetID,
treatDimensionsAs = new String[nParts][];
for (int part = 0; part < nParts; part++) {
treatDimensionsAs[part] = String2.split(parts[part], ',');
- if (reallyVerbose) String2.log(TREAT_DIMENSIONS_AS + "[" + part +
- "] was set to " + String2.toCSSVString(treatDimensionsAs[part]));
+ if (reallyVerbose)
+ String2.log(TREAT_DIMENSIONS_AS + "[" + part +
+ "] was set to " + String2.toCSSVString(treatDimensionsAs[part]));
}
}
tAddGlobalAttributes.remove(TREAT_DIMENSIONS_AS);
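
// A sketch of parsing a treatDimensionsAs value such as "Lat,Lon,Time;Row,Obs"
// into String[nParts][]: lists are assumed to be separated by ';' and dimension
// names within a list by ','. Plain String.split stands in for String2.split.
import java.util.Arrays;

public class TreatDimensionsAsSketch {
    public static void main(String[] args) {
        String att = "Lat,Lon,Time;Row,Obs";   // hypothetical attribute value
        String[] parts = att.split(";");
        String[][] treatDimensionsAs = new String[parts.length][];
        for (int part = 0; part < parts.length; part++)
            treatDimensionsAs[part] = parts[part].trim().split(",");
        // "Lat,Lon,Time" says: treat Lat and Lon as if they were Time (the last name)
        System.out.println(Arrays.deepToString(treatDimensionsAs));
    }
}
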
@@ -965,179 +1121,193 @@ public EDDTableFromFiles(String tClassName, String tDatasetID,
filesAreLocal = !String2.isTrulyRemote(fileDir);
if (filesAreLocal)
fileDir = File2.addSlash(fileDir);
- if (fileNameRegex == null || fileNameRegex.length() == 0)
+ if (fileNameRegex == null || fileNameRegex.length() == 0)
fileNameRegex = ".*";
- if (metadataFrom == null) metadataFrom = "";
- if (metadataFrom.length() == 0) metadataFrom = MF_LAST;
- if (!metadataFrom.equals(MF_FIRST) &&
- !metadataFrom.equals(MF_LAST))
+ if (metadataFrom == null)
+ metadataFrom = "";
+ if (metadataFrom.length() == 0)
+ metadataFrom = MF_LAST;
+ if (!metadataFrom.equals(MF_FIRST) &&
+ !metadataFrom.equals(MF_LAST))
throw new IllegalArgumentException("datasets.xml error: " +
- "metadataFrom=" + metadataFrom + " must be " +
- MF_FIRST + " or " + MF_LAST + ".");
+ "metadataFrom=" + metadataFrom + " must be " +
+ MF_FIRST + " or " + MF_LAST + ".");
if (columnNamesRow < 0 || columnNamesRow > 10000)
throw new IllegalArgumentException("datasets.xml error: " +
- "columnNamesRow=" + columnNamesRow + " must be between 0 and 10000.");
+ "columnNamesRow=" + columnNamesRow + " must be between 0 and 10000.");
if (firstDataRow <= columnNamesRow || firstDataRow > 10000)
throw new IllegalArgumentException("datasets.xml error: " +
- "firstDataRow=" + firstDataRow + " must be between " +
- (columnNamesRow+1) + " and 10000.");
- if (preExtractRegex == null) preExtractRegex = "";
- if (postExtractRegex == null) postExtractRegex = "";
- if (extractRegex == null) extractRegex = "";
- if (columnNameForExtract == null) columnNameForExtract = "";
+ "firstDataRow=" + firstDataRow + " must be between " +
+ (columnNamesRow + 1) + " and 10000.");
+ if (preExtractRegex == null)
+ preExtractRegex = "";
+ if (postExtractRegex == null)
+ postExtractRegex = "";
+ if (extractRegex == null)
+ extractRegex = "";
+ if (columnNameForExtract == null)
+ columnNameForExtract = "";
if (extractRegex.length() == 0 && columnNameForExtract.length() > 0)
throw new IllegalArgumentException("datasets.xml error: " +
- "columnNameForExtract=" + columnNameForExtract +
- " but extractRegex=\"\". It should be something, e.g., \".*\".");
+ "columnNameForExtract=" + columnNameForExtract +
+ " but extractRegex=\"\". It should be something, e.g., \".*\".");
if (columnNameForExtract.length() == 0 && extractRegex.length() > 0)
throw new IllegalArgumentException("datasets.xml error: " +
- "extractRegex=" + extractRegex +
- " but columnNameForExtract=\"\". It should be something.");
+ "extractRegex=" + extractRegex +
+ " but columnNameForExtract=\"\". It should be something.");
- preExtractPattern = preExtractRegex.length() == 0? null : Pattern.compile(preExtractRegex);
- postExtractPattern = postExtractRegex.length() == 0? null : Pattern.compile(postExtractRegex);
- extractPattern = extractRegex.length() == 0? null : Pattern.compile(extractRegex);
- if (sortedColumnSourceName == null) sortedColumnSourceName = "";
+ preExtractPattern = preExtractRegex.length() == 0 ? null : Pattern.compile(preExtractRegex);
+ postExtractPattern = postExtractRegex.length() == 0 ? null : Pattern.compile(postExtractRegex);
+ extractPattern = extractRegex.length() == 0 ? null : Pattern.compile(extractRegex);
+ if (sortedColumnSourceName == null)
+ sortedColumnSourceName = "";
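
// A minimal, self-contained sketch of the pre/post/extract pipeline whose
// patterns are compiled above: preExtractRegex trims the start of the file
// name, postExtractRegex trims the end, then extractRegex selects the value
// (e.g., a stationID). If extractRegex doesn't match, the trimmed name itself
// is used. The regexes and file name below are illustrative, not from the diff.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ExtractSketch {
    static String extract(String fileName, Pattern pre, Pattern post, Pattern ext) {
        Matcher m;
        if (pre != null && (m = pre.matcher(fileName)).find() && m.start() == 0)
            fileName = fileName.substring(m.end());      // remove matched prefix
        if (post != null && (m = post.matcher(fileName)).find())
            fileName = fileName.substring(0, m.start()); // remove matched suffix
        if (ext != null && (m = ext.matcher(fileName)).find())
            return m.group();                            // the extracted string
        return fileName;                                 // fallback: whole trimmed name
    }

    public static void main(String[] args) {
        Pattern pre  = Pattern.compile("^wcos_");
        Pattern post = Pattern.compile("\\.nc$");
        Pattern ext  = Pattern.compile("[A-Z]+\\d+");
        System.out.println(extract("wcos_ANO001_2008.nc", pre, post, ext)); // ANO001
    }
}
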
- //note sourceDataNames, sourceDataTypes
- // and do special things for special sourceNames
+ // note sourceDataNames, sourceDataTypes
+ // and do special things for special sourceNames
sourceDataNames = new StringArray();
safeSourceDataNames = new StringArray();
sourceDataTypes = new String[ndv];
boolean isColumnarAscii = className.equals("EDDTableFromColumnarAsciiFiles");
if (isColumnarAscii) {
- startColumn = new int[ndv]; //all 0's
- stopColumn = new int[ndv]; //all 0's
+ startColumn = new int[ndv]; // all 0's
+ stopColumn = new int[ndv]; // all 0's
}
for (int dv = 0; dv < ndv; dv++) {
- String tSourceName = (String)tDataVariables[dv][0];
+ String tSourceName = (String) tDataVariables[dv][0];
sourceDataNames.add(tSourceName);
safeSourceDataNames.add(String2.encodeVariableNameSafe(tSourceName));
- sourceDataTypes[dv] = (String)tDataVariables[dv][3];
+ sourceDataTypes[dv] = (String) tDataVariables[dv][3];
if (sourceDataTypes[dv] == null || sourceDataTypes[dv].length() == 0)
throw new IllegalArgumentException("Unspecified data type for var#" + dv + ".");
- //note timeIndex
- String tDestName = (String)tDataVariables[dv][1];
+ // note timeIndex
+ String tDestName = (String) tDataVariables[dv][1];
if (EDV.TIME_NAME.equals(tDestName) ||
- ((tDestName == null || tDestName.trim().length() == 0) &&
- EDV.TIME_NAME.equals(tSourceName)))
+ ((tDestName == null || tDestName.trim().length() == 0) &&
+ EDV.TIME_NAME.equals(tSourceName)))
timeIndex = dv;
- Attributes atts = (Attributes)tDataVariables[dv][2];
+ Attributes atts = (Attributes) tDataVariables[dv][2];
- //do things for special variable types
+ // do things for special variable types
if (tSourceName.startsWith("=")) {
- scriptNeedsColumns.put(tSourceName, Script2.jexlScriptNeedsColumns(tSourceName)); //needsColumns.size() may be 0
+ scriptNeedsColumns.put(tSourceName, Script2.jexlScriptNeedsColumns(tSourceName)); // needsColumns.size()
+ // may be 0
} else if (tSourceName.startsWith("global:")) {
- //do nothing for column types that aren't in source file
+ // do nothing for column types that aren't in source file
} else if (tSourceName.startsWith("variable:")) {
- //validate syntax
+ // validate syntax
String ttName = tSourceName.substring(9);
int po = ttName.indexOf(':');
if (po <= 0 || po == tSourceName.length() - 1)
- throw new IllegalArgumentException("sourceName=" +
- tSourceName + " must be in the form: variable:[varName]:[attName] .");
+ throw new IllegalArgumentException("sourceName=" +
+ tSourceName + " must be in the form: variable:[varName]:[attName] .");
} else if (tSourceName.equals(columnNameForExtract) ||
- tSourceName.startsWith("***")) {
- //do nothing for column types that aren't in source file
+ tSourceName.startsWith("***")) {
+ // do nothing for column types that aren't in source file
} else {
if (isColumnarAscii) {
- //required
+ // required
startColumn[dv] = atts.getInt("startColumn");
- stopColumn[dv] = atts.getInt("stopColumn");
- Test.ensureBetween(startColumn[dv], 0, 1000000,
- "Invalid startColumn attribute for destinationName=" + tDestName);
- Test.ensureBetween(stopColumn[dv], startColumn[dv] + 1, 1000000,
- "Invalid stopColumn attribute for destinationName=" + tDestName);
+ stopColumn[dv] = atts.getInt("stopColumn");
+ Test.ensureBetween(startColumn[dv], 0, 1000000,
+ "Invalid startColumn attribute for destinationName=" + tDestName);
+ Test.ensureBetween(stopColumn[dv], startColumn[dv] + 1, 1000000,
+ "Invalid stopColumn attribute for destinationName=" + tDestName);
atts.remove("startColumn");
atts.remove("stopColumn");
}
}
}
- //EDDTableFromColumnarAscii needs this
+ // EDDTableFromColumnarAscii needs this
dataVariableSourceNames = sourceDataNames.toArray();
- if (reallyVerbose) String2.log("sourceDataNames=" + sourceDataNames +
- "\nsourceDataTypes=" + String2.toCSSVString(sourceDataTypes));
+ if (reallyVerbose)
+ String2.log("sourceDataNames=" + sourceDataNames +
+ "\nsourceDataTypes=" + String2.toCSSVString(sourceDataTypes));
if (sortedColumnSourceName.length() > 0) {
sortedDVI = sourceDataNames.indexOf(sortedColumnSourceName);
if (sortedDVI < 0)
throw new IllegalArgumentException("sortedColumnSourceName=" +
- sortedColumnSourceName +
- " isn't among the source data variable names.");
- String tName = (String)tDataVariables[sortedDVI][1]; //destName
+ sortedColumnSourceName +
+ " isn't among the source data variable names.");
+ String tName = (String) tDataVariables[sortedDVI][1]; // destName
if (!String2.isSomething(tName))
- tName = (String)tDataVariables[sortedDVI][0]; //sourceName
- Attributes tAtts = (Attributes)tDataVariables[sortedDVI][2];
- String tUnits = tAtts == null? null : tAtts.getString("units");
+ tName = (String) tDataVariables[sortedDVI][0]; // sourceName
+ Attributes tAtts = (Attributes) tDataVariables[sortedDVI][2];
+ String tUnits = tAtts == null ? null : tAtts.getString("units");
if (tName.equals("time") ||
- Calendar2.isTimeUnits(tUnits) ||
- !"String".equals(sourceDataTypes[sortedDVI])) {
- //okay
+ Calendar2.isTimeUnits(tUnits) ||
+ !"String".equals(sourceDataTypes[sortedDVI])) {
+ // okay
} else {
throw new IllegalArgumentException(
- "sortedColumnSourceName must be a time or numeric column.");
+ "sortedColumnSourceName must be a time or numeric column.");
}
}
extractedColNameIndex = -1;
- if (columnNameForExtract.length() > 0) {
+ if (columnNameForExtract.length() > 0) {
extractedColNameIndex = sourceDataNames.indexOf(columnNameForExtract);
if (extractedColNameIndex < 0)
- throw new IllegalArgumentException("columnNameForExtract=" +
- columnNameForExtract +
- " isn't among the source data variable names.");
+ throw new IllegalArgumentException("columnNameForExtract=" +
+ columnNameForExtract +
+ " isn't among the source data variable names.");
else if (extractPattern == null)
- throw new IllegalArgumentException("columnNameForExtract=" +
- columnNameForExtract + " but extractRegex wasn't specified.");
+ throw new IllegalArgumentException("columnNameForExtract=" +
+ columnNameForExtract + " but extractRegex wasn't specified.");
}
- //if (reallyVerbose) String2.log(
- // "columnNameForExtract=" + columnNameForExtract + " extractedColNameIndex=" + extractedColNameIndex);
+ // if (reallyVerbose) String2.log(
+ // "columnNameForExtract=" + columnNameForExtract + " extractedColNameIndex=" +
+ // extractedColNameIndex);
- //This class can handle some constraints;
- //PARTIAL passes all through to getDataForDapQuery,
- //but also does them again in standardizeResultsTable
+ // This class can handle some constraints;
+ // PARTIAL passes all through to getDataForDapQuery,
+ // but also does them again in standardizeResultsTable
sourceNeedsExpandedFP_EQ = tSourceNeedsExpandedFP_EQ;
- sourceCanConstrainNumericData = CONSTRAIN_PARTIAL; //all partially handled
- sourceCanConstrainStringData = CONSTRAIN_PARTIAL; //all partially handled
- sourceCanConstrainStringRegex = PrimitiveArray.REGEX_OP; //partially
+ sourceCanConstrainNumericData = CONSTRAIN_PARTIAL; // all partially handled
+ sourceCanConstrainStringData = CONSTRAIN_PARTIAL; // all partially handled
+ sourceCanConstrainStringRegex = PrimitiveArray.REGEX_OP; // partially
- //load cached dirTable->dirList
- dirTable = tryToLoadDirFileTable(dirTableFileName); //may be null
+ // load cached dirTable->dirList
+ dirTable = tryToLoadDirFileTable(dirTableFileName); // may be null
if (dirTable != null) {
- if (verbose) String2.log(
- dirTable.nRows() + " rows in dirTable");
- if (reallyVerbose) String2.log(
- "first 5 rows=\n" +
- dirTable.dataToString(5));
+ if (verbose)
+ String2.log(
+ dirTable.nRows() + " rows in dirTable");
+ if (reallyVerbose)
+ String2.log(
+ "first 5 rows=\n" +
+ dirTable.dataToString(5));
}
- //load cached fileTable
- fileTable = tryToLoadDirFileTable(fileTableFileName); //may be null
+ // load cached fileTable
+ fileTable = tryToLoadDirFileTable(fileTableFileName); // may be null
if (fileTable != null) {
- if (verbose) String2.log(
- fileTable.nRows() + " rows in fileTable");
- if (reallyVerbose) String2.log(
- "first 5 rows=\n" +
- fileTable.dataToString(5));
+ if (verbose)
+ String2.log(
+ fileTable.nRows() + " rows in fileTable");
+ if (reallyVerbose)
+ String2.log(
+ "first 5 rows=\n" +
+ fileTable.dataToString(5));
}
- //ensure fileTable has correct columns and data types
+ // ensure fileTable has correct columns and data types
if (fileTable != null) {
- //In case variable order has been changed in datasets.xml,
- // try to reorder fileTable to desiredOrder.
- //This will be super fast if already in order.
+ // In case variable order has been changed in datasets.xml,
+ // try to reorder fileTable to desiredOrder.
+ // This will be super fast if already in order.
StringArray desiredOrder = new StringArray();
- desiredOrder.add("dirIndex"); //FT_DIR_INDEX_COL
+ desiredOrder.add("dirIndex"); // FT_DIR_INDEX_COL
desiredOrder.add("fileName");
desiredOrder.add("lastMod");
desiredOrder.add("size");
@@ -1147,31 +1317,39 @@ else if (extractPattern == null)
desiredOrder.add(safeSourceDataNames.get(dv) + MAX_SUFFIX);
desiredOrder.add(safeSourceDataNames.get(dv) + "_hasNaN_");
}
- //reorder and ensure all are present
- boolean ok = fileTable.reorderColumns(desiredOrder, true) == desiredOrder.size(); //discardOthers
-
- //then test if all cols have expected types
- if (!ok) {} //don't do other tests
- else if (!(fileTable.getColumn(FT_DIR_INDEX_COL) instanceof ShortArray)) ok = false;
- else if (!(fileTable.getColumn(FT_FILE_LIST_COL) instanceof StringArray)) ok = false;
- else if (!(fileTable.getColumn(FT_LAST_MOD_COL) instanceof LongArray)) ok = false;
- else if (!(fileTable.getColumn(FT_SIZE_COL) instanceof LongArray)) ok = false;
- else if (!(fileTable.getColumn(FT_SORTED_SPACING_COL) instanceof DoubleArray)) ok = false;
- else for (int dv = 0; dv < ndv; dv++) {
- String sdt = sourceDataTypes[dv];
- if (sdt.equals("boolean"))
- sdt = "byte";
- if (!fileTable.getColumn(dv0 + dv*3 + 0).elementTypeString().equals(sdt) ||
- !fileTable.getColumn(dv0 + dv*3 + 1).elementTypeString().equals(sdt) ||
- !fileTable.getColumn(dv0 + dv*3 + 2).elementTypeString().equals("byte")) {
- ok = false;
- break;
+ // reorder and ensure all are present
+ boolean ok = fileTable.reorderColumns(desiredOrder, true) == desiredOrder.size(); // discardOthers
+
+ // then test if all cols have expected types
+ if (!ok) {
+ } // don't do other tests
+ else if (!(fileTable.getColumn(FT_DIR_INDEX_COL) instanceof ShortArray))
+ ok = false;
+ else if (!(fileTable.getColumn(FT_FILE_LIST_COL) instanceof StringArray))
+ ok = false;
+ else if (!(fileTable.getColumn(FT_LAST_MOD_COL) instanceof LongArray))
+ ok = false;
+ else if (!(fileTable.getColumn(FT_SIZE_COL) instanceof LongArray))
+ ok = false;
+ else if (!(fileTable.getColumn(FT_SORTED_SPACING_COL) instanceof DoubleArray))
+ ok = false;
+ else
+ for (int dv = 0; dv < ndv; dv++) {
+ String sdt = sourceDataTypes[dv];
+ if (sdt.equals("boolean"))
+ sdt = "byte";
+ if (!fileTable.getColumn(dv0 + dv * 3 + 0).elementTypeString().equals(sdt) ||
+ !fileTable.getColumn(dv0 + dv * 3 + 1).elementTypeString().equals(sdt) ||
+ !fileTable.getColumn(dv0 + dv * 3 + 2).elementTypeString().equals("byte")) {
+ ok = false;
+ break;
+ }
}
- }
if (!ok) {
- try { //added 2016-05-20
- String2.log("Old fileTable discarded because of incorrect column names and/or data types (first 2 rows):");
- String2.log(fileTable.toString(2)); //separate in case of trouble
+ try { // added 2016-05-20
+ String2.log(
+ "Old fileTable discarded because of incorrect column names and/or data types (first 2 rows):");
+ String2.log(fileTable.toString(2)); // separate in case of trouble
} catch (Throwable t2) {
String2.log(MustBe.throwableToString(t2));
}
@@ -1179,65 +1357,65 @@ else if (extractPattern == null)
}
}
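
// A sketch of the fileTable column layout implied by the checks above: 5 fixed
// columns (dirIndex, fileName, lastMod, size, sortedSpacing), then one
// (min, max, hasNaN) triplet per data variable, so variable dv's columns start
// at dv0 + dv*3. DV0 = 5 is an assumption based on the 5 fixed columns.
public class FileTableLayoutSketch {
    static final int DV0 = 5; // first per-variable column

    static int minCol(int dv)    { return DV0 + dv * 3; }
    static int maxCol(int dv)    { return DV0 + dv * 3 + 1; }
    static int hasNaNCol(int dv) { return DV0 + dv * 3 + 2; }

    public static void main(String[] args) {
        // columns for the 3rd data variable (dv = 2):
        System.out.println(minCol(2) + " " + maxCol(2) + " " + hasNaNCol(2)); // 11 12 13
    }
}
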
-
- //load badFileMap
+ // load badFileMap
ConcurrentHashMap badFileMap = readBadFileMap();
-
- //if trouble reading any, recreate all
+
+ // if trouble reading any, recreate all
if (dirTable == null || fileTable == null || badFileMap == null) {
- if (verbose) String2.log("creating new dirTable and fileTable " +
- "(dirTable=null?" + (dirTable==null) +
- " fileTable=null?" + (fileTable==null) +
- " badFileMap=null?" + (badFileMap==null) + ")");
+ if (verbose)
+ String2.log("creating new dirTable and fileTable " +
+ "(dirTable=null?" + (dirTable == null) +
+ " fileTable=null?" + (fileTable == null) +
+ " badFileMap=null?" + (badFileMap == null) + ")");
dirTable = new Table();
dirTable.addColumn("dirName", new StringArray());
fileTable = new Table();
- fileTable.addColumn("dirIndex", new ShortArray()); //col 0=FT_DIR_INDEX_COL
- fileTable.addColumn("fileName", new StringArray()); //col 1=FT_FILE_NAME_COL
- fileTable.addColumn("lastMod", new LongArray()); //col 2=FT_LAST_MOD_COL
- fileTable.addColumn("size", new LongArray()); //col 3=FT_SIZE_COL
- fileTable.addColumn("sortedSpacing", new DoubleArray()); //col 4=FT_SORTED_SPACING_COL
+ fileTable.addColumn("dirIndex", new ShortArray()); // col 0=FT_DIR_INDEX_COL
+ fileTable.addColumn("fileName", new StringArray()); // col 1=FT_FILE_NAME_COL
+ fileTable.addColumn("lastMod", new LongArray()); // col 2=FT_LAST_MOD_COL
+ fileTable.addColumn("size", new LongArray()); // col 3=FT_SIZE_COL
+ fileTable.addColumn("sortedSpacing", new DoubleArray()); // col 4=FT_SORTED_SPACING_COL
for (int dv = 0; dv < ndv; dv++) {
- String sdt = sourceDataTypes[dv]; //booleans handled correctly below
- fileTable.addColumn(safeSourceDataNames.get(dv) + MIN_SUFFIX,
- PrimitiveArray.factory(PAType.fromCohortString(sdt), 8, false));
- fileTable.addColumn(safeSourceDataNames.get(dv) + MAX_SUFFIX,
- PrimitiveArray.factory(PAType.fromCohortString(sdt), 8, false));
- fileTable.addColumn(safeSourceDataNames.get(dv) + "_hasNaN_",
- PrimitiveArray.factory(PAType.BYTE, 8, false));
+ String sdt = sourceDataTypes[dv]; // booleans handled correctly below
+ fileTable.addColumn(safeSourceDataNames.get(dv) + MIN_SUFFIX,
+ PrimitiveArray.factory(PAType.fromCohortString(sdt), 8, false));
+ fileTable.addColumn(safeSourceDataNames.get(dv) + MAX_SUFFIX,
+ PrimitiveArray.factory(PAType.fromCohortString(sdt), 8, false));
+ fileTable.addColumn(safeSourceDataNames.get(dv) + "_hasNaN_",
+ PrimitiveArray.factory(PAType.BYTE, 8, false));
}
badFileMap = newEmptyBadFileMap();
}
- //skip loading until after intial loadDatasets?
+ // skip loading until after initial loadDatasets?
if (EDStatic.allowDeferedLoading && fileTable.nRows() == 0 && EDStatic.initialLoadDatasets()) {
requestReloadASAP();
throw new RuntimeException(DEFER_LOADING_DATASET_BECAUSE + "fileTable.nRows=0.");
- }
-
- //get the PrimitiveArrays from fileTable
- StringArray dirList = (StringArray)dirTable.getColumn(0);
- ShortArray ftDirIndex = (ShortArray) fileTable.getColumn(FT_DIR_INDEX_COL); //0
- StringArray ftFileList = (StringArray)fileTable.getColumn(FT_FILE_LIST_COL); //1
- LongArray ftLastMod = (LongArray) fileTable.getColumn(FT_LAST_MOD_COL); //2
- LongArray ftSize = (LongArray) fileTable.getColumn(FT_SIZE_COL); //3
- DoubleArray ftSortedSpacing = (DoubleArray)fileTable.getColumn(FT_SORTED_SPACING_COL); //4
+ }
+
+ // get the PrimitiveArrays from fileTable
+ StringArray dirList = (StringArray) dirTable.getColumn(0);
+ ShortArray ftDirIndex = (ShortArray) fileTable.getColumn(FT_DIR_INDEX_COL); // 0
+ StringArray ftFileList = (StringArray) fileTable.getColumn(FT_FILE_LIST_COL); // 1
+ LongArray ftLastMod = (LongArray) fileTable.getColumn(FT_LAST_MOD_COL); // 2
+ LongArray ftSize = (LongArray) fileTable.getColumn(FT_SIZE_COL); // 3
+ DoubleArray ftSortedSpacing = (DoubleArray) fileTable.getColumn(FT_SORTED_SPACING_COL); // 4
String msg = "";
- //set up WatchDirectory
+ // set up WatchDirectory
if (updateEveryNMillis > 0) {
try {
if (EDStatic.useSharedWatchService) {
SharedWatchService.watchDirectory(fileDir, recursive, pathRegex, this, datasetID);
} else {
- watchDirectory = WatchDirectory.watchDirectoryAll(fileDir,
- recursive, pathRegex);
+ watchDirectory = WatchDirectory.watchDirectoryAll(fileDir,
+ recursive, pathRegex);
}
} catch (Throwable t) {
- updateEveryNMillis = 0; //disable the inotify system for this instance
+ updateEveryNMillis = 0; // disable the inotify system for this instance
String subject = String2.ERROR + " in " + datasetID + " constructor (inotify)";
msg = MustBe.throwableToString(t);
if (msg.indexOf("inotify instances") >= 0)
@@ -1247,80 +1425,83 @@ else if (extractPattern == null)
}
}
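
// A sketch (plain java.nio, not ERDDAP's WatchDirectory/SharedWatchService) of
// the pattern above: try to set up a directory watcher and, if the OS refuses
// (e.g., the inotify instance limit is hit), log and disable watching for this
// instance rather than failing the whole constructor.
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchService;

public class WatchSketch {
    static WatchService watchOrDisable(Path dir) {
        try {
            WatchService ws = FileSystems.getDefault().newWatchService();
            dir.register(ws, StandardWatchEventKinds.ENTRY_CREATE,
                    StandardWatchEventKinds.ENTRY_MODIFY,
                    StandardWatchEventKinds.ENTRY_DELETE);
            return ws;
        } catch (Throwable t) {
            System.err.println("watching disabled: " + t); // keep constructing anyway
            return null; // caller treats null like updateEveryNMillis = 0
        }
    }

    public static void main(String[] args) {
        Path tmp = Paths.get(System.getProperty("java.io.tmpdir"));
        System.out.println("watching=" + (watchOrDisable(tmp) != null));
    }
}
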
- //doQuickRestart?
- boolean doQuickRestart = fileTable.nRows() > 0 &&
- (testQuickRestart || (EDStatic.quickRestart && EDStatic.initialLoadDatasets()));
+ // doQuickRestart?
+ boolean doQuickRestart = fileTable.nRows() > 0 &&
+ (testQuickRestart || (EDStatic.quickRestart && EDStatic.initialLoadDatasets()));
if (verbose)
String2.log("doQuickRestart=" + doQuickRestart);
if (doQuickRestart) {
msg = "\nQuickRestart";
- //make the expected arrays based on info from a file
+ // make the expected arrays based on info from a file
makeExpected(tDataVariables, dirList, ftDirIndex, ftFileList, ftLastMod, ftSize);
} else {
- //!doQuickRestart
+ // !doQuickRestart
if (!filesAreLocal) {
- //if files are not local, throw away list of bad files,
- //so each will be retried again.
- //One failure shouldn't be considered permanent.
- //Downside: persistently bad files/urls will be rechecked repeatedly -- probably slow!
+ // if files are not local, throw away list of bad files,
+ // so each will be retried again.
+ // One failure shouldn't be considered permanent.
+ // Downside: persistently bad files/urls will be rechecked repeatedly --
+ // probably slow!
badFileMap = newEmptyBadFileMap();
}
-
- //if copy all remote files via taskThread, start those tasks now
+
+ // if copy all remote files via taskThread, start those tasks now
if (cacheFromUrl != null && cacheMaxSizeB <= 0) {
String cPathRegex = pathRegex;
if (cachePartialPathRegex != null) {
- //if this is same month, use cachePartialPathRegex
- String fileTableMonth = Calendar2.millisToIsoDateString(
- File2.getLastModified(fileTableFileName)).substring(0, 7); //0 if trouble
+ // if this is same month, use cachePartialPathRegex
+ String fileTableMonth = Calendar2.millisToIsoDateString(
+ File2.getLastModified(fileTableFileName)).substring(0, 7); // 0 if trouble
String currentMonth = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 7);
- if (reallyVerbose) String2.log("fileTableMonth=" + fileTableMonth +
- " currentMonth=" + currentMonth);
+ if (reallyVerbose)
+ String2.log("fileTableMonth=" + fileTableMonth +
+ " currentMonth=" + currentMonth);
if (currentMonth.equals(fileTableMonth))
cPathRegex = cachePartialPathRegex;
}
- EDStatic.makeCopyFileTasks(className, EDStatic.DefaultMaxMakeCopyFileTasks,
- datasetID, cacheFromUrl, fileNameRegex, recursive, cPathRegex,
- fileDir);
+ EDStatic.makeCopyFileTasks(className, EDStatic.DefaultMaxMakeCopyFileTasks,
+ datasetID, cacheFromUrl, fileNameRegex, recursive, cPathRegex,
+ fileDir);
}
- //get tFileList of available data files
+ // get tFileList of available data files
long elapsedTime = System.currentTimeMillis();
- //was tFileNames with dir+name
+ // was tFileNames with dir+name
Table tFileTable = getFileInfo(fileDir, fileNameRegex, recursive, pathRegex);
- StringArray tFileDirPA = (StringArray)(tFileTable.getColumn(FileVisitorDNLS.DIRECTORY));
- StringArray tFileNamePA = (StringArray)(tFileTable.getColumn(FileVisitorDNLS.NAME));
- LongArray tFileLastModPA = (LongArray) (tFileTable.getColumn(FileVisitorDNLS.LASTMODIFIED));
- LongArray tFileSizePA = (LongArray) (tFileTable.getColumn(FileVisitorDNLS.SIZE));
+ StringArray tFileDirPA = (StringArray) (tFileTable.getColumn(FileVisitorDNLS.DIRECTORY));
+ StringArray tFileNamePA = (StringArray) (tFileTable.getColumn(FileVisitorDNLS.NAME));
+ LongArray tFileLastModPA = (LongArray) (tFileTable.getColumn(FileVisitorDNLS.LASTMODIFIED));
+ LongArray tFileSizePA = (LongArray) (tFileTable.getColumn(FileVisitorDNLS.SIZE));
tFileTable.removeColumn(FileVisitorDNLS.SIZE);
int ntft = tFileNamePA.size();
- msg = ntft + " files found in " + fileDir +
- "\nregex=" + fileNameRegex + " recursive=" + recursive +
- " pathRegex=" + pathRegex +
- " time=" + (System.currentTimeMillis() - elapsedTime) + "ms";
+ msg = ntft + " files found in " + fileDir +
+ "\nregex=" + fileNameRegex + " recursive=" + recursive +
+ " pathRegex=" + pathRegex +
+ " time=" + (System.currentTimeMillis() - elapsedTime) + "ms";
if (ntft == 0)
- //Just exit. Don't delete the dirTable and fileTable files!
- //The problem may be that a drive isn't mounted.
+ // Just exit. Don't delete the dirTable and fileTable files!
+ // The problem may be that a drive isn't mounted.
throw new RuntimeException(msg);
- if (verbose) String2.log(msg);
+ if (verbose)
+ String2.log(msg);
msg = "";
- //switch to dir indexes
- ShortArray tFileDirIndexPA = new ShortArray(ntft, false);
- tFileTable.removeColumn(0); //tFileDirPA col
- tFileTable.addColumn(0, "dirIndex", tFileDirIndexPA); //col 0, matches fileTable
- tFileTable.setColumnName(1, "fileList"); //col 1, matches fileTable
+ // switch to dir indexes
+ ShortArray tFileDirIndexPA = new ShortArray(ntft, false);
+ tFileTable.removeColumn(0); // tFileDirPA col
+ tFileTable.addColumn(0, "dirIndex", tFileDirIndexPA); // col 0, matches fileTable
+ tFileTable.setColumnName(1, "fileList"); // col 1, matches fileTable
String lastDir = "\u0000";
int lastPo = -1;
for (int i = 0; i < ntft; i++) {
String tDir = tFileDirPA.get(i);
int po = lastPo;
- if (!tDir.equals(lastDir)) { //rare
- po = dirList.indexOf(tDir); //linear search, but should be short list
+ if (!tDir.equals(lastDir)) { // rare
+ po = dirList.indexOf(tDir); // linear search, but should be short list
if (po < 0) {
po = dirList.size();
dirList.add(tDir);
@@ -1330,90 +1511,93 @@ else if (extractPattern == null)
}
tFileDirIndexPA.addInt(po);
}
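
// A sketch of the "switch to dir indexes" loop above: each directory string is
// replaced by its index in a growing dirList, with a one-entry cache
// (lastDir/lastPo) since consecutive files usually share a directory.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DirIndexSketch {
    public static void main(String[] args) {
        String[] fileDirs = { "/data/a/", "/data/a/", "/data/b/", "/data/a/" };
        List<String> dirList = new ArrayList<>();
        short[] dirIndex = new short[fileDirs.length];
        String lastDir = "\u0000"; // won't equal any real dir
        int lastPo = -1;
        for (int i = 0; i < fileDirs.length; i++) {
            String tDir = fileDirs[i];
            int po = lastPo;
            if (!tDir.equals(lastDir)) {    // rare: directory changed
                po = dirList.indexOf(tDir); // linear search; dirList is short
                if (po < 0) {
                    po = dirList.size();
                    dirList.add(tDir);
                }
                lastDir = tDir;
                lastPo = po;
            }
            dirIndex[i] = (short) po;
        }
        System.out.println(dirList);                  // [/data/a/, /data/b/]
        System.out.println(Arrays.toString(dirIndex)); // [0, 0, 1, 0]
    }
}
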
- tFileDirPA = null; //allow gc
+ tFileDirPA = null; // allow gc
- //remove "badFiles" if they no longer exist (in tFileNames)
+ // remove "badFiles" if they no longer exist (in tFileNames)
if (badFileMap.size() > 0) {
- //make hashset with all tFileNames
+ // make hashset with all tFileNames
HashSet tFileSet = new HashSet(Math2.roundToInt(1.4 * ntft));
for (int i = 0; i < ntft; i++) {
tFileSet.add(tFileDirIndexPA.get(i) + "/" + tFileNamePA.get(i));
- //String2.log("tFileSet add: " + tFileDirIndexPA.get(i) + "/" + tFileNamePA.get(i));
+ // String2.log("tFileSet add: " + tFileDirIndexPA.get(i) + "/" +
+ // tFileNamePA.get(i));
}
Object badFileNames[] = badFileMap.keySet().toArray();
int nMissing = 0;
int nbfn = badFileNames.length;
for (int i = 0; i < nbfn; i++) {
- Object name = badFileNames[i];
- if (!tFileSet.contains(name)) {
- if (reallyVerbose)
+ Object name = badFileNames[i];
+ if (!tFileSet.contains(name)) {
+ if (reallyVerbose)
String2.log("previously bad file now missing: " + name);
- nMissing++;
- badFileMap.remove(name);
- }
+ nMissing++;
+ badFileMap.remove(name);
+ }
}
- if (verbose) String2.log(
- "old nBadFiles size=" + nbfn + " nMissing=" + nMissing);
+ if (verbose)
+ String2.log(
+ "old nBadFiles size=" + nbfn + " nMissing=" + nMissing);
} else {
- if (verbose) String2.log("old nBadFiles size=0");
- }
+ if (verbose)
+ String2.log("old nBadFiles size=0");
+ }
- //sort fileTable and tFileTable by dirIndex and fileName
+ // sort fileTable and tFileTable by dirIndex and fileName
elapsedTime = System.currentTimeMillis();
- fileTable.leftToRightSort(2); //lexical sort so can walk through below
- tFileTable.leftToRightSort(2); //lexical sort so can walk through below
- if (reallyVerbose) String2.log("sortTime1=" + (System.currentTimeMillis() - elapsedTime) + "ms");
+ fileTable.leftToRightSort(2); // lexical sort so can walk through below
+ tFileTable.leftToRightSort(2); // lexical sort so can walk through below
+ if (reallyVerbose)
+ String2.log("sortTime1=" + (System.currentTimeMillis() - elapsedTime) + "ms");
- //remove any files in fileTable not in tFileTable (i.e., the file was deleted)
- //I can step through fileTable and tFileTable since both sorted same way
+ // remove any files in fileTable not in tFileTable (i.e., the file was deleted)
+ // I can step through fileTable and tFileTable since both sorted same way
{
int nft = ftFileList.size();
- BitSet keepFTRow = new BitSet(nft); //all false
+ BitSet keepFTRow = new BitSet(nft); // all false
int nFilesMissing = 0;
int tPo = 0;
for (int ftPo = 0; ftPo < nft; ftPo++) {
- int dirI = ftDirIndex.get(ftPo);
- String fileS = ftFileList.get(ftPo);
+ int dirI = ftDirIndex.get(ftPo);
+ String fileS = ftFileList.get(ftPo);
- //skip through tDir until it is >= ftDir
+ // skip through tDir until it is >= ftDir
while (tPo < ntft && tFileDirIndexPA.get(tPo) < dirI)
tPo++;
- //if dirs match, skip through tFile until it is >= ftFile
+ // if dirs match, skip through tFile until it is >= ftFile
boolean keep;
- if (tPo < ntft && tFileDirIndexPA.get(tPo) == dirI) {
- while (tPo < ntft && tFileDirIndexPA.get(tPo) == dirI &&
- tFileNamePA.get(tPo).compareTo(fileS) < 0)
+ if (tPo < ntft && tFileDirIndexPA.get(tPo) == dirI) {
+ while (tPo < ntft && tFileDirIndexPA.get(tPo) == dirI &&
+ tFileNamePA.get(tPo).compareTo(fileS) < 0)
tPo++;
keep = tPo < ntft && tFileDirIndexPA.get(tPo) == dirI &&
- tFileNamePA.get(tPo).equals(fileS);
+ tFileNamePA.get(tPo).equals(fileS);
} else {
keep = false;
}
- //deal with keep
+ // deal with keep
if (keep)
keepFTRow.set(ftPo, true);
else {
nFilesMissing++;
- if (reallyVerbose)
- String2.log("previously valid file now missing: " +
- dirList.get(dirI) + fileS);
+ if (reallyVerbose)
+ String2.log("previously valid file now missing: " +
+ dirList.get(dirI) + fileS);
}
}
if (verbose)
- String2.log("old fileTable size=" + nft + " nFilesMissing=" + nFilesMissing);
+ String2.log("old fileTable size=" + nft + " nFilesMissing=" + nFilesMissing);
fileTable.justKeep(keepFTRow);
}
- //make the expected arrays based on info from a file
+ // make the expected arrays based on info from a file
makeExpected(tDataVariables, dirList, ftDirIndex, ftFileList, ftLastMod, ftSize);
-
- //update fileTable by processing tFileNamePA
- int fileListPo = 0; //next one to look at
- int tFileListPo = 0; //next one to look at
+ // update fileTable by processing tFileNamePA
+ int fileListPo = 0; // next one to look at
+ int tFileListPo = 0; // next one to look at
int nReadFile = 0, nNoLastMod = 0, nNoSize = 0;
long readFileCumTime = 0;
long removeCumTime = 0;
@@ -1422,73 +1606,74 @@ else if (extractPattern == null)
while (tFileListPo < tFileNamePA.size()) {
if (Thread.currentThread().isInterrupted())
throw new SimpleException("EDDTableFromFiles.init" +
- EDStatic.caughtInterruptedAr[0]);
-
- int tDirI = tFileDirIndexPA.get(tFileListPo);
- String tFileS = tFileNamePA.get(tFileListPo);
- int dirI = fileListPo < ftFileList.size()? ftDirIndex.get(fileListPo) : Integer.MAX_VALUE;
- String fileS = fileListPo < ftFileList.size()? ftFileList.get(fileListPo) : "\uFFFF";
- long lastMod = fileListPo < ftFileList.size()? ftLastMod.get(fileListPo) : Long.MAX_VALUE;
- long size = fileListPo < ftFileList.size()? ftSize.get(fileListPo) : Long.MAX_VALUE;
- boolean logThis = (reallyVerbose && tFileListPo <= 100) ||
- ((reallyVerbose || verbose) &&
- ((tFileListPo <= 1000 && tFileListPo % 100 == 0) ||
- (tFileListPo % 1000 == 0)));
+ EDStatic.caughtInterruptedAr[0]);
+
+ int tDirI = tFileDirIndexPA.get(tFileListPo);
+ String tFileS = tFileNamePA.get(tFileListPo);
+ int dirI = fileListPo < ftFileList.size() ? ftDirIndex.get(fileListPo) : Integer.MAX_VALUE;
+ String fileS = fileListPo < ftFileList.size() ? ftFileList.get(fileListPo) : "\uFFFF";
+ long lastMod = fileListPo < ftFileList.size() ? ftLastMod.get(fileListPo) : Long.MAX_VALUE;
+ long size = fileListPo < ftFileList.size() ? ftSize.get(fileListPo) : Long.MAX_VALUE;
+ boolean logThis = (reallyVerbose && tFileListPo <= 100) ||
+ ((reallyVerbose || verbose) &&
+ ((tFileListPo <= 1000 && tFileListPo % 100 == 0) ||
+ (tFileListPo % 1000 == 0)));
if (logThis)
String2.log("EDDTableFromFiles file #" + tFileListPo + "=" + dirList.get(tDirI) + tFileS);
- //is tLastMod available for tFile?
+ // is tLastMod available for tFile?
long tLastMod = tFileLastModPA.get(tFileListPo);
- if (tLastMod == 0 || tLastMod == Long.MAX_VALUE) { //0=trouble
+ if (tLastMod == 0 || tLastMod == Long.MAX_VALUE) { // 0=trouble
nNoLastMod++;
- String2.log(tFileListPo + " reject because unable to get lastMod time: " +
- dirList.get(tDirI) + tFileS);
+ String2.log(tFileListPo + " reject because unable to get lastMod time: " +
+ dirList.get(tDirI) + tFileS);
tFileListPo++;
addBadFile(badFileMap, tDirI, tFileS, tLastMod, "Unable to get lastMod time.");
continue;
}
- //is tSize available for tFile?
+ // is tSize available for tFile?
long tSize = tFileSizePA.get(tFileListPo);
- if (tSize < 0 || tSize == Long.MAX_VALUE) { //-1=trouble
+ if (tSize < 0 || tSize == Long.MAX_VALUE) { // -1=trouble
nNoSize++;
- String2.log(tFileListPo + " reject because unable to get size: " +
- dirList.get(tDirI) + tFileS);
+ String2.log(tFileListPo + " reject because unable to get size: " +
+ dirList.get(tDirI) + tFileS);
tFileListPo++;
addBadFile(badFileMap, tDirI, tFileS, tLastMod, "Unable to get size.");
continue;
}
- //is tFile in badFileMap?
+ // is tFile in badFileMap?
Object bfi = badFileMap.get(tDirI + "/" + tFileS);
if (bfi != null) {
- //tFile is in badFileMap
- Object bfia[] = (Object[])bfi;
- long bfLastMod = ((Long)bfia[0]).longValue();
+ // tFile is in badFileMap
+ Object bfia[] = (Object[]) bfi;
+ long bfLastMod = ((Long) bfia[0]).longValue();
if (bfLastMod == tLastMod) {
- //file hasn't been changed; it is still bad
+ // file hasn't been changed; it is still bad
tFileListPo++;
if (tDirI == dirI && tFileS.equals(fileS)) {
- //remove it from cached info (Yes, a file may be marked bad (recently) and so still be in cache)
+ // remove it from cached info (Yes, a file may be marked bad (recently) and so
+ // still be in cache)
nRemoved++;
removeCumTime -= System.currentTimeMillis();
fileTable.removeRow(fileListPo);
removeCumTime += System.currentTimeMillis();
}
- //go on to next tFile
+ // go on to next tFile
if (logThis)
String2.log(tFileListPo + " already in badFile list");
continue;
} else {
- //file has been changed since being marked as bad; remove from badFileMap
+ // file has been changed since being marked as bad; remove from badFileMap
badFileMap.remove(tDirI + "/" + tFileS);
- //and continue processing this file
+ // and continue processing this file
}
}
- //is tFile already in cache?
- if (tDirI == dirI && tFileS.equals(fileS) && tLastMod == lastMod &&
- (tSize == size || !filesAreLocal)) { //remote file's size may be approximate, e.g., 11K
+ // is tFile already in cache?
+ if (tDirI == dirI && tFileS.equals(fileS) && tLastMod == lastMod &&
+ (tSize == size || !filesAreLocal)) { // remote file's size may be approximate, e.g., 11K
if (logThis)
String2.log(tFileListPo + " already in fileList");
nUnchanged++;
@@ -1497,251 +1682,278 @@ else if (extractPattern == null)
continue;
}
- //file in cache no longer exists: remove from fileTable
+ // file in cache no longer exists: remove from fileTable
if (dirI < tDirI ||
- (dirI == tDirI && fileS.compareTo(tFileS) < 0)) {
+ (dirI == tDirI && fileS.compareTo(tFileS) < 0)) {
if (logThis)
String2.log(tFileListPo + " file no longer exists: remove from fileList: " +
- dirList.get(dirI) + fileS);
+ dirList.get(dirI) + fileS);
nRemoved++;
removeCumTime -= System.currentTimeMillis();
- fileTable.removeRow(fileListPo); //may be slow
+ fileTable.removeRow(fileListPo); // may be slow
removeCumTime += System.currentTimeMillis();
- //tFileListPo isn't incremented, so it will be considered again in next iteration
+ // tFileListPo isn't incremented, so it will be considered again in next
+ // iteration
continue;
}
- //tFile is new, or tFile is in ftFileList but time is different
+ // tFile is new, or tFile is in ftFileList but time is different
if (dirI == tDirI && fileS.equals(tFileS)) {
if (logThis)
- String2.log(tFileListPo +
- " already in fileList (but time changed)");
+ String2.log(tFileListPo +
+ " already in fileList (but time changed)");
nDifferentModTime++;
} else {
- //if new, add row to fileTable
+ // if new, add row to fileTable
if (logThis)
String2.log(tFileListPo + " insert in fileList");
nNew++;
- fileTable.insertBlankRow(fileListPo); //may be slow
+ fileTable.insertBlankRow(fileListPo); // may be slow
}
- //gather file's info
+ // gather file's info
try {
- //read all of the data and metadata in the file
+ // read all of the data and metadata in the file
nReadFile++;
long rfcTime = System.currentTimeMillis();
- Table tTable = getSourceDataFromFile(dirList.get(tDirI), tFileS,
- sourceDataNames, sourceDataTypes,
- -1, Double.NaN, Double.NaN,
- null, null, null, true, true); //getMetadata, mustGetData
- //String2.log(">> getSourceDataFromFile " + tFileS + "\n" + tTable.toString(5));
+ Table tTable = getSourceDataFromFile(dirList.get(tDirI), tFileS,
+ sourceDataNames, sourceDataTypes,
+ -1, Double.NaN, Double.NaN,
+ null, null, null, true, true); // getMetadata, mustGetData
+ // String2.log(">> getSourceDataFromFile " + tFileS + "\n" +
+ // tTable.toString(5));
readFileCumTime += System.currentTimeMillis() - rfcTime;
- //set the values on the fileTable row throws throwable
- setFileTableRow(fileTable, fileListPo, tDirI, tFileS, tLastMod, tSize,
- tTable, logThis? tFileListPo : -1);
+ // set the values on the fileTable row throws throwable
+ setFileTableRow(fileTable, fileListPo, tDirI, tFileS, tLastMod, tSize,
+ tTable, logThis ? tFileListPo : -1);
tFileListPo++;
fileListPo++;
} catch (Throwable t) {
- String fullName = dirList.get(tDirI) + tFileS;
- msg = tFileListPo + " bad file: removing fileTable row for " +
- fullName + "\n" +
- MustBe.throwableToString(t);
- String2.log(msg);
+ String fullName = dirList.get(tDirI) + tFileS;
+ msg = tFileListPo + " bad file: removing fileTable row for " +
+ fullName + "\n" +
+ MustBe.throwableToString(t);
+ String2.log(msg);
if (Thread.currentThread().isInterrupted() ||
- t instanceof InterruptedException ||
- msg.indexOf(Math2.TooManyOpenFiles) >= 0)
- throw t; //stop loading this dataset
+ t instanceof InterruptedException ||
+ msg.indexOf(Math2.TooManyOpenFiles) >= 0)
+ throw t; // stop loading this dataset
nRemoved++;
removeCumTime -= System.currentTimeMillis();
fileTable.removeRow(fileListPo);
removeCumTime += System.currentTimeMillis();
tFileListPo++;
if (System.currentTimeMillis() - tLastMod > 30 * Calendar2.MILLIS_PER_MINUTE &&
- !(t instanceof TimeoutException &&
- !(t instanceof FileNotFoundException)) //occurs when a RAID unmounts itself. If really gone, removing from file list is enough.
- //??? This assumes any memory problem is permanent
- )
- //>30 minutes old, so not still being ftp'd, so add to badFileMap
+ !(t instanceof TimeoutException &&
+ !(t instanceof FileNotFoundException)) // occurs when a RAID unmounts itself. If
+ // really gone, removing from file list is
+ // enough.
+ // ??? This assumes any memory problem is permanent
+ )
+ // >30 minutes old, so not still being ftp'd, so add to badFileMap
addBadFile(badFileMap, tDirI, tFileS, tLastMod, MustBe.throwableToShortString(t));
msg = "";
}
}
- if (verbose) String2.log("fileTable updated; time=" +
- (System.currentTimeMillis() - elapsedTime) + "ms");
- Test.ensureTrue(fileTable.nRows() > 0,
- "No valid data files were found. See log.txt for details.");
+ if (verbose)
+ String2.log("fileTable updated; time=" +
+ (System.currentTimeMillis() - elapsedTime) + "ms");
+ Test.ensureTrue(fileTable.nRows() > 0,
+ "No valid data files were found. See log.txt for details.");
- //sort fileTable by sortFilesBySourceNames
+ // sort fileTable by sortFilesBySourceNames
if (String2.isSomething(tSortFilesBySourceNames)) {
- String sortBy[] = tSortFilesBySourceNames.indexOf(',') >= 0?
- StringArray.arrayFromCSV(tSortFilesBySourceNames) :
- StringArray.wordsAndQuotedPhrases(tSortFilesBySourceNames).toArray();
+ String sortBy[] = tSortFilesBySourceNames.indexOf(',') >= 0
+ ? StringArray.arrayFromCSV(tSortFilesBySourceNames)
+ : StringArray.wordsAndQuotedPhrases(tSortFilesBySourceNames).toArray();
IntArray sortColumns = new IntArray();
for (int i = 0; i < sortBy.length; i++) {
if (sortBy[i].length() == 0)
continue;
int dv = sourceDataNames.indexOf(sortBy[i]);
- if (dv < 0)
- throw new RuntimeException("Unknown name#" +
- i + "=\"" + sortBy[i] +
- "\"\nsourceDataNames=" + sourceDataNames.toString());
- sortColumns.add(dv0 + dv*3 + 0); //the dataVariable's min value
+ if (dv < 0)
+ throw new RuntimeException("Unknown name#" +
+ i + "=\"" + sortBy[i] +
+ "\"\nsourceDataNames=" + sourceDataNames.toString());
+ sortColumns.add(dv0 + dv * 3 + 0); // the dataVariable's min value
}
if (sortColumns.size() > 0) {
- //String2.log("first 10 rows of fileTable before sortFilesBySourceNames:\n" +
- // fileTable.toString("row", 10));
+ // String2.log("first 10 rows of fileTable before sortFilesBySourceNames:\n" +
+ // fileTable.toString("row", 10));
fileTableSortColumns = sortColumns.toArray();
fileTableSortAscending = new boolean[sortColumns.size()];
Arrays.fill(fileTableSortAscending, true);
elapsedTime = System.currentTimeMillis();
- fileTable.sort(fileTableSortColumns, fileTableSortAscending);
- if (debugMode)
- String2.log("time to sort fileTable by = " +
- (System.currentTimeMillis() - elapsedTime) + "ms");
+ fileTable.sort(fileTableSortColumns, fileTableSortAscending);
+ if (debugMode)
+ String2.log("time to sort fileTable by = " +
+ (System.currentTimeMillis() - elapsedTime) + "ms");
}
}
- if (reallyVerbose) String2.log("fileTable.nRows=" + fileTable.nRows() +
- ". The first few rows are:\n" + fileTable.toString(debugMode? 100 : 10));
-
- msg = "\n tFileNamePA.size()=" + tFileNamePA.size() +
- "\n dirTable.nRows()=" + dirTable.nRows() +
- "\n fileTable.nRows()=" + fileTable.nRows() +
- "\n fileTableInMemory=" + fileTableInMemory +
- "\n nUnchanged=" + nUnchanged +
- "\n nRemoved=" + nRemoved + " (nNoLastMod=" + nNoLastMod +
- ", nNoSize=" + nNoSize + ")" +
- "\n nReadFile=" + nReadFile +
- " (nDifferentModTime=" + nDifferentModTime + " nNew=" + nNew + ")" +
- " readFileCumTime=" + Calendar2.elapsedTimeString(readFileCumTime) +
- " avg=" + (readFileCumTime / Math.max(1,nReadFile)) + "ms";
- if (verbose || fileTable.nRows() == 0)
+ if (reallyVerbose)
+ String2.log("fileTable.nRows=" + fileTable.nRows() +
+ ". The first few rows are:\n" + fileTable.toString(debugMode ? 100 : 10));
+
+ msg = "\n tFileNamePA.size()=" + tFileNamePA.size() +
+ "\n dirTable.nRows()=" + dirTable.nRows() +
+ "\n fileTable.nRows()=" + fileTable.nRows() +
+ "\n fileTableInMemory=" + fileTableInMemory +
+ "\n nUnchanged=" + nUnchanged +
+ "\n nRemoved=" + nRemoved + " (nNoLastMod=" + nNoLastMod +
+ ", nNoSize=" + nNoSize + ")" +
+ "\n nReadFile=" + nReadFile +
+ " (nDifferentModTime=" + nDifferentModTime + " nNew=" + nNew + ")" +
+ " readFileCumTime=" + Calendar2.elapsedTimeString(readFileCumTime) +
+ " avg=" + (readFileCumTime / Math.max(1, nReadFile)) + "ms";
+ if (verbose || fileTable.nRows() == 0)
String2.log(msg);
if (fileTable.nRows() == 0)
throw new RuntimeException("No valid files!");
- if (nReadFile > 0 || nRemoved > 0)
- filesChanged =
- "The list of aggregated files changed:\n" +
- " The number of new or changed data files that were read: " + nReadFile + ".\n" +
- " The number of files that were removed from the file list: " + nRemoved + ".\n" +
- " The total number of good files is now " + tFileNamePA.size() + ".\n";
+ if (nReadFile > 0 || nRemoved > 0)
+ filesChanged = "The list of aggregated files changed:\n" +
+ " The number of new or changed data files that were read: " + nReadFile + ".\n" +
+ " The number of files that were removed from the file list: " + nRemoved + ".\n" +
+ " The total number of good files is now " + tFileNamePA.size() + ".\n";
- //end !doQuickRestart
+ // end !doQuickRestart
}
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished file loop");
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished file loop");
- //make combined minMaxTable one col per dv; row0=min, row1=max, row2=hasNaN
- //it holds raw source values -- scale_factor and add_offset haven't been applied
+ // make combined minMaxTable one col per dv; row0=min, row1=max, row2=hasNaN
+ // it holds raw source values -- scale_factor and add_offset haven't been
+ // applied
Table tMinMaxTable = makeMinMaxTable(dirList, fileTable);
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished makeMinMaxTable");
-
- //if !quickRestart, save dirTable, fileTable, badFileMap
- if (!doQuickRestart)
- saveDirTableFileTableBadFiles(standardizeWhat, dirTable, fileTable, badFileMap); //throws Throwable
- //then make related changes as quickly/atomically as possible
- minMaxTable = tMinMaxTable; //swap into place quickly
-
- //set creationTimeMillis to fileTable lastModified
- //(either very recent or (if quickRestart) from previous full restart)
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished
+ // makeMinMaxTable");
+
+ // if !quickRestart, save dirTable, fileTable, badFileMap
+ if (!doQuickRestart)
+ saveDirTableFileTableBadFiles(standardizeWhat, dirTable, fileTable, badFileMap); // throws Throwable
+ // then make related changes as quickly/atomically as possible
+ minMaxTable = tMinMaxTable; // swap into place quickly
+
+ // set creationTimeMillis to fileTable lastModified
+ // (either very recent or (if quickRestart) from previous full restart)
creationTimeMillis = File2.getLastModified(datasetDir() + FILE_TABLE_FILENAME);
- //send email with bad file info
+ // send email with bad file info
if (!badFileMap.isEmpty()) {
StringBuilder emailSB = new StringBuilder();
emailSB.append(badFileMapToString(badFileMap, dirList));
emailSB.append(msg + "\n\n");
- EDStatic.email(EDStatic.emailEverythingToCsv, errorInMethod + "Bad Files",
- emailSB.toString());
+ EDStatic.email(EDStatic.emailEverythingToCsv, errorInMethod + "Bad Files",
+ emailSB.toString());
}
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished sending email with bad file info");
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished sending email
+ // with bad file info");
- //try to open metadataFrom FIRST|LAST file (based on lastModifiedTime) to get source metadata
+ // try to open metadataFrom FIRST|LAST file (based on lastModifiedTime) to get
+ // source metadata
int nMinMaxIndex[] = ftLastMod.getNMinMaxIndex();
- int tFileI = metadataFrom.equals(MF_FIRST)? nMinMaxIndex[1] : nMinMaxIndex[2];
- String mdFromDir = dirList.get(ftDirIndex.get(tFileI));
+ int tFileI = metadataFrom.equals(MF_FIRST) ? nMinMaxIndex[1] : nMinMaxIndex[2];
+ String mdFromDir = dirList.get(ftDirIndex.get(tFileI));
String mdFromName = ftFileList.get(tFileI);
- if (verbose) String2.log("getting dataset metadata from " + mdFromDir + mdFromName +
- "\n ftLastMod" +
- " first=" + Calendar2.millisToIsoStringTZ(ftLastMod.get(nMinMaxIndex[1])) +
- " last=" + Calendar2.millisToIsoStringTZ(ftLastMod.get(nMinMaxIndex[2])));
+ if (verbose)
+ String2.log("getting dataset metadata from " + mdFromDir + mdFromName +
+ "\n ftLastMod" +
+ " first=" + Calendar2.millisToIsoStringTZ(ftLastMod.get(nMinMaxIndex[1])) +
+ " last=" + Calendar2.millisToIsoStringTZ(ftLastMod.get(nMinMaxIndex[2])));
Table tTable = getSourceDataFromFile(mdFromDir, mdFromName,
- sourceDataNames, sourceDataTypes, -1, Double.NaN, Double.NaN,
- null, null, null, true, false); //getMetadata, mustGetData //throws Exception if trouble
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished open metadataFrom FIRST|LAST");
- //String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " get source metadata table header (nCols=" +
- // tTable.nColumns() + " nRows=" + tTable.nRows() + "):\n" + tTable.getNCHeader("row"));
-
- //if accessibleViaFiles=true and filesInS3Bucket, test if files are in a private bucket
- //and thus /files/ access must be handles by ERDDAP acting as go between
- //(not just redirect, which works for public bucket)
+ sourceDataNames, sourceDataTypes, -1, Double.NaN, Double.NaN,
+ null, null, null, true, false); // getMetadata, mustGetData //throws Exception if trouble
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished open metadataFrom
+ // FIRST|LAST");
+ // String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " get source metadata table
+ // header (nCols=" +
+ // tTable.nColumns() + " nRows=" + tTable.nRows() + "):\n" +
+ // tTable.getNCHeader("row"));
+
+ // if accessibleViaFiles=true and filesInS3Bucket, test if files are in a
+ // private bucket
+ // and thus /files/ access must be handles by ERDDAP acting as go between
+ // (not just redirect, which works for public bucket)
filesInS3Bucket = String2.isAwsS3Url(mdFromDir);
if (accessibleViaFiles && filesInS3Bucket) {
filesInPrivateS3Bucket = SSR.awsS3FileIsPrivate(mdFromDir + mdFromName);
- if (verbose) String2.log(" For datasetID=" + datasetID + ", filesInPrivateS3Bucket=" + filesInPrivateS3Bucket);
+ if (verbose)
+ String2.log(" For datasetID=" + datasetID + ", filesInPrivateS3Bucket=" + filesInPrivateS3Bucket);
}
- //remove e.g., global geospatial_lon_min and column actual_max, actual_min,
- // actual_range, data_min, data_max
+ // remove e.g., global geospatial_lon_min and column actual_max, actual_min,
+ // actual_range, data_min, data_max
tTable.unsetActualRangeAndBoundingBox();
sourceGlobalAttributes = tTable.globalAttributes();
- //make combinedGlobalAttributes
- combinedGlobalAttributes = new Attributes(addGlobalAttributes, sourceGlobalAttributes); //order is important
+ // make combinedGlobalAttributes
+ combinedGlobalAttributes = new Attributes(addGlobalAttributes, sourceGlobalAttributes); // order is important
String tLicense = combinedGlobalAttributes.getString("license");
if (tLicense != null)
- combinedGlobalAttributes.set("license",
- String2.replaceAll(tLicense, "[standard]", EDStatic.standardLicense));
+ combinedGlobalAttributes.set("license",
+ String2.replaceAll(tLicense, "[standard]", EDStatic.standardLicense));
combinedGlobalAttributes.removeValue("\"null\"");
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished making combineGlobalAtts");
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished making
+ // combineGlobalAtts");
- //make the dataVariables[]
+ // make the dataVariables[]
dataVariables = new EDV[ndv];
for (int dv = 0; dv < ndv; dv++) {
String tSourceName = sourceDataNames.get(dv);
- String tDestName = (String)tDataVariables[dv][1];
+ String tDestName = (String) tDataVariables[dv][1];
if (tDestName == null || tDestName.trim().length() == 0)
tDestName = tSourceName;
- int tableDv = tTable.findColumnNumber(tSourceName);
+ int tableDv = tTable.findColumnNumber(tSourceName);
if (reallyVerbose && dv != extractedColNameIndex && tableDv < 0)
- String2.log("NOTE: " + tSourceName + " not found in metadataFrom=" + metadataFrom +
- " colNames=" + tTable.getColumnNamesCSVString());
- Attributes tSourceAtt = tableDv < 0? new Attributes() : tTable.columnAttributes(tableDv);
- Attributes tAddAtt = (Attributes)tDataVariables[dv][2];
- //PrimitiveArray taa = tAddAtt.get("_FillValue");
- //String2.log(">>taa " + tSourceName + " _FillValue=" + taa);
- //dMin and dMax are raw source values -- scale_factor and add_offset haven't been applied
+ String2.log("NOTE: " + tSourceName + " not found in metadataFrom=" + metadataFrom +
+ " colNames=" + tTable.getColumnNamesCSVString());
+ Attributes tSourceAtt = tableDv < 0 ? new Attributes() : tTable.columnAttributes(tableDv);
+ Attributes tAddAtt = (Attributes) tDataVariables[dv][2];
+ // PrimitiveArray taa = tAddAtt.get("_FillValue");
+ // String2.log(">>taa " + tSourceName + " _FillValue=" + taa);
+ // dMin and dMax are raw source values -- scale_factor and add_offset haven't
+ // been applied
String tSourceType = sourceDataTypes[dv];
- PAOne tMin = tSourceType.equals("String")? PAOne.fromString("") : minMaxTable.getPAOneData(dv, 0);
- PAOne tMax = tSourceType.equals("String")? PAOne.fromString("") : minMaxTable.getPAOneData(dv, 1);
- //String2.log(">> tSourceName=" + tSourceName + " sMin=" + sMin + " sMax=" + sMax + " paMinest=" + minMaxTable.getColumn(dv).MINEST_VALUE() + " paMaxest=" + minMaxTable.getColumn(dv).MAXEST_VALUE());
+ PAOne tMin = tSourceType.equals("String") ? PAOne.fromString("") : minMaxTable.getPAOneData(dv, 0);
+ PAOne tMax = tSourceType.equals("String") ? PAOne.fromString("") : minMaxTable.getPAOneData(dv, 1);
+ // String2.log(">> tSourceName=" + tSourceName + " sMin=" + sMin + " sMax=" +
+ // sMax + " paMinest=" + minMaxTable.getColumn(dv).MINEST_VALUE() + " paMaxest="
+ // + minMaxTable.getColumn(dv).MAXEST_VALUE());
- if (reallyVerbose) String2.log(" dv=" + dv + " sourceName=" + tSourceName + " sourceType=" + tSourceType + " tMin=" + tMin + " tMax=" + tMax);
+ if (reallyVerbose)
+ String2.log(" dv=" + dv + " sourceName=" + tSourceName + " sourceType=" + tSourceType + " tMin=" + tMin
+ + " tMax=" + tMax);
if (EDV.LON_NAME.equals(tDestName)) {
dataVariables[dv] = new EDVLon(datasetID, tSourceName,
- tSourceAtt, tAddAtt,
- tSourceType, tMin, tMax);
+ tSourceAtt, tAddAtt,
+ tSourceType, tMin, tMax);
lonIndex = dv;
} else if (EDV.LAT_NAME.equals(tDestName)) {
dataVariables[dv] = new EDVLat(datasetID, tSourceName,
- tSourceAtt, tAddAtt,
- tSourceType, tMin, tMax);
+ tSourceAtt, tAddAtt,
+ tSourceType, tMin, tMax);
latIndex = dv;
} else if (EDV.ALT_NAME.equals(tDestName)) {
dataVariables[dv] = new EDVAlt(datasetID, tSourceName,
- tSourceAtt, tAddAtt,
- tSourceType, tMin, tMax);
+ tSourceAtt, tAddAtt,
+ tSourceType, tMin, tMax);
altIndex = dv;
} else if (EDV.DEPTH_NAME.equals(tDestName)) {
dataVariables[dv] = new EDVDepth(datasetID, tSourceName,
- tSourceAtt, tAddAtt,
- tSourceType, tMin, tMax);
+ tSourceAtt, tAddAtt,
+ tSourceType, tMin, tMax);
depthIndex = dv;
} else if (EDVTimeStamp.hasTimeUnits(tSourceAtt, tAddAtt)) {
- //for ISO strings and numeric source values:
+ // for ISO strings and numeric source values:
if (tAddAtt == null)
tAddAtt = new Attributes();
String tUnits = tAddAtt.getString("units");
@@ -1749,113 +1961,127 @@ else if (extractPattern == null)
tUnits = tSourceAtt.getString("units");
if (tUnits == null)
tUnits = "";
- //String2.log(">> timestamp minMaxTable min=" + minMaxTable.getStringData(dv, 0));
- if (tSourceType.equals("String") && //iso string times sort correctly
- (tUnits.toLowerCase().startsWith("yyyy") ||
- tUnits.toLowerCase().startsWith("uuuu"))) { //probably sorted correctly
+ // String2.log(">> timestamp minMaxTable min=" + minMaxTable.getStringData(dv,
+ // 0));
+ if (tSourceType.equals("String") && // iso string times sort correctly
+ (tUnits.toLowerCase().startsWith("yyyy") ||
+ tUnits.toLowerCase().startsWith("uuuu"))) { // probably sorted correctly
StringArray actualRange = new StringArray();
actualRange.add(minMaxTable.getStringData(dv, 0));
actualRange.add(minMaxTable.getStringData(dv, 1));
tAddAtt.set("actual_range", actualRange);
- //String2.log(">> timestamp actual_range=" + actualRange);
- } else if (!tSourceType.equals("String")) { //numeric times sort correctly
+ // String2.log(">> timestamp actual_range=" + actualRange);
+ } else if (!tSourceType.equals("String")) { // numeric times sort correctly
PrimitiveArray actualRange = PrimitiveArray.factory(
- PAType.fromCohortString(sourceDataTypes[dv]), 2, false);
+ PAType.fromCohortString(sourceDataTypes[dv]), 2, false);
actualRange.addPAOne(minMaxTable.getPAOneData(dv, 0));
actualRange.addPAOne(minMaxTable.getPAOneData(dv, 1));
tAddAtt.set("actual_range", actualRange);
- //String2.log(">> timestamp actual_range=" + actualRange);
+ // String2.log(">> timestamp actual_range=" + actualRange);
}
if (EDV.TIME_NAME.equals(tDestName)) {
- //it's the time variable
+ // it's the time variable
dataVariables[dv] = new EDVTime(datasetID, tSourceName,
- tSourceAtt, tAddAtt,
- tSourceType); //this constructor gets source / sets destination actual_range
+ tSourceAtt, tAddAtt,
+ tSourceType); // this constructor gets source / sets destination actual_range
timeIndex = dv;
- } else {
- //it's a timeStamp variable
- dataVariables[dv] = new EDVTimeStamp(datasetID, tSourceName, tDestName,
- tSourceAtt, tAddAtt,
- tSourceType); //this constructor gets source / sets destination actual_range
+ } else {
+ // it's a timeStamp variable
+ dataVariables[dv] = new EDVTimeStamp(datasetID, tSourceName, tDestName,
+ tSourceAtt, tAddAtt,
+ tSourceType); // this constructor gets source / sets destination actual_range
}
} else {
- dataVariables[dv] = new EDV(datasetID, tSourceName, tDestName,
- tSourceAtt, tAddAtt, tSourceType, tMin, tMax);
+ dataVariables[dv] = new EDV(datasetID, tSourceName, tDestName,
+ tSourceAtt, tAddAtt, tSourceType, tMin, tMax);
dataVariables[dv].setActualRangeFromDestinationMinMax();
}
- //String2.pressEnterToContinue("!!!sourceName=" + dataVariables[dv].sourceName() +
- // " type=" + dataVariables[dv].sourceDataType() + " min=" + dataVariables[dv].destinationMinDouble());
+ // String2.pressEnterToContinue("!!!sourceName=" +
+ // dataVariables[dv].sourceName() +
+ // " type=" + dataVariables[dv].sourceDataType() + " min=" +
+ // dataVariables[dv].destinationMinDouble());
}
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished making variables");
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished making
+ // variables");
- //more class-specific things (after variables have been created)
+ // more class-specific things (after variables have been created)
if (className.equals("EDDTableFromHttpGet")) {
setHttpGetRequiredVariableTypes();
}
- //Try to gather information to serve this dataset via ERDDAP's SOS server.
- //This has an advantage over the generic gathering of SOS data:
- // if it works, it can determine the min/max lon/lat/time of each station.
- //This can only deal with: each file has info for only one e.g., station,
- // but there may be several files for same station.
- //If this fails, makeAccessibleViaSOS() may still work.
- if (preliminaryAccessibleViaSOS().length() == 0) {
-
- EDV lonVar = dataVariables[lonIndex];
- EDV latVar = dataVariables[latIndex];
- EDV timeVar = dataVariables[timeIndex];
+ // Try to gather information to serve this dataset via ERDDAP's SOS server.
+ // This has an advantage over the generic gathering of SOS data:
+ // if it works, it can determine the min/max lon/lat/time of each station.
+ // This can only deal with: each file has info for only one e.g., station,
+ // but there may be several files for same station.
+ // If this fails, makeAccessibleViaSOS() may still work.
+ if (preliminaryAccessibleViaSOS().length() == 0) {
+
+ EDV lonVar = dataVariables[lonIndex];
+ EDV latVar = dataVariables[latIndex];
+ EDV timeVar = dataVariables[timeIndex];
EDV offeringVar = dataVariables[sosOfferingIndex];
- //Get convenient access to fileTable data min,max,hasNaN
- // (but converted to destination values).
- //clone() to ensure original fileTable values are changed.
- PrimitiveArray fFileName = fileTable.getColumn("fileName");
- PrimitiveArray fLonMin = lonVar.toDestination( (PrimitiveArray)fileTable.getColumn(dv0 + lonIndex*3 + 0).clone());
- PrimitiveArray fLonMax = lonVar.toDestination( (PrimitiveArray)fileTable.getColumn(dv0 + lonIndex*3 + 1).clone());
- PrimitiveArray fLonNan = fileTable.getColumn(dv0 + lonIndex*3 + 2);
- PrimitiveArray fLatMin = latVar.toDestination( (PrimitiveArray)fileTable.getColumn(dv0 + latIndex*3 + 0).clone());
- PrimitiveArray fLatMax = latVar.toDestination( (PrimitiveArray)fileTable.getColumn(dv0 + latIndex*3 + 1).clone());
- PrimitiveArray fLatNan = fileTable.getColumn(dv0 + latIndex*3 + 2);
- PrimitiveArray fTimeMin = timeVar.toDestination( (PrimitiveArray)fileTable.getColumn(dv0 + timeIndex*3 + 0).clone());
- PrimitiveArray fTimeMax = timeVar.toDestination( (PrimitiveArray)fileTable.getColumn(dv0 + timeIndex*3 + 1).clone());
- PrimitiveArray fTimeNan = fileTable.getColumn(dv0 + timeIndex*3 + 2);
- PrimitiveArray fOfferingMin = offeringVar.toDestination((PrimitiveArray)fileTable.getColumn(dv0 + sosOfferingIndex*3 + 0).clone());
- PrimitiveArray fOfferingMax = offeringVar.toDestination((PrimitiveArray)fileTable.getColumn(dv0 + sosOfferingIndex*3 + 1).clone());
- PrimitiveArray fOfferingNan = fileTable.getColumn(dv0 + sosOfferingIndex*3 + 2);
-
- //make the sos PAs to hold destination values
- sosMinLon = PrimitiveArray.factory(lonVar.destinationDataPAType(), 8, false);
- sosMaxLon = PrimitiveArray.factory(lonVar.destinationDataPAType(), 8, false);
- sosMinLat = PrimitiveArray.factory(latVar.destinationDataPAType(), 8, false);
- sosMaxLat = PrimitiveArray.factory(latVar.destinationDataPAType(), 8, false);
- sosMinTime = PrimitiveArray.factory(timeVar.destinationDataPAType(), 8, false);
- sosMaxTime = PrimitiveArray.factory(timeVar.destinationDataPAType(), 8, false);
+ // Get convenient access to fileTable data min,max,hasNaN
+ // (but converted to destination values).
+ // clone() to ensure original fileTable values are changed.
+ PrimitiveArray fFileName = fileTable.getColumn("fileName");
+ PrimitiveArray fLonMin = lonVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + lonIndex * 3 + 0).clone());
+ PrimitiveArray fLonMax = lonVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + lonIndex * 3 + 1).clone());
+ PrimitiveArray fLonNan = fileTable.getColumn(dv0 + lonIndex * 3 + 2);
+ PrimitiveArray fLatMin = latVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + latIndex * 3 + 0).clone());
+ PrimitiveArray fLatMax = latVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + latIndex * 3 + 1).clone());
+ PrimitiveArray fLatNan = fileTable.getColumn(dv0 + latIndex * 3 + 2);
+ PrimitiveArray fTimeMin = timeVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + timeIndex * 3 + 0).clone());
+ PrimitiveArray fTimeMax = timeVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + timeIndex * 3 + 1).clone());
+ PrimitiveArray fTimeNan = fileTable.getColumn(dv0 + timeIndex * 3 + 2);
+ PrimitiveArray fOfferingMin = offeringVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + sosOfferingIndex * 3 + 0).clone());
+ PrimitiveArray fOfferingMax = offeringVar
+ .toDestination((PrimitiveArray) fileTable.getColumn(dv0 + sosOfferingIndex * 3 + 1).clone());
+ PrimitiveArray fOfferingNan = fileTable.getColumn(dv0 + sosOfferingIndex * 3 + 2);
+
+ // make the sos PAs to hold destination values
+ sosMinLon = PrimitiveArray.factory(lonVar.destinationDataPAType(), 8, false);
+ sosMaxLon = PrimitiveArray.factory(lonVar.destinationDataPAType(), 8, false);
+ sosMinLat = PrimitiveArray.factory(latVar.destinationDataPAType(), 8, false);
+ sosMaxLat = PrimitiveArray.factory(latVar.destinationDataPAType(), 8, false);
+ sosMinTime = PrimitiveArray.factory(timeVar.destinationDataPAType(), 8, false);
+ sosMaxTime = PrimitiveArray.factory(timeVar.destinationDataPAType(), 8, false);
sosOfferings = new StringArray();
-
- //Collect info until a file doesn't meet requirements or all files do meet requirements.
- //Do all files contain just one value of sosOfferingIndex (e.g., 1 station)?
- //If so, easy to find min/max lon/lat/time for each station.
+
+ // Collect info until a file doesn't meet requirements or all files do meet
+ // requirements.
+ // Do all files contain just one value of sosOfferingIndex (e.g., 1 station)?
+ // If so, easy to find min/max lon/lat/time for each station.
int tnFiles = fLonMin.size();
- HashMap offeringIndexHM = new HashMap(); //key=offering value=Integer.valueOf(SosXxx index)
+ HashMap offeringIndexHM = new HashMap(); // key=offering value=Integer.valueOf(SosXxx index)
for (int f = 0; f < tnFiles; f++) {
- String offMin = fOfferingMin.getString(f);
- String offMax = fOfferingMax.getString(f);
- boolean offNaN = fOfferingNan.getInt(f) == 1; //hasNaN? 1=true 0=false
+ String offMin = fOfferingMin.getString(f);
+ String offMax = fOfferingMax.getString(f);
+ boolean offNaN = fOfferingNan.getInt(f) == 1; // hasNaN? 1=true 0=false
- //if offerings in this file are all "" or null, ignore it
- if (offNaN &&
- (offMin == null || offMin.length() == 0) &&
- (offMax == null || offMax.length() == 0)) {
+ // if offerings in this file are all "" or null, ignore it
+ if (offNaN &&
+ (offMin == null || offMin.length() == 0) &&
+ (offMax == null || offMax.length() == 0)) {
- //if just one offering in file (no mv), add data to sos arrays
+ // if just one offering in file (no mv), add data to sos arrays
} else if (!offNaN && offMin.equals(offMax)) {
- //find sos PA index
- Integer soI = (Integer)offeringIndexHM.get(offMin);
+ // find sos PA index
+ Integer soI = (Integer) offeringIndexHM.get(offMin);
if (soI == null) {
- //it's a new offering. add it.
+ // it's a new offering. add it.
soI = Integer.valueOf(sosOfferings.size());
offeringIndexHM.put(offMin, soI);
sosMinLon.addFromPA(fLonMin, f);
@@ -1867,173 +2093,192 @@ else if (extractPattern == null)
sosOfferings.addString(offMin);
} else {
- //a previous file had the same offering, so update its info in sos... PA
- //store the min min and the max max.
+ // a previous file had the same offering, so update its info in sos... PA
+ // store the min min and the max max.
int soi = soI.intValue();
- sosMinLon.setDouble( soi, Math2.finiteMin(sosMinLon.getDouble( soi), fLonMin.getDouble(f)));
- sosMaxLon.setDouble( soi, Math2.finiteMax(sosMaxLon.getDouble( soi), fLonMax.getDouble(f)));
- sosMinLat.setDouble( soi, Math2.finiteMin(sosMinLat.getDouble( soi), fLatMin.getDouble(f)));
- sosMaxLat.setDouble( soi, Math2.finiteMax(sosMaxLat.getDouble( soi), fLatMax.getDouble(f)));
+ sosMinLon.setDouble(soi, Math2.finiteMin(sosMinLon.getDouble(soi), fLonMin.getDouble(f)));
+ sosMaxLon.setDouble(soi, Math2.finiteMax(sosMaxLon.getDouble(soi), fLonMax.getDouble(f)));
+ sosMinLat.setDouble(soi, Math2.finiteMin(sosMinLat.getDouble(soi), fLatMin.getDouble(f)));
+ sosMaxLat.setDouble(soi, Math2.finiteMax(sosMaxLat.getDouble(soi), fLatMax.getDouble(f)));
sosMinTime.setDouble(soi, Math2.finiteMin(sosMinTime.getDouble(soi), fTimeMin.getDouble(f)));
sosMaxTime.setDouble(soi, Math2.finiteMax(sosMaxTime.getDouble(soi), fTimeMax.getDouble(f)));
- //sosOfferings is already correct
- }
+ // sosOfferings is already correct
+ }
} else {
- //else trouble: more than one offering per file or contaminated with offering=mv.
- //Abandon this approach.
- //accessibleViaSOS = ""; //???set this?
- if (verbose) String2.log(
- "EDDTableFromFiles can't gather sosOffering min/max for datasetID=" + datasetID +
- "\nfrom fileTable because fileName=" + fFileName.getString(f) +
- " has >1 offering: min=" + offMin + " max=" + offMax + " nan=" + offNaN);
+ // else trouble: more than one offering per file or contaminated with
+ // offering=mv.
+ // Abandon this approach.
+ // accessibleViaSOS = ""; //???set this?
+ if (verbose)
+ String2.log(
+ "EDDTableFromFiles can't gather sosOffering min/max for datasetID=" + datasetID +
+ "\nfrom fileTable because fileName=" + fFileName.getString(f) +
+ " has >1 offering: min=" + offMin + " max=" + offMax + " nan=" + offNaN);
sosOfferingType = null;
sosOfferingIndex = -1;
- sosMinLon = null;
- sosMaxLon = null;
- sosMinLat = null;
- sosMaxLat = null;
- sosMinTime = null;
- sosMaxTime = null;
+ sosMinLon = null;
+ sosMaxLon = null;
+ sosMinLat = null;
+ sosMaxLat = null;
+ sosMinTime = null;
+ sosMaxTime = null;
sosOfferings = null;
break;
}
}
- } //end gathering sosOfferings info
+ } // end gathering sosOfferings info
- //make addVariablesWhereAttNames and addVariablesWhereAttValues
+ // make addVariablesWhereAttNames and addVariablesWhereAttValues
makeAddVariablesWhereAttNamesAndValues(tAddVariablesWhere);
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished makeAddVariablesWhere...");
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished
+ // makeAddVariablesWhere...");
- //ensure the setup is valid
+ // ensure the setup is valid
ensureValid();
- //if (debugMode) String2.log(">> EDDTableFromFiles " + Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished ensureValid");
+ // if (debugMode) String2.log(">> EDDTableFromFiles " +
+ // Calendar2.getCurrentISODateTimeStringLocalTZ() + " finished ensureValid");
- //if cacheFromUrl is remote ERDDAP /files/, subscribe to the dataset
- //This is like code in EDDGridFromFiles but "/tabledap/"
+ // if cacheFromUrl is remote ERDDAP /files/, subscribe to the dataset
+ // This is like code in EDDGridFromFiles but "/tabledap/"
if (!doQuickRestart && EDStatic.subscribeToRemoteErddapDataset &&
- cacheFromUrl != null &&
- cacheFromUrl.startsWith("http") &&
- cacheFromUrl.indexOf("/erddap/files/") > 0) {
+ cacheFromUrl != null &&
+ cacheFromUrl.startsWith("http") &&
+ cacheFromUrl.indexOf("/erddap/files/") > 0) {
- //convert cacheFromUrl from .../files/datasetID/... url into .../tabledap/datasetID
+ // convert cacheFromUrl from .../files/datasetID/... url into
+ // .../tabledap/datasetID
int po1 = cacheFromUrl.indexOf("/erddap/files/");
- int po2 = cacheFromUrl.indexOf('/', po1 + 14); //next / in cacheFromUrl
- if (po2 < 0) po2 = cacheFromUrl.length();
- String remoteUrl = cacheFromUrl.substring(0, po1) + "/erddap/tabledap/" +
- cacheFromUrl.substring(po1 + 14, po2); //datasetID
- tryToSubscribeToRemoteErddapDataset(true, remoteUrl); //logs errors. Won't throw exception.
+ int po2 = cacheFromUrl.indexOf('/', po1 + 14); // next / in cacheFromUrl
+ if (po2 < 0)
+ po2 = cacheFromUrl.length();
+ String remoteUrl = cacheFromUrl.substring(0, po1) + "/erddap/tabledap/" +
+ cacheFromUrl.substring(po1 + 14, po2); // datasetID
+ tryToSubscribeToRemoteErddapDataset(true, remoteUrl); // logs errors. Won't throw exception.
}
- //EDV edv = findDataVariableByDestinationName("longitude");
- //String2.pressEnterToContinue("!!!end of EDDTableFromFiles constructor: sourceName=" + edv.sourceName() +
- // " type=" + edv.sourceDataType() + " min=" + edv.destinationMinDouble());
+ // EDV edv = findDataVariableByDestinationName("longitude");
+ // String2.pressEnterToContinue("!!!end of EDDTableFromFiles constructor:
+ // sourceName=" + edv.sourceName() +
+ // " type=" + edv.sourceDataType() + " min=" + edv.destinationMinDouble());
- //dirTable and fileTable InMemory?
+ // dirTable and fileTable InMemory?
if (!fileTableInMemory) {
dirTable = null;
fileTable = null;
}
- //finally
+ // finally
long cTime = System.currentTimeMillis() - constructionStartMillis;
- if (verbose) String2.log(
- (debugMode? "\n" + toString() : "") +
- "\n*** EDDTableFromFiles " + datasetID + " constructor finished. TIME=" +
- cTime + "ms" + (cTime >= 600000? " (>10m!)" : cTime >= 10000? " (>10s!)" : "") + "\n");
+ if (verbose)
+ String2.log(
+ (debugMode ? "\n" + toString() : "") +
+ "\n*** EDDTableFromFiles " + datasetID + " constructor finished. TIME=" +
+ cTime + "ms" + (cTime >= 600000 ? " (>10m!)" : cTime >= 10000 ? " (>10s!)" : "") + "\n");
}
/**
- * This returns true if this EDDTable knows each variable's actual_range (e.g.,
+ * This returns true if this EDDTable knows each variable's actual_range (e.g.,
* EDDTableFromFiles) or false if it doesn't (e.g., EDDTableFromDatabase).
*
- * @returns true if this EDDTable knows each variable's actual_range (e.g.,
- * EDDTableFromFiles) or false if it doesn't (e.g., EDDTableFromDatabase).
+ * @returns true if this EDDTable knows each variable's actual_range (e.g.,
+ * EDDTableFromFiles) or false if it doesn't (e.g.,
+ * EDDTableFromDatabase).
*/
- public boolean knowsActualRange() {return true; } //EDDTableFromFiles knows actual_range
-
+ public boolean knowsActualRange() {
+ return true;
+ } // EDDTableFromFiles knows actual_range
/**
*/
- public String fileDir() {return fileDir; }
- public String fileNameRegex() {return fileNameRegex; }
+ public String fileDir() {
+ return fileDir;
+ }
+ public String fileNameRegex() {
+ return fileNameRegex;
+ }
/**
- * If using temporary cache system, this ensure file is in cache or throws Exception.
+ * If using temporary cache system, this ensure file is in cache or throws
+ * Exception.
*
* @throws Exception if trouble
*/
void ensureInCache(String localFullName) throws Exception {
- if (cacheFromUrl != null && cacheMaxSizeB > 0) { //cache system is active
- //If desired file is in cache, we're done.
- if (RegexFilenameFilter.touchFileAndRelated(localFullName)) //returns true if localFullName exists
+ if (cacheFromUrl != null && cacheMaxSizeB > 0) { // cache system is active
+ // If desired file is in cache, we're done.
+ if (RegexFilenameFilter.touchFileAndRelated(localFullName)) // returns true if localFullName exists
return;
- //Normally this does nothing and takes ~0 time.
- //When it does something, it takes time, so it's safer to prune first
- // (if needed) then download (so as not to download a file, then prune it)
- // even though new file may put it over the threshold.
+ // Normally this does nothing and takes ~0 time.
+ // When it does something, it takes time, so it's safer to prune first
+ // (if needed) then download (so as not to download a file, then prune it)
+ // even though new file may put it over the threshold.
FileVisitorDNLS.pruneCache(fileDir, cacheMaxSizeB, cacheFraction);
- //then ensureInCache
- FileVisitorDNLS.ensureInCache( //it sets lastMod to 'now'
- cacheFromUrl, fileDir, localFullName); //throws Exception
+ // then ensureInCache
+ FileVisitorDNLS.ensureInCache( // it sets lastMod to 'now'
+ cacheFromUrl, fileDir, localFullName); // throws Exception
}
}
- /**
- * Make arrays to hold expected source add_offset, fillValue, missingValue, scale_factor, units.
+ /**
+ * Make arrays to hold expected source add_offset, fillValue, missingValue,
+ * scale_factor, units.
*
* @return true if successful
* @throws Throwable if serious trouble ("Too many open files")
*/
- private boolean makeExpected(Object[][] tDataVariables,
- StringArray dirList, ShortArray ftDirIndex,
- StringArray ftFileList, LongArray ftLastMod, LongArray ftSize) {
-
- //make arrays to hold addAttributes fillValue, missingValue
- // (so fake mv can be converted to NaN, so source min and max can be
- // determined exclusive of missingValue)
- //may be NaN
- addAttFillValue = new double[sourceDataNames.size()]; //filled with 0's!
+ private boolean makeExpected(Object[][] tDataVariables,
+ StringArray dirList, ShortArray ftDirIndex,
+ StringArray ftFileList, LongArray ftLastMod, LongArray ftSize) {
+
+ // make arrays to hold addAttributes fillValue, missingValue
+ // (so fake mv can be converted to NaN, so source min and max can be
+ // determined exclusive of missingValue)
+ // may be NaN
+ addAttFillValue = new double[sourceDataNames.size()]; // filled with 0's!
addAttMissingValue = new double[sourceDataNames.size()];
- Arrays.fill(addAttFillValue, Double.NaN); //2014-07-21 now filled with NaN's
+ Arrays.fill(addAttFillValue, Double.NaN); // 2014-07-21 now filled with NaN's
Arrays.fill(addAttMissingValue, Double.NaN);
for (int dv = 0; dv < sourceDataNames.size(); dv++) {
- Attributes tAddAtt = (Attributes)tDataVariables[dv][2];
- //if ("depth".equals(sourceDataNames.get(dv)))
- // String2.log("depth addAtt=" + tAddAtt);
+ Attributes tAddAtt = (Attributes) tDataVariables[dv][2];
+ // if ("depth".equals(sourceDataNames.get(dv)))
+ // String2.log("depth addAtt=" + tAddAtt);
if (tAddAtt != null) {
- addAttFillValue[ dv] = tAddAtt.getDouble("_FillValue"); //may be NaN
- addAttMissingValue[dv] = tAddAtt.getDouble("missing_value"); //may be NaN
+ addAttFillValue[dv] = tAddAtt.getDouble("_FillValue"); // may be NaN
+ addAttMissingValue[dv] = tAddAtt.getDouble("missing_value"); // may be NaN
}
- }
+ }
- //make arrays to hold expected source add_offset, fillValue, missingValue, scale_factor, units
- expectedAddOffset = new double[sourceDataNames.size()];
- expectedFillValue = new double[sourceDataNames.size()];
+ // make arrays to hold expected source add_offset, fillValue, missingValue,
+ // scale_factor, units
+ expectedAddOffset = new double[sourceDataNames.size()];
+ expectedFillValue = new double[sourceDataNames.size()];
expectedMissingValue = new double[sourceDataNames.size()];
- expectedScaleFactor = new double[sourceDataNames.size()];
- expectedUnits = new String[sourceDataNames.size()];
- //initially filled with NaNs
- Arrays.fill(expectedAddOffset, Double.NaN);
- Arrays.fill(expectedFillValue, Double.NaN);
+ expectedScaleFactor = new double[sourceDataNames.size()];
+ expectedUnits = new String[sourceDataNames.size()];
+ // initially filled with NaNs
+ Arrays.fill(expectedAddOffset, Double.NaN);
+ Arrays.fill(expectedFillValue, Double.NaN);
Arrays.fill(expectedMissingValue, Double.NaN);
- Arrays.fill(expectedScaleFactor, Double.NaN);
+ Arrays.fill(expectedScaleFactor, Double.NaN);
- //Try to fill expected arrays with info for first file in fileTable.
- //All files should have same info (unless var is missing).
+ // Try to fill expected arrays with info for first file in fileTable.
+ // All files should have same info (unless var is missing).
for (int f = 0; f < ftDirIndex.size(); f++) {
- //find a file that exists and is unchanged and dir and name match current settings
+ // find a file that exists and is unchanged and dir and name match current
+ // settings
String dir = dirList.get(ftDirIndex.get(f));
String name = ftFileList.get(f);
if (filesAreLocal) {
long lastMod = File2.getLastModified(dir + name);
- if (lastMod == 0 || ftLastMod.get(f) != lastMod) //0=trouble: unavailable or changed
+ if (lastMod == 0 || ftLastMod.get(f) != lastMod) // 0=trouble: unavailable or changed
continue;
long size = File2.length(dir + name);
- if (size < 0 || size == Long.MAX_VALUE ||
- (filesAreLocal && ftSize.get(f) != size)) //-1=touble: unavailable or changed
+ if (size < 0 || size == Long.MAX_VALUE ||
+ (filesAreLocal && ftSize.get(f) != size)) // -1=touble: unavailable or changed
continue;
if (!dir.startsWith(fileDir))
continue;
@@ -2042,112 +2287,118 @@ private boolean makeExpected(Object[][] tDataVariables,
}
try {
- //get the metadata
+ // get the metadata
Table table = getSourceDataFromFile(dir, name,
- sourceDataNames, sourceDataTypes,
- -1, Double.NaN, Double.NaN,
- null, null, null, true, false); //getMetadata=true, getData=false. throws Throwable if trouble
- //String2.log("here 2");
+ sourceDataNames, sourceDataTypes,
+ -1, Double.NaN, Double.NaN,
+ null, null, null, true, false); // getMetadata=true, getData=false. throws Throwable if trouble
+ // String2.log("here 2");
- //get the expected attributes; ok if NaN or null
+ // get the expected attributes; ok if NaN or null
for (int dvNec = 0; dvNec < sourceDataNames.size(); dvNec++) {
String tName = sourceDataNames.get(dvNec);
int tableDv = table.findColumnNumber(tName);
- Attributes dvAtts = tableDv < 0? new Attributes() : table.columnAttributes(tableDv);
- expectedAddOffset[dvNec] = dvAtts.getDouble("add_offset");
- expectedFillValue[dvNec] = dvAtts.getDouble("_FillValue");
+ Attributes dvAtts = tableDv < 0 ? new Attributes() : table.columnAttributes(tableDv);
+ expectedAddOffset[dvNec] = dvAtts.getDouble("add_offset");
+ expectedFillValue[dvNec] = dvAtts.getDouble("_FillValue");
expectedMissingValue[dvNec] = dvAtts.getDouble("missing_value");
- expectedScaleFactor[dvNec] = dvAtts.getDouble("scale_factor");
- expectedUnits[dvNec] = dvAtts.getString("units");
+ expectedScaleFactor[dvNec] = dvAtts.getDouble("scale_factor");
+ expectedUnits[dvNec] = dvAtts.getString("units");
}
- //we got what we needed, no need to look at other files
- if (verbose)
- String2.log("got expected metadata from previously good file #" + f +
- " of " + ftDirIndex.size() + ": " + dir + name);
+ // we got what we needed, no need to look at other files
+ if (verbose)
+ String2.log("got expected metadata from previously good file #" + f +
+ " of " + ftDirIndex.size() + ": " + dir + name);
return true;
} catch (Throwable t) {
throw new RuntimeException("Unexpected error when getting expected attributes from " + dir + name, t);
}
}
- if (verbose) String2.log(
- "Didn't get expected attributes because there were no previously valid files,\n" +
- " or none of the previously valid files were unchanged!");
+ if (verbose)
+ String2.log(
+ "Didn't get expected attributes because there were no previously valid files,\n" +
+ " or none of the previously valid files were unchanged!");
return false;
}
- /**
- * The constructor for EDDTableFromHttpGet calls this to set httpGetRequiredVariableNames.
+ /**
+ * The constructor for EDDTableFromHttpGet calls this to set
+ * httpGetRequiredVariableNames.
*/
private void setHttpGetRequiredVariableNames(String tRequiredVariablesCSV) {
if (!String2.isSomething(tRequiredVariablesCSV))
throw new RuntimeException(
- String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
- datasetID + ": " +
- HTTP_GET_REQUIRED_VARIABLES + " MUST be in globalAttributes.");
+ String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
+ datasetID + ": " +
+ HTTP_GET_REQUIRED_VARIABLES + " MUST be in globalAttributes.");
httpGetRequiredVariableNames = StringArray.fromCSV(tRequiredVariablesCSV).toStringArray();
- if (verbose) String2.log(" " + HTTP_GET_REQUIRED_VARIABLES + "=" +
- String2.toCSSVString(httpGetRequiredVariableNames));
+ if (verbose)
+ String2.log(" " + HTTP_GET_REQUIRED_VARIABLES + "=" +
+ String2.toCSSVString(httpGetRequiredVariableNames));
}
- /**
+ /**
* The constructor for EDDTableFromHttpGet calls this after the variables are
- * created to set httpGetRequiredVariableTypes.
+ * created to set httpGetRequiredVariableTypes.
*/
private void setHttpGetRequiredVariableTypes() {
int n = httpGetRequiredVariableNames.length;
if (n == 0)
throw new RuntimeException(
- String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
- datasetID + ": " + HTTP_GET_REQUIRED_VARIABLES + " MUST have one or more variable names.");
+ String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
+ datasetID + ": " + HTTP_GET_REQUIRED_VARIABLES + " MUST have one or more variable names.");
httpGetRequiredVariableTypes = new String[n];
for (int i = 0; i < n; i++) {
String tSourceName = httpGetRequiredVariableNames[i];
int col = String2.indexOf(dataVariableSourceNames(), tSourceName);
if (col < 0)
throw new RuntimeException(
- String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
- datasetID + ": all " + HTTP_GET_REQUIRED_VARIABLES + " MUST be in the dataset (" + tSourceName + ").");
+ String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
+ datasetID + ": all " + HTTP_GET_REQUIRED_VARIABLES + " MUST be in the dataset ("
+ + tSourceName + ").");
httpGetRequiredVariableTypes[i] = dataVariables[col].sourceDataType();
}
}
-
-
- /** The constructor for EDDTableFromHttpGet calls this to set httpGetDirectoryStructure variables. */
+ /**
+ * The constructor for EDDTableFromHttpGet calls this to set
+ * httpGetDirectoryStructure variables.
+ */
private void setHttpGetDirectoryStructure(String tDirStructure) {
if (!String2.isSomething(tDirStructure))
throw new RuntimeException(
- String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
- datasetID + ": " +
- HTTP_GET_DIRECTORY_STRUCTURE + " MUST be in globalAttributes.");
+ String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
+ datasetID + ": " +
+ HTTP_GET_DIRECTORY_STRUCTURE + " MUST be in globalAttributes.");
httpGetDirectoryStructureColumnNames = new StringArray();
- httpGetDirectoryStructureNs = new IntArray();
- httpGetDirectoryStructureCalendars = new IntArray();
- EDDTableFromHttpGet.parseHttpGetDirectoryStructure(tDirStructure,
- httpGetDirectoryStructureColumnNames,
- httpGetDirectoryStructureNs,
- httpGetDirectoryStructureCalendars);
- if (verbose) String2.log(
- " httpGetDirectoryStructureColumnNames=" + httpGetDirectoryStructureColumnNames.toString() + "\n" +
- " httpGetDirectoryStructureNs=" + httpGetDirectoryStructureNs.toString() + "\n" +
- " httpGetDirectoryStructureCalendars=" + httpGetDirectoryStructureCalendars.toString());
+ httpGetDirectoryStructureNs = new IntArray();
+ httpGetDirectoryStructureCalendars = new IntArray();
+ EDDTableFromHttpGet.parseHttpGetDirectoryStructure(tDirStructure,
+ httpGetDirectoryStructureColumnNames,
+ httpGetDirectoryStructureNs,
+ httpGetDirectoryStructureCalendars);
+ if (verbose)
+ String2.log(
+ " httpGetDirectoryStructureColumnNames=" + httpGetDirectoryStructureColumnNames.toString() + "\n" +
+ " httpGetDirectoryStructureNs=" + httpGetDirectoryStructureNs.toString() + "\n" +
+ " httpGetDirectoryStructureCalendars=" + httpGetDirectoryStructureCalendars.toString());
}
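+    // Illustrative sketch (hypothetical attribute value): a datasets.xml setting
+    // like <att name="httpGetDirectoryStructure">stationID/2months</att>
+    // is parsed into three parallel lists, one entry per directory level:
+    // a column name for column-based levels (e.g., stationID) and an
+    // N + Calendar2 time-unit pair for time-based levels (e.g., 2 + MONTH).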
- /**
- * The constructor for EDDTableFromHttpGet calls this to set HttpGetKeys.
+ /**
+ * The constructor for EDDTableFromHttpGet calls this to set HttpGetKeys.
*
* @param tHttpGetKeys a CSV of author_key values.
*/
private void setHttpGetKeys(String tHttpGetKeys) {
- String msg = String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
- datasetID + ": ";
+ String msg = String2.ERROR + " in EDDTableFromHttpGet constructor for datasetID=" +
+ datasetID + ": ";
String inForm = "Each of the httpGetKeys must be in the form author_key, with only ASCII characters (but no space, ', \", or comma), and where the key is at least 8 characters long.";
if (tHttpGetKeys == null ||
- tHttpGetKeys.indexOf('\"') >= 0 || //be safe, avoid trickery
- tHttpGetKeys.indexOf('\'') >= 0) //be safe, avoid trickery
+ tHttpGetKeys.indexOf('\"') >= 0 || // be safe, avoid trickery
+ tHttpGetKeys.indexOf('\'') >= 0) // be safe, avoid trickery
throw new RuntimeException(msg + inForm);
httpGetKeys = new HashSet();
String keyAr[] = StringArray.arrayFromCSV(tHttpGetKeys);
@@ -2155,27 +2406,27 @@ private void setHttpGetKeys(String tHttpGetKeys) {
if (String2.isSomething(keyAr[i])) {
keyAr[i] = keyAr[i].trim();
int po = keyAr[i].indexOf('_');
- if (po <= 0 || //can't be 0: so author must be something
- po >= keyAr[i].length() - 8 || //key must be 8+ chars
- !String2.isAsciiPrintable(keyAr[i]) ||
- keyAr[i].indexOf(' ') >= 0 || //isAsciiPrintable allows ' ' (be safe, avoid trickery)
- keyAr[i].indexOf(',') >= 0) { //isAsciiPrintable allows , (be safe, avoid trickery)
+ if (po <= 0 || // can't be 0: so author must be something
+ po >= keyAr[i].length() - 8 || // key must be 8+ chars
+ !String2.isAsciiPrintable(keyAr[i]) ||
+ keyAr[i].indexOf(' ') >= 0 || // isAsciiPrintable allows ' ' (be safe, avoid trickery)
+ keyAr[i].indexOf(',') >= 0) { // isAsciiPrintable allows , (be safe, avoid trickery)
throw new RuntimeException(msg + inForm + " (key #" + i + ")");
} else {
- httpGetKeys.add(keyAr[i]); //not String2.canonical, because then publicly accessible
+ httpGetKeys.add(keyAr[i]); // not String2.canonical, because then publicly accessible
}
}
}
if (httpGetKeys.size() == 0)
throw new RuntimeException(msg +
- HTTP_GET_KEYS + " MUST be in globalAttributes.");
+ HTTP_GET_KEYS + " MUST be in globalAttributes.");
}
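+    // Illustrative example (hypothetical keys): a valid setting is
+    //   <att name="httpGetKeys">JohnSmith_someKey12345, AdaLovelace_anotherKey9</att>
+    // Each item is author + '_' + key, where the key is 8+ printable ASCII
+    // characters with no space, single quote, double quote, or comma.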
/**
* This extracts data from the fileName.
*
* @param fileName
- * @return the extracted String
+ * @return the extracted String
*/
protected String extractFromFileName(String fileName) {
@@ -2185,224 +2436,233 @@ protected String extractFromFileName(String fileName) {
m = preExtractPattern.matcher(eName);
if (m.find()) {
eName = eName.substring(0, m.start()) + eName.substring(m.end());
- //if (debugMode) String2.log("> extractFromFileName found preExtract, result=" + eName);
+ // if (debugMode) String2.log("> extractFromFileName found preExtract, result="
+ // + eName);
}
}
if (postExtractPattern != null) {
m = postExtractPattern.matcher(eName);
if (m.find()) {
eName = eName.substring(0, m.start()) + eName.substring(m.end());
- //if (debugMode) String2.log("> extractFromFileName postExtract, result=" + eName);
+ // if (debugMode) String2.log("> extractFromFileName postExtract, result=" +
+ // eName);
}
}
m = extractPattern.matcher(eName);
if (m.find()) {
eName = eName.substring(m.start(), m.end());
- //if (debugMode) String2.log("> extractFromFileName found extract, result=" + eName);
+ // if (debugMode) String2.log("> extractFromFileName found extract, result=" +
+ // eName);
}
- //if (debugMode) String2.log("> extractFromFileName converted " + fileName + " -> " + eName);
+ // if (debugMode) String2.log("> extractFromFileName converted " + fileName + "
+ // -> " + eName);
return eName;
}
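+    // Worked example (hypothetical patterns and file name, for illustration):
+    //   fileName           = "wtep_20230115.nc"
+    //   preExtractPattern  = "wtep_"    -> eName = "20230115.nc"
+    //   postExtractPattern = "\\.nc"    -> eName = "20230115"
+    //   extractPattern     = "(\\d{8})" -> extracted String = "20230115"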
- /**
- * This is used by the constructor and lowUpdate to ensure that a new
+ /**
+ * This is used by the constructor and lowUpdate to ensure that a new
* file's attributes are compatible with the expected attributes.
*
- * @param dvName dataVariable sourceName
- * @param dv number
+     * @param dvName the dataVariable's sourceName
+     * @param dv     the dataVariable's index number
* @param dvSourceAtts for the variable in the new file
* @throws RuntimeException if not compatible
*/
- protected void testIfNewFileAttsAreCompatible(String dvName, int dv,
- Attributes dvSourceAtts) {
+ protected void testIfNewFileAttsAreCompatible(String dvName, int dv,
+ Attributes dvSourceAtts) {
- double tAddOffset = dvSourceAtts.getDouble("add_offset");
- double tFillValue = dvSourceAtts.getDouble("_FillValue");
+ double tAddOffset = dvSourceAtts.getDouble("add_offset");
+ double tFillValue = dvSourceAtts.getDouble("_FillValue");
double tMissingValue = dvSourceAtts.getDouble("missing_value");
- double tScaleFactor = dvSourceAtts.getDouble("scale_factor");
- String tUnits = dvSourceAtts.getString("units");
- if (Double.isNaN(expectedAddOffset[ dv]))
- expectedAddOffset[ dv] = tAddOffset;
- if (Double.isNaN(expectedFillValue[ dv]))
- expectedFillValue[ dv] = tFillValue;
- if (Double.isNaN(expectedMissingValue[dv]))
- expectedMissingValue[dv] = tMissingValue;
- if (Double.isNaN(expectedScaleFactor[ dv]))
- expectedScaleFactor[ dv] = tScaleFactor;
- if (expectedUnits[dv] == null) expectedUnits[dv] = tUnits;
+ double tScaleFactor = dvSourceAtts.getDouble("scale_factor");
+ String tUnits = dvSourceAtts.getString("units");
+ if (Double.isNaN(expectedAddOffset[dv]))
+ expectedAddOffset[dv] = tAddOffset;
+ if (Double.isNaN(expectedFillValue[dv]))
+ expectedFillValue[dv] = tFillValue;
+ if (Double.isNaN(expectedMissingValue[dv]))
+ expectedMissingValue[dv] = tMissingValue;
+ if (Double.isNaN(expectedScaleFactor[dv]))
+ expectedScaleFactor[dv] = tScaleFactor;
+ if (expectedUnits[dv] == null)
+ expectedUnits[dv] = tUnits;
String oNEe = " sourceAttribute value observed!=expected for sourceName=" + dvName + ".";
- //if null, skip test, since a given file may not have some variable
- //unfortunate: it is also possible that this file has the variable, but not this attribute
- // but in that case, reasonable to pretend it should have the expected attribute value.
- Test.ensureEqual(tAddOffset, expectedAddOffset[ dv], "add_offset" + oNEe);
+ // if null, skip test, since a given file may not have some variable
+ // unfortunate: it is also possible that this file has the variable, but not
+ // this attribute
+ // but in that case, reasonable to pretend it should have the expected attribute
+ // value.
+ Test.ensureEqual(tAddOffset, expectedAddOffset[dv], "add_offset" + oNEe);
if (!Double.isNaN(tFillValue))
- Test.ensureEqual(tFillValue, expectedFillValue[ dv], "_FillValue" + oNEe);
+ Test.ensureEqual(tFillValue, expectedFillValue[dv], "_FillValue" + oNEe);
if (!Double.isNaN(tMissingValue))
Test.ensureEqual(tMissingValue, expectedMissingValue[dv], "missing_value" + oNEe);
- Test.ensureEqual(tScaleFactor, expectedScaleFactor[ dv], "scale_factor" + oNEe);
+ Test.ensureEqual(tScaleFactor, expectedScaleFactor[dv], "scale_factor" + oNEe);
if (!Units2.udunitsAreEquivalent(tUnits, expectedUnits[dv]))
- Test.ensureEqual(tUnits, expectedUnits[dv], "units" + oNEe);
+ Test.ensureEqual(tUnits, expectedUnits[dv], "units" + oNEe);
}
-
/**
* This sets the values on a local fileTable row.
*
- * @param tFileS is just name.ext .
- * @param tTable table (with source atts and data) from a data file
+     * @param tFileS just the file's name.ext (no directory).
+ * @param tTable table (with source atts and data) from a data file
* @param logAsRowNumber the fileTable row number to be printed in log messages,
- * or -1 for no log messages
+ * or -1 for no log messages
     * @throws Throwable if trouble
*/
protected void setFileTableRow(Table fileTable, int fileListPo,
- int tDirI, String tFileS, long tLastMod, long tSize, Table tTable, int logAsRowNumber) {
+ int tDirI, String tFileS, long tLastMod, long tSize, Table tTable, int logAsRowNumber) {
- ShortArray ftDirIndex = (ShortArray)fileTable.getColumn(FT_DIR_INDEX_COL); //0
- StringArray ftFileList = (StringArray)fileTable.getColumn(FT_FILE_LIST_COL); //1
- LongArray ftLastMod = (LongArray)fileTable.getColumn(FT_LAST_MOD_COL); //2
- LongArray ftSize = (LongArray)fileTable.getColumn(FT_SIZE_COL); //3
- DoubleArray ftSortedSpacing = (DoubleArray)fileTable.getColumn(FT_SORTED_SPACING_COL); //4
+ ShortArray ftDirIndex = (ShortArray) fileTable.getColumn(FT_DIR_INDEX_COL); // 0
+ StringArray ftFileList = (StringArray) fileTable.getColumn(FT_FILE_LIST_COL); // 1
+ LongArray ftLastMod = (LongArray) fileTable.getColumn(FT_LAST_MOD_COL); // 2
+ LongArray ftSize = (LongArray) fileTable.getColumn(FT_SIZE_COL); // 3
+ DoubleArray ftSortedSpacing = (DoubleArray) fileTable.getColumn(FT_SORTED_SPACING_COL); // 4
ftDirIndex.setInt(fileListPo, tDirI);
ftFileList.set(fileListPo, tFileS);
ftLastMod.set(fileListPo, tLastMod);
ftSize.set(fileListPo, tSize);
- ftSortedSpacing.set(fileListPo, -1); //default, usually set below
+ ftSortedSpacing.set(fileListPo, -1); // default, usually set below
- //get min,max for dataVariables
+ // get min,max for dataVariables
int tTableNCols = tTable.nColumns();
int ndv = sourceDataTypes.length;
for (int dv = 0; dv < ndv; dv++) {
- fileTable.setStringData(dv0 + dv*3 + 0, fileListPo, ""); //numeric will be NaN
- fileTable.setStringData(dv0 + dv*3 + 1, fileListPo, "");
- fileTable.setStringData(dv0 + dv*3 + 2, fileListPo, ""); //hasNaN unspecified
+ fileTable.setStringData(dv0 + dv * 3 + 0, fileListPo, ""); // numeric will be NaN
+ fileTable.setStringData(dv0 + dv * 3 + 1, fileListPo, "");
+ fileTable.setStringData(dv0 + dv * 3 + 2, fileListPo, ""); // hasNaN unspecified
- //skip this variable if not in this source file
+ // skip this variable if not in this source file
String dvName = sourceDataNames.get(dv);
int c = tTable.findColumnNumber(dvName);
if (c < 0) {
- //String2.log(" " + dvName + " not in source file");
+ // String2.log(" " + dvName + " not in source file");
continue;
}
- //attributes are as expected???
+ // attributes are as expected???
Attributes dvSourceAtts = tTable.columnAttributes(c);
- testIfNewFileAttsAreCompatible( //throws exception if trouble
- dvName, dv, dvSourceAtts);
-
- //convert missing_value and _FillValue to NaN
- //doubles? type not important here, tTable is temporary
- //others attributes (e.g., scale, add_offset, units) not needed for calculation of min max below
- //(if data is packed, missing_value and _FillValue are packed, too)
- if (!Double.isNaN(addAttFillValue[ dv])) dvSourceAtts.set("_FillValue", addAttFillValue[ dv]);
- if (!Double.isNaN(addAttMissingValue[dv])) dvSourceAtts.set("missing_value", addAttMissingValue[dv]);
+ testIfNewFileAttsAreCompatible( // throws exception if trouble
+ dvName, dv, dvSourceAtts);
+
+ // convert missing_value and _FillValue to NaN
+ // doubles? type not important here, tTable is temporary
+ // others attributes (e.g., scale, add_offset, units) not needed for calculation
+ // of min max below
+ // (if data is packed, missing_value and _FillValue are packed, too)
+ if (!Double.isNaN(addAttFillValue[dv]))
+ dvSourceAtts.set("_FillValue", addAttFillValue[dv]);
+ if (!Double.isNaN(addAttMissingValue[dv]))
+ dvSourceAtts.set("missing_value", addAttMissingValue[dv]);
tTable.convertToStandardMissingValues(c);
- //process source min and max for this column's data
+ // process source min and max for this column's data
PrimitiveArray pa = tTable.getColumn(c);
PAType tPaPAType = pa.elementType();
if (tPaPAType == PAType.STRING ||
- tPaPAType == PAType.CHAR ||
- tPaPAType == PAType.LONG ||
- tPaPAType == PAType.ULONG) { //so long and ulong are set exactly
- //get [0]=n,[1]=min,[2]=max (of non-null and non-"") as Strings
- String nMinMax[] = pa.getNMinMax();
+ tPaPAType == PAType.CHAR ||
+ tPaPAType == PAType.LONG ||
+ tPaPAType == PAType.ULONG) { // so long and ulong are set exactly
+ // get [0]=n,[1]=min,[2]=max (of non-null and non-"") as Strings
+ String nMinMax[] = pa.getNMinMax();
int tn = String2.parseInt(nMinMax[0]);
- if (tn > 0) { //there is a non-"" value
- fileTable.setStringData(dv0 + dv*3 + 0, fileListPo, nMinMax[1]);
- fileTable.setStringData(dv0 + dv*3 + 1, fileListPo, nMinMax[2]);
+ if (tn > 0) { // there is a non-"" value
+ fileTable.setStringData(dv0 + dv * 3 + 0, fileListPo, nMinMax[1]);
+ fileTable.setStringData(dv0 + dv * 3 + 1, fileListPo, nMinMax[2]);
}
- fileTable.setIntData(dv0 + dv*3 + 2, fileListPo, tn < pa.size()? 1 : 0); //hasNaN
+ fileTable.setIntData(dv0 + dv * 3 + 2, fileListPo, tn < pa.size() ? 1 : 0); // hasNaN
- } else {
- //numeric
+ } else {
+ // numeric
double stats[] = pa.calculateStats();
int tn = Math2.roundToInt(stats[PrimitiveArray.STATS_N]);
- //if (dvName.equals("bucket_sal")) String2.log(" " + dvName + " stats=" + String2.toCSSVString(stats));
- fileTable.setIntData(dv0 + dv*3 + 2, fileListPo, tn < pa.size()? 1 : 0); //hasNaN
+ // if (dvName.equals("bucket_sal")) String2.log(" " + dvName + " stats=" +
+ // String2.toCSSVString(stats));
+ fileTable.setIntData(dv0 + dv * 3 + 2, fileListPo, tn < pa.size() ? 1 : 0); // hasNaN
if (tn > 0) {
- fileTable.setDoubleData(dv0 + dv*3 + 0, fileListPo, stats[PrimitiveArray.STATS_MIN]);
- fileTable.setDoubleData(dv0 + dv*3 + 1, fileListPo, stats[PrimitiveArray.STATS_MAX]);
+ fileTable.setDoubleData(dv0 + dv * 3 + 0, fileListPo, stats[PrimitiveArray.STATS_MIN]);
+ fileTable.setDoubleData(dv0 + dv * 3 + 1, fileListPo, stats[PrimitiveArray.STATS_MAX]);
if (dv == sortedDVI) {
String ts = pa.isAscending();
double tSortedSpacing;
if (tn > 1 && ts.length() == 0) {
ts = pa.isEvenlySpaced();
if (ts.length() == 0) {
- tSortedSpacing =
- (stats[PrimitiveArray.STATS_MAX] -
- stats[PrimitiveArray.STATS_MIN]) / (tn - 1);
+ tSortedSpacing = (stats[PrimitiveArray.STATS_MAX] -
+ stats[PrimitiveArray.STATS_MIN]) / (tn - 1);
if (logAsRowNumber >= 0)
- String2.log(logAsRowNumber + " " + sortedColumnSourceName +
- " is evenly spaced=" + tSortedSpacing);
- } else {
+ String2.log(logAsRowNumber + " " + sortedColumnSourceName +
+ " is evenly spaced=" + tSortedSpacing);
+ } else {
if (logAsRowNumber >= 0)
- String2.log(logAsRowNumber + " " + sortedColumnSourceName +
- " isAscending but " + ts);
+ String2.log(logAsRowNumber + " " + sortedColumnSourceName +
+ " isAscending but " + ts);
tSortedSpacing = 0;
}
} else {
- if (logAsRowNumber >= 0)
- String2.log(logAsRowNumber + " " +
- sortedColumnSourceName + " " + ts);
+ if (logAsRowNumber >= 0)
+ String2.log(logAsRowNumber + " " +
+ sortedColumnSourceName + " " + ts);
tSortedSpacing = -1;
}
ftSortedSpacing.set(fileListPo, tSortedSpacing);
}
}
}
- //if (logThis)
- // String2.log(dvName +
- // " min=" + fileTable.getStringData(dv0 + dv*3 + 0, fileListPo) +
- // " max=" + fileTable.getStringData(dv0 + dv*3 + 1, fileListPo));
- // " hasNaN=" + fileTable.getIntData( dv0 + dv*3 + 2, fileListPo));
+ // if (logThis)
+ // String2.log(dvName +
+ // " min=" + fileTable.getStringData(dv0 + dv*3 + 0, fileListPo) +
+ // " max=" + fileTable.getStringData(dv0 + dv*3 + 1, fileListPo));
+ // " hasNaN=" + fileTable.getIntData( dv0 + dv*3 + 2, fileListPo));
}
}
- /**
- * This is used to make a new minMaxTable just before saving a changed fileTable.
+ /**
+ * This is used to make a new minMaxTable just before saving a changed
+ * fileTable.
*
- * @param dirList the up-to-date dirList
+ * @param dirList the up-to-date dirList
* @param fileTable the new fileTable
* @return the new minMaxTable
- */
+ */
protected Table makeMinMaxTable(StringArray dirList, Table fileTable) {
- ShortArray ftDirIndex = (ShortArray)fileTable.getColumn(FT_DIR_INDEX_COL); //0
- StringArray ftFileList = (StringArray)fileTable.getColumn(FT_FILE_LIST_COL); //1
+ ShortArray ftDirIndex = (ShortArray) fileTable.getColumn(FT_DIR_INDEX_COL); // 0
+ StringArray ftFileList = (StringArray) fileTable.getColumn(FT_FILE_LIST_COL); // 1
Table minMaxTable = new Table();
int ndv = sourceDataTypes.length;
for (int dv = 0; dv < ndv; dv++) {
- //String2.log("dv=" + dv + " " + sourceDataTypes[dv]);
- PrimitiveArray minMaxPa =
- PrimitiveArray.factory(PAType.fromCohortString(sourceDataTypes[dv]), 3, false);
- minMaxPa.addString(""); //min initially "" or NaN
- minMaxPa.addString(""); //max initially "" or NaN
- minMaxPa.addString(""); //hasNaN initially NaN
+ // String2.log("dv=" + dv + " " + sourceDataTypes[dv]);
+ PrimitiveArray minMaxPa = PrimitiveArray.factory(PAType.fromCohortString(sourceDataTypes[dv]), 3, false);
+ minMaxPa.addString(""); // min initially "" or NaN
+ minMaxPa.addString(""); // max initially "" or NaN
+ minMaxPa.addString(""); // hasNaN initially NaN
minMaxTable.addColumn(safeSourceDataNames.get(dv), minMaxPa);
- //calculate min of the min values
- PrimitiveArray pa = fileTable.getColumn(dv0 + dv*3 + 0);
- //String2.log(sourceDataNames.get(dv) + " minCol=" + pa.toString());
+ // calculate min of the min values
+ PrimitiveArray pa = fileTable.getColumn(dv0 + dv * 3 + 0);
+ // String2.log(sourceDataNames.get(dv) + " minCol=" + pa.toString());
boolean isCharOrString = pa.elementType() == PAType.CHAR ||
- pa.elementType() == PAType.STRING;
- String nMinMax[] = pa.getNMinMax(); //[0]=n, [1]=min, [2]=max as strings
+ pa.elementType() == PAType.STRING;
+ String nMinMax[] = pa.getNMinMax(); // [0]=n, [1]=min, [2]=max as strings
int tn = String2.parseInt(nMinMax[0]);
if (tn > 0) {
minMaxPa.setString(0, nMinMax[1]);
if (reallyVerbose) {
int row = pa.indexOf(nMinMax[1]);
- String2.log(sourceDataNames.get(dv) + " minMin=" +
- (isCharOrString? String2.toJson(nMinMax[1], 256) : "" + nMinMax[1]) +
- (row < 0? " (fileTable row not found)" :
- " file=" + dirList.get(ftDirIndex.get(row)) + ftFileList.get(row)));
+ String2.log(sourceDataNames.get(dv) + " minMin=" +
+ (isCharOrString ? String2.toJson(nMinMax[1], 256) : "" + nMinMax[1]) +
+ (row < 0 ? " (fileTable row not found)"
+ : " file=" + dirList.get(ftDirIndex.get(row)) + ftFileList.get(row)));
}
}
- //calculate max of the max values
- pa = fileTable.getColumn(dv0 + dv*3 + 1);
- //String2.log(sourceDataNames.get(dv) + " maxCol=" + pa.toString());
+ // calculate max of the max values
+ pa = fileTable.getColumn(dv0 + dv * 3 + 1);
+ // String2.log(sourceDataNames.get(dv) + " maxCol=" + pa.toString());
nMinMax = pa.getNMinMax();
tn = String2.parseInt(nMinMax[0]);
if (tn > 0) {
@@ -2410,17 +2670,18 @@ protected Table makeMinMaxTable(StringArray dirList, Table fileTable) {
if (reallyVerbose) {
int row = pa.indexOf(nMinMax[2]);
String2.log(sourceDataNames.get(dv) + " maxMax=" +
- (isCharOrString? String2.toJson(nMinMax[2], 256) : "" + nMinMax[2]) +
- (row < 0? " (fileTable row not found)" :
- " file=" + dirList.get(ftDirIndex.get(row)) + ftFileList.get(row)));
+ (isCharOrString ? String2.toJson(nMinMax[2], 256) : "" + nMinMax[2]) +
+ (row < 0 ? " (fileTable row not found)"
+ : " file=" + dirList.get(ftDirIndex.get(row)) + ftFileList.get(row)));
}
}
- //calculate hasNaN
- pa = fileTable.getColumn(dv0 + dv*3 + 2);
- minMaxPa.setInt(2, pa.indexOf("1") >= 0? 1 : 0); //does any file hasNaN?
+ // calculate hasNaN
+ pa = fileTable.getColumn(dv0 + dv * 3 + 2);
+ minMaxPa.setInt(2, pa.indexOf("1") >= 0 ? 1 : 0); // does any file hasNaN?
}
- if (verbose) String2.log("minMaxTable=\n" + minMaxTable.dataToString()); //it's always small
+ if (verbose)
+ String2.log("minMaxTable=\n" + minMaxTable.dataToString()); // it's always small
return minMaxTable;
}
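+    // Column layout reminder: fileTable columns 0-4 are dirIndex, fileList,
+    // lastMod, size, and sortedSpacing; then each dataVariable dv gets 3
+    // columns, dv0 + dv*3 + 0|1|2, holding that file's sourceMin, sourceMax,
+    // and hasNaN (1 if any missing values). makeMinMaxTable reduces those to
+    // one 3-row column (min, max, hasNaN) per dataVariable.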
@@ -2698,26 +2959,33 @@ private boolean handleEventContexts(StringArray contexts, String msg) throws Thr
}
/**
- * This does the actual incremental update of this dataset
+ * This does the actual incremental update of this dataset
* (i.e., for real time datasets).
*
- * Concurrency issue: The changes here are first prepared and
+ *
+ * Concurrency issue: The changes here are first prepared and
* then applied as quickly as possible (but not atomically!).
* There is a chance that another thread will get inconsistent information
* (from some things updated and some things not yet updated).
* But I don't want to synchronize all activities of this class.
*
- * @param language the index of the selected language
- * @param msg the start of a log message, e.g., "update(thisDatasetID): ".
+ * @param language the index of the selected language
+ * @param msg the start of a log message, e.g.,
+ * "update(thisDatasetID): ".
* @param startUpdateMillis the currentTimeMillis at the start of this update.
* @return true if a change was made
- * @throws Throwable if serious trouble.
- * For simple failures, this writes info to log.txt but doesn't throw an exception.
- * If the dataset has changed in a serious / incompatible way and needs a full
- * reload, this throws WaitThenTryAgainException
- * (usually, catcher calls LoadDatasets.tryToUnload(...) and EDD.requestReloadASAP(tDatasetID))..
- * If the changes needed are probably fine but are too extensive to deal with here,
- * this calls EDD.requestReloadASAP(tDatasetID) and returns without doing anything.
+     * @throws Throwable if serious trouble.
+     *                   For simple failures, this writes info to log.txt but
+     *                   doesn't throw an exception.
+     *                   If the dataset has changed in a serious / incompatible
+     *                   way and needs a full reload, this throws
+     *                   WaitThenTryAgainException (usually, catcher calls
+     *                   LoadDatasets.tryToUnload(...) and
+     *                   EDD.requestReloadASAP(tDatasetID)).
+     *                   If the changes needed are probably fine but are too
+     *                   extensive to deal with here, this calls
+     *                   EDD.requestReloadASAP(tDatasetID) and returns without
+     *                   doing anything.
*/
public boolean lowUpdate(int language, String msg, long startUpdateMillis) throws Throwable {
@@ -2726,31 +2994,34 @@ public boolean lowUpdate(int language, String msg, long startUpdateMillis) throw
return false;
}
- //Most of this lowUpdate code is identical in EDDGridFromFiles and EDDTableFromFiles
+ // Most of this lowUpdate code is identical in EDDGridFromFiles and
+ // EDDTableFromFiles
if (watchDirectory == null)
- return false; //no changes
+ return false; // no changes
- //get the file events
+ // get the file events
ArrayList eventKinds = new ArrayList();
- StringArray contexts = new StringArray();
+ StringArray contexts = new StringArray();
int nEvents = watchDirectory.getEvents(eventKinds, contexts);
if (nEvents == 0) {
- if (reallyVerbose) String2.log(msg + "found 0 events.");
- return false; //no changes
+ if (reallyVerbose)
+ String2.log(msg + "found 0 events.");
+ return false; // no changes
}
- //if any OVERFLOW, reload this dataset
+ // if any OVERFLOW, reload this dataset
for (int evi = 0; evi < nEvents; evi++) {
if (eventKinds.get(evi) == WatchDirectory.OVERFLOW) {
- if (reallyVerbose) String2.log(msg +
- "caught OVERFLOW event in " + contexts.get(evi) +
- ", so I called requestReloadASAP() instead of making changes here.");
+ if (reallyVerbose)
+ String2.log(msg +
+ "caught OVERFLOW event in " + contexts.get(evi) +
+ ", so I called requestReloadASAP() instead of making changes here.");
requestReloadASAP();
- return false;
+ return false;
}
}
return handleEventContexts(contexts, msg);
-
+
}
/**
@@ -2765,20 +3036,20 @@ protected void updateDestinationMinMax(Table tMinMaxTable) {
PrimitiveArray minMaxPa = tMinMaxTable.getColumn(dv);
EDV edv = dataVariables[dv];
if (edv.isFixedValue()) // min/max won't change
- continue;
+ continue;
if (minMaxPa instanceof StringArray) {
if (edv instanceof EDVTimeStamp) {
- EDVTimeStamp edvts = (EDVTimeStamp)edv;
+ EDVTimeStamp edvts = (EDVTimeStamp) edv;
edvts.setDestinationMinMax(
- PAOne.fromDouble(edvts.sourceTimeToEpochSeconds(minMaxPa.getString(0))),
- PAOne.fromDouble(edvts.sourceTimeToEpochSeconds(minMaxPa.getString(1))));
+ PAOne.fromDouble(edvts.sourceTimeToEpochSeconds(minMaxPa.getString(0))),
+ PAOne.fromDouble(edvts.sourceTimeToEpochSeconds(minMaxPa.getString(1))));
edvts.setActualRangeFromDestinationMinMax();
}
- } else { //minMaxPa is numeric
+ } else { // minMaxPa is numeric
edv.setDestinationMinMaxFromSource(
- minMaxPa.getPAOne(0),
- minMaxPa.getPAOne(1));
+ minMaxPa.getPAOne(0),
+ minMaxPa.getPAOne(1));
edv.setActualRangeFromDestinationMinMax();
}
@@ -2789,94 +3060,99 @@ protected void updateDestinationMinMax(Table tMinMaxTable) {
combinedGlobalAttributes().set("geospatial_lat_min", edv.destinationMinDouble());
combinedGlobalAttributes().set("geospatial_lat_max", edv.destinationMaxDouble());
} else if (dv == altIndex || dv == depthIndex) {
- //this works with alt and depth because positive=up|down deals with meaning
+ // this works with alt and depth because positive=up|down deals with meaning
combinedGlobalAttributes().set("geospatial_vertical_min", edv.destinationMinDouble());
combinedGlobalAttributes().set("geospatial_vertical_max", edv.destinationMaxDouble());
} else if (dv == timeIndex) {
combinedGlobalAttributes().set("time_coverage_start", edv.destinationMinString());
- combinedGlobalAttributes().set("time_coverage_end", edv.destinationMaxString());
+ combinedGlobalAttributes().set("time_coverage_end", edv.destinationMaxString());
}
}
}
- /**
- * This gets the dirTable (perhaps the private copy) for read-only use.
+ /**
+ * This gets the dirTable (perhaps the private copy) for read-only use.
*
     * @throws Throwable if trouble
*/
public Table getDirTable() throws Throwable {
- Table tDirTable = fileTableInMemory?
- dirTable : tryToLoadDirFileTable(datasetDir() + DIR_TABLE_FILENAME); //may be null
+        Table tDirTable = fileTableInMemory ? dirTable
+                : tryToLoadDirFileTable(datasetDir() + DIR_TABLE_FILENAME); // may be null
Test.ensureNotNull(tDirTable, "dirTable");
return tDirTable;
}
- /**
- * This gets the fileTable (perhaps the private copy) for read-only use.
+ /**
+ * This gets the fileTable (perhaps the private copy) for read-only use.
*
     * @throws Throwable if trouble
*/
public Table getFileTable() throws Throwable {
- Table tFileTable = fileTableInMemory?
- fileTable : tryToLoadDirFileTable(datasetDir() + FILE_TABLE_FILENAME); //may be null
+        Table tFileTable = fileTableInMemory ? fileTable
+                : tryToLoadDirFileTable(datasetDir() + FILE_TABLE_FILENAME); // may be null
Test.ensureNotNull(tFileTable, "fileTable");
return tFileTable;
}
- /**
- * This gets a copy of the dirTable (not the private copy) for read/write use.
+ /**
+ * This gets a copy of the dirTable (not the private copy) for read/write use.
*
     * @return the table, not null
     * @throws Throwable if trouble
*/
public Table getDirTableCopy() throws Throwable {
- Table tDirTable = fileTableInMemory?
- (Table)(dirTable.clone()) : tryToLoadDirFileTable(datasetDir() + DIR_TABLE_FILENAME); //may be null
+ Table tDirTable = fileTableInMemory ? (Table) (dirTable.clone())
+ : tryToLoadDirFileTable(datasetDir() + DIR_TABLE_FILENAME); // may be null
Test.ensureNotNull(tDirTable, "dirTable");
return tDirTable;
}
- /**
- * This gets a copy of the fileTable (not the private copy) for read/write use.
+ /**
+ * This gets a copy of the fileTable (not the private copy) for read/write use.
*
     * @return the table, not null
     * @throws Throwable if trouble
*/
public Table getFileTableCopy() throws Throwable {
- Table tFileTable = fileTableInMemory?
- (Table)(fileTable.clone()) : tryToLoadDirFileTable(datasetDir() + FILE_TABLE_FILENAME); //may be null
+ Table tFileTable = fileTableInMemory ? (Table) (fileTable.clone())
+ : tryToLoadDirFileTable(datasetDir() + FILE_TABLE_FILENAME); // may be null
Test.ensureNotNull(tFileTable, "fileTable");
return tFileTable;
}
- /**
+ /**
* Try to load the dirTable or fileTable.
- * fileTable PrimitiveArrays: 0=ftDirIndex 1=ftFileList 2=ftLastMod 3=ftSize 4=ftSortedSpacing,
- * then sourceMin, sourceMax, hasNaN columns for each dv.
+ * fileTable PrimitiveArrays: 0=ftDirIndex 1=ftFileList 2=ftLastMod 3=ftSize
+ * 4=ftSortedSpacing,
+ * then sourceMin, sourceMax, hasNaN columns for each dv.
*
* @param fileName dirTableFileName or fileTableFileName
- * @return the dirTable fileTable (null if minor trouble, eg no such file)
- * @throws Throwable if serious trouble (e.g., Too many open files, out of memory)
+     * @return the dirTable or fileTable (null if minor trouble, e.g., no such file)
+     * @throws Throwable if serious trouble (e.g., Too many open files, out of memory)
*/
protected Table tryToLoadDirFileTable(String fileName) throws Throwable {
try {
if (File2.isFile(fileName)) {
Table table = new Table();
- //table.readFlatNc(fileName, null, 0); //standardizeWhat=0
- Test.ensureEqual(table.readEnhancedFlatNc(fileName, null), //it logs fileName and nRows=
- Table.ENHANCED_VERSION, "old/unsupported enhancedVersion");
+ // table.readFlatNc(fileName, null, 0); //standardizeWhat=0
+ Test.ensureEqual(table.readEnhancedFlatNc(fileName, null), // it logs fileName and nRows=
+ Table.ENHANCED_VERSION, "old/unsupported enhancedVersion");
int oldVersion = table.globalAttributes().getInt(_dirFileTableVersion_);
Test.ensureEqual(oldVersion,
- DIR_FILE_TABLE_VERSION, "old/unsupported " + _dirFileTableVersion_);
+ DIR_FILE_TABLE_VERSION, "old/unsupported " + _dirFileTableVersion_);
int fileStandardizeWhat = table.globalAttributes().getInt(STANDARDIZEWHAT);
if (fileStandardizeWhat == Integer.MAX_VALUE)
- fileStandardizeWhat = defaultStandardizeWhat(); //e.g., unspecified is same as (usually) 0
+ fileStandardizeWhat = defaultStandardizeWhat(); // e.g., unspecified is same as (usually) 0
Test.ensureEqual(fileStandardizeWhat, standardizeWhat,
- "Different value for 'standardizeWhat'.");
+ "Different value for 'standardizeWhat'.");
- //FUTURE: you can also test erddapVersion vs specific standardizeWhat features.
- // e.g., If standardizeWhat & 4096 == 4096 and the standardize units system changed in 1.88,
- // And erddapVersion<1.88, then throw error to force re-read all files.
+ // FUTURE: you can also test erddapVersion vs specific standardizeWhat features.
+ // e.g., If standardizeWhat & 4096 == 4096 and the standardize units system
+ // changed in 1.88,
+ // And erddapVersion<1.88, then throw error to force re-read all files.
String tErddapVersion = table.globalAttributes().getString("erddapVersion");
if (tErddapVersion == null)
tErddapVersion = "1.82";
@@ -2884,46 +3160,47 @@ protected Table tryToLoadDirFileTable(String fileName) throws Throwable {
return table;
} else {
- if (verbose) String2.log("dir/file table doesn't exist: " + fileName);
+ if (verbose)
+ String2.log("dir/file table doesn't exist: " + fileName);
return null;
}
} catch (Throwable t) {
String msg = MustBe.throwableToString(t);
- String2.log(String2.ERROR + " reading dir/file table " + fileName + "\n" +
- msg);
+ String2.log(String2.ERROR + " reading dir/file table " + fileName + "\n" +
+ msg);
- //serious problem?
+ // serious problem?
if (Thread.currentThread().isInterrupted() ||
- t instanceof InterruptedException ||
- msg.indexOf(Math2.TooManyOpenFiles) >= 0 ||
- msg.toLowerCase().indexOf(Math2.memory) >= 0)
- throw t;
+ t instanceof InterruptedException ||
+ msg.indexOf(Math2.TooManyOpenFiles) >= 0 ||
+ msg.toLowerCase().indexOf(Math2.memory) >= 0)
+ throw t;
- //if minor problem
+ // if minor problem
File2.delete(datasetDir() + DIR_TABLE_FILENAME);
File2.delete(datasetDir() + FILE_TABLE_FILENAME);
return null;
}
}
- /**
+ /**
     * This gets a table with the DNLS info for all the files.
* lastMod is type=LongArray epochMillis. size is type=LongArray.
*
* @throws Throwable if trouble
*/
public Table getDnlsTable() throws Throwable {
- //get a copy of the source file information
- Table tDirTable = getDirTableCopy(); //not null, throws Throwable
- Table tFileTable = getFileTableCopy(); //not null, throws Throwable
+ // get a copy of the source file information
+ Table tDirTable = getDirTableCopy(); // not null, throws Throwable
+ Table tFileTable = getFileTableCopy(); // not null, throws Throwable
- //make the results Table
+ // make the results Table
Table dnlsTable = FileVisitorDNLS.makeEmptyTable();
dnlsTable.setColumn(0, tFileTable.getColumn(FT_DIR_INDEX_COL));
dnlsTable.setColumn(1, tFileTable.getColumn(FT_FILE_LIST_COL));
dnlsTable.setColumn(2, tFileTable.getColumn(FT_LAST_MOD_COL));
- dnlsTable.setColumn(3, tFileTable.getColumn(FT_SIZE_COL));
- //convert dir Index to dir names
+ dnlsTable.setColumn(3, tFileTable.getColumn(FT_SIZE_COL));
+ // convert dir Index to dir names
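+        // (tDirTable's only column holds the directory names; prepending a
+        // 0..n-1 "dirIndex" column and joining on dnlsTable's column 0 looks up
+        // each row's dir name; removeColumn(0) then drops the index column.)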
tDirTable.addColumn(0, "dirIndex", new IntArray(0, tDirTable.nRows() - 1));
dnlsTable.join(1, 0, "", tDirTable);
dnlsTable.removeColumn(0);
@@ -2931,20 +3208,23 @@ public Table getDnlsTable() throws Throwable {
return dnlsTable;
}
- /**
- * This returns a fileTable
+ /**
+ * This returns a fileTable
* with valid files (or null if unavailable or any trouble).
* This is a copy of any internal data, so client can modify the contents.
*
* @param language the index of the selected language
- * @param nextPath is the partial path (with trailing slash) to be appended
- * onto the local fileDir (or wherever files are, even url).
+ * @param nextPath is the partial path (with trailing slash) to be appended
+ * onto the local fileDir (or wherever files are, even url).
* @return null if trouble,
- * or Object[3] where
- * [0] is a sorted table with file "Name" (String), "Last modified" (long millis),
- * "Size" (long), and "Description" (String, but usually no content),
- * [1] is a sorted String[] with the short names of directories that are 1 level lower, and
- * [2] is the local directory corresponding to this (or null, if not a local dir).
+ * or Object[3] where
+ * [0] is a sorted table with file "Name" (String), "Last modified"
+ * (long millis),
+ * "Size" (long), and "Description" (String, but usually no content),
+ * [1] is a sorted String[] with the short names of directories that are
+ * 1 level lower, and
+ * [2] is the local directory corresponding to this (or null, if not a
+ * local dir).
*/
public Object[] accessibleViaFilesFileTable(int language, String nextPath) {
if (!accessibleViaFiles)
@@ -2952,10 +3232,11 @@ public Object[] accessibleViaFilesFileTable(int language, String nextPath) {
try {
Table dnlsTable = getDnlsTable();
- //remove files other than fileDir+nextPath and generate array of immediate subDir names
+ // remove files other than fileDir+nextPath and generate array of immediate
+ // subDir names
String subDirs[] = FileVisitorDNLS.reduceDnlsTableToOneDir(dnlsTable, fileDir + nextPath);
accessibleViaFilesMakeReadyForUser(dnlsTable);
- return new Object[]{dnlsTable, subDirs, fileDir + nextPath};
+ return new Object[] { dnlsTable, subDirs, fileDir + nextPath };
} catch (Throwable t) {
String2.log(MustBe.throwableToString(t));
@@ -2964,62 +3245,67 @@ public Object[] accessibleViaFilesFileTable(int language, String nextPath) {
}
/**
- * This converts a relativeFileName into a full localFileName (which may be a url).
+ * This converts a relativeFileName into a full localFileName (which may be a
+ * url).
*
- * @param language the index of the selected language
+ * @param language the index of the selected language
* @param relativeFileName (for most EDDTypes, just offset by fileDir)
     * @return full localFileName or null if any error (including when the file isn't in the
- * list of valid files for this dataset)
+ * list of valid files for this dataset)
*/
public String accessibleViaFilesGetLocal(int language, String relativeFileName) {
- //identical code in EDDGridFromFiles and EDDTableFromFiles
+ // identical code in EDDGridFromFiles and EDDTableFromFiles
if (!accessibleViaFiles)
- return null;
+ return null;
String msg = datasetID() + " accessibleViaFilesGetLocal(" + relativeFileName + "): ";
try {
String fullName = fileDir + relativeFileName;
- String localDir = File2.getDirectory(fullName);
+ String localDir = File2.getDirectory(fullName);
String nameAndExt = File2.getNameAndExtension(fullName);
- //ensure that fullName is in file list
+ // ensure that fullName is in file list
- //get dir index
- Table dirTable = getDirTable(); //no need to get copy since not changing it
- Table fileTable = getFileTable(); //no need to get copy since not changing it
- PrimitiveArray dirNames = dirTable.getColumn(0); //the only column
+ // get dir index
+ Table dirTable = getDirTable(); // no need to get copy since not changing it
+ Table fileTable = getFileTable(); // no need to get copy since not changing it
+ PrimitiveArray dirNames = dirTable.getColumn(0); // the only column
int dirIndex = FileVisitorDNLS.indexOfDirectory(dirNames, localDir);
if (dirIndex < 0) {
String2.log(msg + "localDir=" + localDir + " not in dirTable.");
return null;
}
- //get file index
- ShortArray dirIndexCol = (ShortArray)fileTable.getColumn(FT_DIR_INDEX_COL);
- StringArray fileNameCol = (StringArray)fileTable.getColumn(FT_FILE_LIST_COL);
+ // get file index
+ ShortArray dirIndexCol = (ShortArray) fileTable.getColumn(FT_DIR_INDEX_COL);
+ StringArray fileNameCol = (StringArray) fileTable.getColumn(FT_FILE_LIST_COL);
int n = dirIndexCol.size();
for (int i = 0; i < n; i++) {
if (dirIndexCol.get(i) == dirIndex &&
- fileNameCol.get(i).equals(nameAndExt))
- return fullName; //it's a valid file in the fileTable
+ fileNameCol.get(i).equals(nameAndExt))
+ return fullName; // it's a valid file in the fileTable
}
- String2.log(msg + "fullName=" + localDir + " not in dirTable+fileTable.");
+            String2.log(msg + "fullName=" + fullName + " not in dirTable+fileTable.");
return null;
} catch (Throwable t) {
String2.log(msg + "\n" +
- MustBe.throwableToString(t));
+ MustBe.throwableToString(t));
return null;
}
}
/**
* This tests if 'old' is different from this in any way.
-     * <p>This test is from the view of a subscriber who wants to know
-     * when a dataset has changed in any way.
-     * <p>So some things like onChange and reloadEveryNMinutes are not checked.
-     * <p>This only lists the first change found.
+ *
+ * This test is from the view of a subscriber who wants to know
+ * when a dataset has changed in any way.
+ *
+ * So some things like onChange and reloadEveryNMinutes are not checked.
+ *
+ * This only lists the first change found.
*
- * EDDGrid overwrites this to also check the axis variables.
+ *
+     * EDDGrid overrides this to also check the axis variables.
*
     * @param old the old version of this dataset, for comparison.
* @return "" if same or message if not.
@@ -3035,456 +3321,539 @@ public String changed(EDD old) {
     * Some subclasses override this.
*
* @param recursive true if the file search should also search subdirectories
- * @return a table with columns with DIRECTORY, NAME, LASTMODIFIED, and SIZE columns;
+     * @return a table with DIRECTORY, NAME, LASTMODIFIED, and SIZE columns
* @throws Throwable if trouble
*/
- public Table getFileInfo(String fileDir, String fileNameRegex,
- boolean recursive, String pathRegex) throws Throwable {
- //String2.log("EDDTableFromFiles getFileInfo");
+ public Table getFileInfo(String fileDir, String fileNameRegex,
+ boolean recursive, String pathRegex) throws Throwable {
+ // String2.log("EDDTableFromFiles getFileInfo");
- //if temporary cache system active, make it look like all remote files are in local dir
+ // if temporary cache system active, make it look like all remote files are in
+ // local dir
if (cacheFromUrl != null && cacheMaxSizeB > 0) {
- Table table = FileVisitorDNLS.oneStepCache(cacheFromUrl, //throws IOException
- fileDir, fileNameRegex, recursive, pathRegex, false); //dirsToo
- if (table.nRows() == 0)
+ Table table = FileVisitorDNLS.oneStepCache(cacheFromUrl, // throws IOException
+ fileDir, fileNameRegex, recursive, pathRegex, false); // dirsToo
+ if (table.nRows() == 0)
throw new Exception("No matching files at " + cacheFromUrl);
return table;
}
-
- return FileVisitorDNLS.oneStep( //throws IOException if "Too many open files"
- fileDir, fileNameRegex, recursive, pathRegex, false); //dirsToo
+
+ return FileVisitorDNLS.oneStep( // throws IOException if "Too many open files"
+ fileDir, fileNameRegex, recursive, pathRegex, false); // dirsToo
}
/**
* This is the low level method to get source data from one file.
* This is only called by getSourceDataFromFile();
*
-     * <p>This is used by the constructor to get all of the data from each file.
-     * So it is good if this also tests the validity of the file and throws
+ *
+ * This is used by the constructor to get all of the data from each file.
+ * So it is good if this also tests the validity of the file and throws
* exception if not valid.
*
-     * <p>Constraints are specified by 2 systems:
-     * <br>1) ...Sorted - the old simple system
-     * <br>2) sourceConVars, sourceConOps, sourceConValues - a comprehensive system
-     * <br>Each subclass can use either, both, or neither.
+ *
+ * Constraints are specified by 2 systems:
+ *
+ * 1) ...Sorted - the old simple system
+ *
+ * 2) sourceConVars, sourceConOps, sourceConValues - a comprehensive system
+ *
+ * Each subclass can use either, both, or neither.
*
* @param tFileDir
* @param tFileName
* @param sourceDataNames the names of the desired source columns.
- * All constraintVariables (except columnNameForExtract) will be included in this list.
- * !!!This will not include columnNameForExtract.
- * @param sourceDataTypes the data types of the desired source columns
- * (e.g., "String" or "float"). "boolean" indicates data should be
- * interpreted as boolean, but stored in the response table as bytes.
- * @param sortedSpacing
- * -1: this method will assume nothing about sorted-ness of sortColumn.
- * 0: this method will assume sortColumn is sorted ascending
- * positive: this method will assume sortColumn is sorted ascending,
- * evenly spaced by this increment.
- * @param minSorted the minimum desired value for the sortedColumn
- * (use NaN if no limit) (ignored if no sortedColumn).
-     * <br>Subclasses can ignore this and get all of the data if they need to.
-     * <br>With respect to scale_factor and add_offset, this is a source value.
-     * <br>For time, this is the source time, not epochSeconds.
- * @param maxSorted the maximum desired value for the sortedColumn
- * (use NaN if no limit) (ignored if no sortedColumn).
-     * <br>Subclasses can ignore this and get all of the data if they need to.
-     * <br>If minSorted is non-NaN, maxSorted will be non-NaN.
-     * <br>With respect to scale_factor and add_offset, this is a source value.
-     * <br>For time, this is the source time, not epochSeconds.
- * @param sourceConVars the source constraint variables. May be null or size=0.
- * CONSTRAIN_YES constraints MUST be handled here.
- * The CONSTRAIN_NO and CONSTRAIN_PARTIAL constraints may be handled
- * here: these constraints are requests here (if convenient for the subclass)
- * not obligations.
- * @param sourceConOps the source constraint operators.
- * regex is always PrimitiveArray.REGEX_OP, not sourceCanConstrainStringRegex.
+ * All constraintVariables (except columnNameForExtract)
+ * will be included in this list.
+ * !!!This will not include columnNameForExtract.
+ * @param sourceDataTypes the data types of the desired source columns
+ * (e.g., "String" or "float"). "boolean" indicates data
+ * should be
+ * interpreted as boolean, but stored in the response
+ * table as bytes.
+ * @param sortedSpacing
+ * -1: this method will assume nothing about sorted-ness
+ * of sortColumn.
+ * 0: this method will assume sortColumn is sorted
+ * ascending
+ * positive: this method will assume sortColumn is sorted
+ * ascending,
+ * evenly spaced by this increment.
+ * @param minSorted the minimum desired value for the sortedColumn
+ * (use NaN if no limit) (ignored if no sortedColumn).
+ *
+ * Subclasses can ignore this and get all of the data if
+ * they need to.
+ *
+ * With respect to scale_factor and add_offset, this is a
+ * source value.
+ *
+ * For time, this is the source time, not epochSeconds.
+ * @param maxSorted the maximum desired value for the sortedColumn
+ * (use NaN if no limit) (ignored if no sortedColumn).
+ *
+ * Subclasses can ignore this and get all of the data if
+ * they need to.
+ *
+ * If minSorted is non-NaN, maxSorted will be non-NaN.
+ *
+ * With respect to scale_factor and add_offset, this is a
+ * source value.
+ *
+ * For time, this is the source time, not epochSeconds.
+ * @param sourceConVars the source constraint variables. May be null or
+ * size=0.
+ * CONSTRAIN_YES constraints MUST be handled here.
+ * The CONSTRAIN_NO and CONSTRAIN_PARTIAL constraints may
+ * be handled
+ * here: these constraints are requests here (if
+ * convenient for the subclass)
+ * not obligations.
+ * @param sourceConOps the source constraint operators.
+ * regex is always PrimitiveArray.REGEX_OP, not
+ * sourceCanConstrainStringRegex.
* @param sourceConValues the source constraint values.
- * timeStamp constraints are numeric source values.
- * If a timeStamp has String source values or timeStamp op is regex, the constraint has been removed.
- * @param getMetadata if true, this must get global and variable metadata, too.
- * @param mustGetData if true, the caller must get the actual data;
- * otherwise it may (optional!) just return the global atts, the columns (with 0 rows),
- * and column atts.
+ * timeStamp constraints are numeric source values.
+ * If a timeStamp has String source values or timeStamp
+ * op is regex, the constraint has been removed.
+ * @param getMetadata if true, this must get global and variable metadata,
+ * too.
+ * @param mustGetData if true, the caller must get the actual data;
+ * otherwise it may (optional!) just return the global
+ * atts, the columns (with 0 rows),
+ * and column atts.
* @return a table with the results (with the requested sourceDataTypes).
-     * <br>It may have more or fewer columns than sourceDataNames.
-     * <br>These are raw source results: scale_factor and add_offset will not yet have been applied.
-     * <br>If there is no matching data, it is best to return an empty table, not throw Throwable.
+ *
+ * It may have more or fewer columns than sourceDataNames.
+ *
+ * These are raw source results: scale_factor and add_offset will not
+ * yet have been applied.
+ *
+ * If there is no matching data, it is best to return an empty table,
+ * not throw Throwable.
* @throws Throwable if trouble.
- * If the file doesn't have a sourceDataName, it isn't an error -- it returns a column of mv's.
- * If there is trouble, this doesn't call addBadFile or requestReloadASAP().
- * This throws an exception if too much data.
- * This won't throw an exception if no data.
+ * If the file doesn't have a sourceDataName, it isn't an
+ * error -- it returns a column of mv's.
+ * If there is trouble, this doesn't call addBadFile or
+ * requestReloadASAP().
+ * This throws an exception if too much data.
+ * This won't throw an exception if no data.
*/
- public abstract Table lowGetSourceDataFromFile(String tFileDir, String tFileName,
- StringArray sourceDataNames, String sourceDataTypes[],
- double sortedSpacing, double minSorted, double maxSorted,
- StringArray sourceConVars, StringArray sourceConOps, StringArray sourceConValues,
- boolean getMetadata, boolean mustGetData) throws Throwable;
-
-
- /**
- * This parent method for lowGetSourceDataFromFile
- * handles global: sourceDataNames specially (to convert
- * a file's global metadata to be a data column).
- * See lowGetSourceDataFromFile params.
- *
- * @param sourceDataTypes e.g., "float", "String", "boolean"
- * indicates the data should be interpreted as a boolean, but stored as a byte.
- * @throws an exception if too much data and other problems.
- * This won't (shouldn't) throw an exception if no data.
- */
- public Table getSourceDataFromFile(String tFileDir, String tFileName,
- StringArray sourceDataNames, String sourceDataTypes[],
- double sortedSpacing, double minSorted, double maxSorted,
- StringArray sourceConVars, StringArray sourceConOps, StringArray sourceConValues,
- boolean getMetadata, boolean mustGetData) throws Throwable {
-
- //grab any "global:..." and "variable:..." sourceDataNames
- int nSourceDataNames = sourceDataNames.size();
-        HashSet<String> sourceNamesSet = new HashSet();
-        HashSet<String> needOtherSourceNames = new HashSet();
- StringArray sourceNames = new StringArray(); //subset with true sourceNames (actual vars)
- StringArray sourceTypes = new StringArray();
- String columnNameForExtractType = null;
+ public abstract Table lowGetSourceDataFromFile(String tFileDir, String tFileName,
+ StringArray sourceDataNames, String sourceDataTypes[],
+ double sortedSpacing, double minSorted, double maxSorted,
+ StringArray sourceConVars, StringArray sourceConOps, StringArray sourceConValues,
+ boolean getMetadata, boolean mustGetData) throws Throwable;
+
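+    /**
+     * A simple holder for the bookkeeping gathered by processSourceNames about
+     * special sourceNames (columnNameForExtract, global:, variable:,
+     * ***fileName, ***pathName, and =script forms): parallel lists of
+     * names/types (and regexes/capture groups where applicable) per form.
+     */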
+ private class SourceDataInfo {
+ String columnNameForExtractType = null;
StringArray globalNames = null;
StringArray globalTypes = null;
- StringArray variableNames = null;
+ StringArray variableNames = null;
StringArray variableAttNames = null;
- StringArray variableTypes = null;
- StringArray fileNameNames = null;
- StringArray fileNameTypes = null;
- StringArray fileNameRegexes = null;
- IntArray fileNameCGs = null;
- StringArray pathNameNames = null;
- StringArray pathNameTypes = null;
- StringArray pathNameRegexes = null;
- IntArray pathNameCGs = null;
- StringArray scriptNames = null;
- StringArray scriptTypes = null;
+ StringArray variableTypes = null;
+ StringArray fileNameNames = null;
+ StringArray fileNameTypes = null;
+ StringArray fileNameRegexes = null;
+ IntArray fileNameCGs = null;
+ StringArray pathNameNames = null;
+ StringArray pathNameTypes = null;
+ StringArray pathNameRegexes = null;
+ IntArray pathNameCGs = null;
+ StringArray scriptNames = null;
+ StringArray scriptTypes = null;
+ }
+ private void processSourceNames(StringArray sourceDataNames, String sourceDataTypes[],
+ StringArray sourceNames, StringArray sourceTypes, SourceDataInfo sourceInfo,
+            HashSet<String> needOtherSourceNames, HashSet<String> sourceNamesSet) {
+ // grab any "global:..." and "variable:..." sourceDataNames
+ int nSourceDataNames = sourceDataNames.size();
for (int i = 0; i < nSourceDataNames; i++) {
String name = sourceDataNames.get(i);
if (name.equals(columnNameForExtract)) {
- columnNameForExtractType = sourceDataTypes[i];
+ sourceInfo.columnNameForExtractType = sourceDataTypes[i];
} else if (name.startsWith("global:")) {
- //promote a global attribute
- if (globalNames == null) {
- globalNames = new StringArray();
- globalTypes = new StringArray();
+ // promote a global attribute
+ if (sourceInfo.globalNames == null) {
+ sourceInfo.globalNames = new StringArray();
+ sourceInfo.globalTypes = new StringArray();
}
- globalNames.add(name.substring(7));
- globalTypes.add(sourceDataTypes[i]);
+ sourceInfo.globalNames.add(name.substring(7));
+ sourceInfo.globalTypes.add(sourceDataTypes[i]);
} else if (name.startsWith("variable:")) {
- //promote a variable attribute
- if (variableNames == null) {
- variableNames = new StringArray();
- variableAttNames = new StringArray();
- variableTypes = new StringArray();
+ // promote a variable attribute
+ if (sourceInfo.variableNames == null) {
+ sourceInfo.variableNames = new StringArray();
+ sourceInfo.variableAttNames = new StringArray();
+ sourceInfo.variableTypes = new StringArray();
}
String s = name.substring(9);
int cpo = s.indexOf(':');
- if (cpo <= 0)
+ if (cpo <= 0) {
throw new SimpleException("datasets.xml error: " +
- "To convert variable metadata to data, sourceName should be " +
- "variable:[varName]:[attributeName]. " +
- "Invalid sourceName=" + name);
+ "To convert variable metadata to data, sourceName should be " +
+ "variable:[varName]:[attributeName]. " +
+ "Invalid sourceName=" + name);
+ }
String tVarName = s.substring(0, cpo);
- variableNames.add(tVarName);
- variableAttNames.add(s.substring(cpo + 1));
- variableTypes.add(sourceDataTypes[i]);
+ sourceInfo.variableNames.add(tVarName);
+ sourceInfo.variableAttNames.add(s.substring(cpo + 1));
+ sourceInfo.variableTypes.add(sourceDataTypes[i]);
needOtherSourceNames.add(tVarName);
} else if (name.startsWith("***fileName,")) {
- //grab content from the fileName
- if (fileNameNames == null) {
- fileNameNames = new StringArray();
- fileNameTypes = new StringArray();
- fileNameRegexes = new StringArray();
- fileNameCGs = new IntArray();
+ // grab content from the fileName
+ if (sourceInfo.fileNameNames == null) {
+ sourceInfo.fileNameNames = new StringArray();
+ sourceInfo.fileNameTypes = new StringArray();
+ sourceInfo.fileNameRegexes = new StringArray();
+ sourceInfo.fileNameCGs = new IntArray();
}
String csv[] = StringArray.arrayFromCSV(name.substring(12), ",");
- if (csv.length != 2)
+ if (csv.length != 2) {
throw new SimpleException("datasets.xml error: " +
- "To extract data from a fileName, sourceName should be " +
- "***fileName,[extractRegex],[captureGroupNumber] . " +
- "Invalid sourceName=" + name);
- fileNameNames.add(name);
- fileNameTypes.add(sourceDataTypes[i]);
- fileNameRegexes.add(csv[0]);
- fileNameCGs.add(String2.parseInt(csv[1]));
+ "To extract data from a fileName, sourceName should be " +
+ "***fileName,[extractRegex],[captureGroupNumber] . " +
+ "Invalid sourceName=" + name);
+ }
+ sourceInfo.fileNameNames.add(name);
+ sourceInfo.fileNameTypes.add(sourceDataTypes[i]);
+ sourceInfo.fileNameRegexes.add(csv[0]);
+ sourceInfo.fileNameCGs.add(String2.parseInt(csv[1]));
} else if (name.startsWith("***pathName,")) {
- //grab content from the pathName
- if (pathNameNames == null) {
- pathNameNames = new StringArray();
- pathNameTypes = new StringArray();
- pathNameRegexes = new StringArray();
- pathNameCGs = new IntArray();
+ // grab content from the pathName
+ if (sourceInfo.pathNameNames == null) {
+ sourceInfo.pathNameNames = new StringArray();
+ sourceInfo.pathNameTypes = new StringArray();
+ sourceInfo.pathNameRegexes = new StringArray();
+ sourceInfo.pathNameCGs = new IntArray();
}
String csv[] = StringArray.arrayFromCSV(name.substring(12), ",");
- if (csv.length != 2)
+ if (csv.length != 2) {
throw new SimpleException("datasets.xml error: " +
- "To extract data from a pathName, sourceName should be " +
- "***pathName,[extractRegex],[captureGroupNumber] . " +
- "Invalid sourceName=" + name);
- pathNameNames.add(name);
- pathNameTypes.add(sourceDataTypes[i]);
- pathNameRegexes.add(csv[0]);
- pathNameCGs.add(String2.parseInt(csv[1]));
+ "To extract data from a pathName, sourceName should be " +
+ "***pathName,[extractRegex],[captureGroupNumber] . " +
+ "Invalid sourceName=" + name);
+ }
+ sourceInfo.pathNameNames.add(name);
+ sourceInfo.pathNameTypes.add(sourceDataTypes[i]);
+ sourceInfo.pathNameRegexes.add(csv[0]);
+ sourceInfo.pathNameCGs.add(String2.parseInt(csv[1]));
} else if (name.startsWith("=")) {
- //content comes from a script
- if (scriptNames == null) {
- scriptNames = new StringArray();
- scriptTypes = new StringArray();
+ // content comes from a script
+ if (sourceInfo.scriptNames == null) {
+ sourceInfo.scriptNames = new StringArray();
+ sourceInfo.scriptTypes = new StringArray();
}
- scriptNames.add(name);
- scriptTypes.add(sourceDataTypes[i]);
+ sourceInfo.scriptNames.add(name);
+ sourceInfo.scriptTypes.add(sourceDataTypes[i]);
- //later: ensure columns referenced in script are in sourceNamesSet
+ // later: ensure columns referenced in script are in sourceNamesSet
needOtherSourceNames.addAll(scriptNeedsColumns.get(name));
-
} else {
- //regular variable. Keep it.
- if (sourceNamesSet.add(name)) { //if not already present
+ // regular variable. Keep it.
+ if (sourceNamesSet.add(name)) { // if not already present
sourceNames.add(name);
sourceTypes.add(sourceDataTypes[i]);
}
}
}
+ }
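+    // Examples of the special sourceName forms handled above (hypothetical
+    // names and regexes, for illustration only):
+    //   global:title            -> promote global attribute "title" to a data column
+    //   variable:air:units      -> promote variable air's "units" attribute
+    //   ***fileName,(\d{8}),1   -> capture group 1 of the regex, applied to the file name
+    //   ***pathName,/(\w+)/,1   -> capture group 1 of the regex, applied to the path
+    //   =someScript             -> values computed by a script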
+
+ /**
+ * This parent method for lowGetSourceDataFromFile
+ * handles global: sourceDataNames specially (to convert
+ * a file's global metadata to be a data column).
+ * See lowGetSourceDataFromFile params.
+ *
+     * @param sourceDataTypes e.g., "float", "String", or "boolean"
+     *                        ("boolean" indicates the data should be interpreted
+     *                        as boolean, but stored as bytes).
+     * @throws Throwable if too much data or other problems.
+     *         This won't (shouldn't) throw an exception if no data.
+ */
+ public Table getSourceDataFromFile(String tFileDir, String tFileName,
+ StringArray sourceDataNames, String sourceDataTypes[],
+ double sortedSpacing, double minSorted, double maxSorted,
+ StringArray sourceConVars, StringArray sourceConOps, StringArray sourceConValues,
+ boolean getMetadata, boolean mustGetData) throws Throwable {
+
+        HashSet<String> sourceNamesSet = new HashSet<>();
+        HashSet<String> needOtherSourceNames = new HashSet<>();
+ StringArray sourceNames = new StringArray(); // subset with true sourceNames (actual vars)
+ StringArray sourceTypes = new StringArray();
+ SourceDataInfo sourceInfo = new SourceDataInfo();
+
+ processSourceNames(sourceDataNames, sourceDataTypes, sourceNames, sourceTypes, sourceInfo, needOtherSourceNames,
+ sourceNamesSet);
+
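+        // Second pass: variable:[varName]:[attName] and =script sourceNames may
+        // reference source columns that weren't requested directly. Request any
+        // that are missing (as Strings) and reprocess them, since they could
+        // themselves be special sourceNames.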
+ if (needOtherSourceNames.size() > 0) {
+ StringArray secondPassNames = new StringArray();
+ StringArray secondPassTypes = new StringArray();
+ for (String osName : needOtherSourceNames) {
+ if (!sourceNamesSet.contains(osName)) { // if not already present
+ secondPassNames.add(osName);
+ secondPassTypes.add("String");
+ }
+ }
+ if (secondPassNames.size() > 0) {
+ needOtherSourceNames.clear();
+ processSourceNames(secondPassNames, secondPassTypes.toArray(), sourceNames, sourceTypes, sourceInfo,
+ needOtherSourceNames, sourceNamesSet);
+ }
+ }
- //?! remove sourceConVars, sourceConOps, sourceConValues that are used by
- //special variables. Not necessary, since no column in source will have the special name.
+ // ?! remove sourceConVars, sourceConOps, sourceConValues that are used by
+ // special variables. Not necessary, since no column in source will have the
+ // special name.
- //then ensure needOtherSourceNames are in sourceNames
+ // third pass, ensure needOtherSourceNames are in sourceNames
+ // This assumes all source names in this "third" pass are actually in the
+ // source.
for (String osName : needOtherSourceNames) {
- if (sourceNamesSet.add(osName)) { //if not already present
+ if (sourceNamesSet.add(osName)) { // if not already present
sourceNames.add(osName);
- //initially treat all other source vars as Strings
- // for variable: it is irrelevant
- // for script: the e.g., row.columnInt("osName") will specify the data type. String works for all types.
- sourceTypes.add("String");
+ // initially treat all other source vars as Strings
+ // for variable: it is irrelevant
+ // for script: the e.g., row.columnInt("osName") will specify the data type.
+ // String works for all types.
+ sourceTypes.add("String");
}
- }
+ }
sourceDataTypes = sourceTypes.toArray();
- //if (debugMode) String2.log(">> revised requested sourceNames=" + sourceNames.toString());
+ // if (debugMode) String2.log(">> revised requested sourceNames=" +
+ // sourceNames.toString());
Table table;
int nRows;
- if (sourceNames.size() == 0 && globalNames == null) { //if globalNames!=null, we need global atts
- //we don't need anything from the file, just special variables added below.
+ if (sourceNames.size() == 0 && sourceInfo.globalNames == null) { // if globalNames!=null, we need global atts
+ // we don't need anything from the file, just special variables added below.
table = new Table();
- nRows = 1; //so one row of special values will be added below
+ nRows = 1; // so one row of special values will be added below
if (reallyVerbose)
String2.log(" Skip reading file because just need special variables.");
} else {
- //if using temporary cache system, ensure file is in cache
- ensureInCache(tFileDir + tFileName); //throws Exception
-
- //get the data
- table = lowGetSourceDataFromFile( //this is the only place that calls this method
- tFileDir, tFileName,
- sourceNames, sourceDataTypes, //the revised list
- sortedSpacing, minSorted, maxSorted,
- //It's okay that sourceConVars may include variable:[varName]:[attName]
- //or =[script] because no source var with that name in the file.
- //If this leads to trouble, remove them above.
- sourceConVars, sourceConOps, sourceConValues,
- getMetadata || globalNames != null || variableNames != null,
- mustGetData);
- nRows = table.nRows(); //may be 0 if mustGetData=false
- //if (debugMode) String2.log(table.getNCHeader("row"));
+ // if using temporary cache system, ensure file is in cache
+ ensureInCache(tFileDir + tFileName); // throws Exception
+
+ // get the data
+ table = lowGetSourceDataFromFile( // this is the only place that calls this method
+ tFileDir, tFileName,
+ sourceNames, sourceDataTypes, // the revised list
+ sortedSpacing, minSorted, maxSorted,
+ // It's okay that sourceConVars may include variable:[varName]:[attName]
+ // or =[script] because no source var with that name in the file.
+ // If this leads to trouble, remove them above.
+ sourceConVars, sourceConOps, sourceConValues,
+ getMetadata || sourceInfo.globalNames != null || sourceInfo.variableNames != null,
+ mustGetData);
+ nRows = table.nRows(); // may be 0 if mustGetData=false
+ // if (debugMode) String2.log(table.getNCHeader("row"));
}
- //columnNameForExtract
- if (columnNameForExtractType != null) {
+ // columnNameForExtract
+ if (sourceInfo.columnNameForExtractType != null) {
String value = extractFromFileName(tFileName);
PrimitiveArray pa = PrimitiveArray.factory(
- PAType.fromCohortString(columnNameForExtractType), nRows, value);
+ PAType.fromCohortString(sourceInfo.columnNameForExtractType), nRows, value);
table.addColumn(columnNameForExtract, pa);
}
- //convert global: metadata to be data columns
- if (globalNames != null) {
+ // convert global: metadata to be data columns
+ if (sourceInfo.globalNames != null) {
Attributes globalAtts = table.globalAttributes();
- int nGlobalNames = globalNames.size();
+ int nGlobalNames = sourceInfo.globalNames.size();
for (int gni = 0; gni < nGlobalNames; gni++) {
- PrimitiveArray pa = globalAtts.remove(globalNames.get(gni));
+ PrimitiveArray pa = globalAtts.remove(sourceInfo.globalNames.get(gni));
if (pa != null && pa.size() > 0) {
- //make pa size=1
- if (pa.size() > 1)
- pa.removeRange(1, pa.size()); //just the first value
+ // make pa size=1
+ if (pa.size() > 1)
+ pa.removeRange(1, pa.size()); // just the first value
- //force column to be specified type
+ // force column to be specified type
PrimitiveArray newPa = PrimitiveArray.factory(
- PAType.fromCohortString(globalTypes.get(gni)), 1, false);
+ PAType.fromCohortString(sourceInfo.globalTypes.get(gni)), 1, false);
newPa.append(pa);
pa = newPa;
- //duplicate the value
- if (nRows == 0) { //e.g., when just getting metadata
- pa.clear();
+ int count = nRows - 1;
+ if (nRows == 0) {
+ count = 1;
+ }
+ // duplicate the value
+ if (nRows == 0 && !mustGetData) {
+ // e.g., when just getting metadata
+ pa.clear();
} else if (pa instanceof StringArray) {
String ts = pa.getString(0);
- pa.addNStrings(nRows - 1, ts == null? "" : ts);
+ pa.addNStrings(count, ts == null ? "" : ts);
} else {
- pa.addNDoubles(nRows - 1, pa.getDouble(0));
+ pa.addNDoubles(count, pa.getDouble(0));
}
- //add pa to the table
- table.addColumn("global:" + globalNames.get(gni), pa);
- } //If att not in results, just don't add to results table.
+ // add pa to the table
+ table.addColumn("global:" + sourceInfo.globalNames.get(gni), pa);
+ } // If att not in results, just don't add to results table.
}
}
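For orientation, a minimal sketch of the attribute-to-column conversion performed above, using the same PrimitiveArray calls (the attribute name and values are hypothetical):

    // Hypothetical: global attribute title="Buoy Data", requested as "global:title" (String), nRows=3
    PrimitiveArray pa = PrimitiveArray.factory(PAType.fromCohortString("String"), 1, false);
    pa.addString("Buoy Data");           // the single attribute value
    pa.addNStrings(3 - 1, "Buoy Data");  // replicate so the column has nRows values
    // table.addColumn("global:title", pa) then yields a constant 3-row column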
- //convert variable: metadata to be data columns
- if (variableNames != null) {
- int nVariableNames = variableNames.size();
+ // convert variable: metadata to be data columns
+ if (sourceInfo.variableNames != null) {
+ int nVariableNames = sourceInfo.variableNames.size();
for (int vni = 0; vni < nVariableNames; vni++) {
- int col = table.findColumnNumber(variableNames.get(vni));
+ int col = table.findColumnNumber(sourceInfo.variableNames.get(vni));
if (col >= 0) {
- //var is in file. Try to get attribute
- PrimitiveArray pa = table.columnAttributes(col).get(variableAttNames.get(vni));
+ // var is in file. Try to get attribute
+ PrimitiveArray pa = table.columnAttributes(col).get(sourceInfo.variableAttNames.get(vni));
if (pa != null && pa.size() > 0) {
- pa = PrimitiveArray.factory(PAType.fromCohortString(variableTypes.get(vni)), pa);
+ pa = PrimitiveArray.factory(PAType.fromCohortString(sourceInfo.variableTypes.get(vni)), pa);
- //make pa size=1
+ // make pa size=1
if (pa.size() > 1) {
if (pa instanceof StringArray) {
- String ts = pa.toString(); //eg actual_range as stringArray -> "0.0, 94.0"
+ String ts = pa.toString(); // eg actual_range as stringArray -> "0.0, 94.0"
pa.setString(0, ts);
}
- pa.removeRange(1, pa.size()); //just the first value
+ pa.removeRange(1, pa.size()); // just the first value
}
- //duplicate the value
+ // duplicate the value
if (nRows == 0) {
pa.clear();
} else {
if (pa instanceof StringArray) {
String ts = pa.getString(0);
- pa.addNStrings(nRows - 1, ts == null? "" : ts);
+ pa.addNStrings(nRows - 1, ts == null ? "" : ts);
} else {
pa.addNDoubles(nRows - 1, pa.getDouble(0));
}
}
- //add pa to the table
- table.addColumn("variable:" + variableNames.get(vni) +
- ":" + variableAttNames.get(vni),
- pa);
+ // add pa to the table
+ table.addColumn("variable:" + sourceInfo.variableNames.get(vni) +
+ ":" + sourceInfo.variableAttNames.get(vni),
+ pa);
}
} else {
- if (reallyVerbose) String2.log("WARNING: extract varName=" + variableNames.get(vni) + " not in table.");
+ if (reallyVerbose)
+ String2.log("WARNING: extract varName=" + sourceInfo.variableNames.get(vni) + " not in table.");
}
- //If var or att not in results, just don't add to results table.
+ // If var or att not in results, just don't add to results table.
}
}
- //convert "***fileName," extract into data column
- if (fileNameNames != null) {
- int nFileNameNames = fileNameNames.size();
+ // convert "***fileName," extract into data column
+ if (sourceInfo.fileNameNames != null) {
+ int nFileNameNames = sourceInfo.fileNameNames.size();
for (int fni = 0; fni < nFileNameNames; fni++) {
- Matcher matcher = Pattern.compile(fileNameRegexes.get(fni)).matcher(tFileName);
- if (matcher.matches()) {
- String val = matcher.group(fileNameCGs.get(fni));
+ Matcher matcher = Pattern.compile(sourceInfo.fileNameRegexes.get(fni)).matcher(tFileName);
+ if (matcher.matches()) {
+ String val = matcher.group(sourceInfo.fileNameCGs.get(fni));
PrimitiveArray newPa = PrimitiveArray.factory(
- PAType.fromCohortString(fileNameTypes.get(fni)),
- nRows, val);
- table.addColumn(fileNameNames.get(fni), newPa);
- } //if no match, just don't add to results table.
+ PAType.fromCohortString(
+ sourceInfo.fileNameTypes.get(fni)),
+ nRows, val);
+ table.addColumn(sourceInfo.fileNameNames.get(fni), newPa);
+ } // if no match, just don't add to results table.
}
}
- //convert "***pathName," extract into data column
- if (pathNameNames != null) {
- int nPathNameNames = pathNameNames.size();
+ // convert "***pathName," extract into data column
+ if (sourceInfo.pathNameNames != null) {
+ int nPathNameNames = sourceInfo.pathNameNames.size();
for (int fni = 0; fni < nPathNameNames; fni++) {
- Matcher matcher = Pattern.compile(pathNameRegexes.get(fni)).matcher(tFileDir + tFileName);
- if (matcher.matches()) {
- String val = matcher.group(pathNameCGs.get(fni));
+ Matcher matcher = Pattern.compile(sourceInfo.pathNameRegexes.get(fni)).matcher(tFileDir + tFileName);
+ if (matcher.matches()) {
+ String val = matcher.group(sourceInfo.pathNameCGs.get(fni));
PrimitiveArray newPa = PrimitiveArray.factory(
- PAType.fromCohortString(pathNameTypes.get(fni)),
- nRows, val);
- table.addColumn(pathNameNames.get(fni), newPa);
- } //if no match, just don't add to results table.
+ PAType.fromCohortString(
+ sourceInfo.pathNameTypes.get(fni)),
+ nRows, val);
+ table.addColumn(sourceInfo.pathNameNames.get(fni), newPa);
+ } // if no match, just don't add to results table.
}
}
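As a concrete illustration of the "***pathName" extraction above, a self-contained sketch (directory, file name, and regex are all hypothetical):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class PathNameExtractDemo {
        public static void main(String[] args) {
            String tFileDir = "/data/buoys/2023/";      // hypothetical
            String tFileName = "station_46026_2023.nc"; // hypothetical
            // e.g., sourceName ***pathName,.*/(\d{4})/station_(\w+)_\d{4}\.nc,2
            String extractRegex = ".*/(\\d{4})/station_(\\w+)_\\d{4}\\.nc";
            int captureGroupNumber = 2;
            Matcher matcher = Pattern.compile(extractRegex).matcher(tFileDir + tFileName);
            if (matcher.matches()) // as above: the regex must match the whole path
                System.out.println(matcher.group(captureGroupNumber)); // -> 46026
            // if it doesn't match, no column is added to the results table
        }
    }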
- //convert script columns into data columns
- if (scriptNames != null)
- convertScriptColumnsToDataColumns(tFileDir + tFileName, table,
- scriptNames, scriptTypes, scriptNeedsColumns);
+ // convert script columns into data columns
+ if (sourceInfo.scriptNames != null)
+ convertScriptColumnsToDataColumns(tFileDir + tFileName, table,
+ sourceInfo.scriptNames, sourceInfo.scriptTypes, scriptNeedsColumns);
return table;
}
- /**
- * This gets the data (chunk by chunk) from this EDDTable for the
- * OPeNDAP DAP-style query and writes it to the TableWriter.
+ /**
+ * This gets the data (chunk by chunk) from this EDDTable for the
+ * OPeNDAP DAP-style query and writes it to the TableWriter.
* See the EDDTable method documentation.
*
- * @param language the index of the selected language
- * @param loggedInAs the user's login name if logged in (or null if not logged in).
- * @param requestUrl the part of the user's request, after EDStatic.baseUrl, before '?'.
- * @param userDapQuery the part of the user's request after the '?', still percentEncoded, may be null.
+ * @param language the index of the selected language
+ * @param loggedInAs the user's login name if logged in (or null if not logged
+ * in).
+ * @param requestUrl the part of the user's request, after EDStatic.baseUrl,
+ * before '?'.
+ * @param userDapQuery the part of the user's request after the '?', still
+ * percentEncoded, may be null.
* @param tableWriter
* @throws Throwable if trouble (notably, WaitThenTryAgainException)
*/
- public void getDataForDapQuery(int language, String loggedInAs, String requestUrl,
- String userDapQuery, TableWriter tableWriter) throws Throwable {
-
- //get the sourceDapQuery (a query that the source can handle)
+ public void getDataForDapQuery(int language, String loggedInAs, String requestUrl,
+ String userDapQuery, TableWriter tableWriter) throws Throwable {
+
+ // get the sourceDapQuery (a query that the source can handle)
StringArray resultsVariables = new StringArray();
- //constraints are sourceVars Ops Values
- StringArray conVars = new StringArray();
- StringArray conOps = new StringArray();
- StringArray conValues = new StringArray();
+ // constraints are sourceVars Ops Values
+ StringArray conVars = new StringArray();
+ StringArray conOps = new StringArray();
+ StringArray conValues = new StringArray();
getSourceQueryFromDapQuery(language, userDapQuery,
- resultsVariables, //sourceNames
- conVars, conOps, conValues); //timeStamp constraints other than regex are epochSeconds
- if (reallyVerbose) String2.log("getDataForDapQuery sourceQuery=" +
- formatAsDapQuery(resultsVariables.toArray(),
- conVars.toArray(), conOps.toArray(), conValues.toArray()));
+ resultsVariables, // sourceNames
+ conVars, conOps, conValues); // timeStamp constraints other than regex are epochSeconds
+ if (reallyVerbose)
+ String2.log("getDataForDapQuery sourceQuery=" +
+ formatAsDapQuery(resultsVariables.toArray(),
+ conVars.toArray(), conOps.toArray(), conValues.toArray()));
boolean isFromHttpGet = "EDDTableFromHttpGet".equals(className);
- //get a local reference to dirTable and fileTable
- Table tDirTable = getDirTable();
+ // get a local reference to dirTable and fileTable
+ Table tDirTable = getDirTable();
Table tFileTable = getFileTable();
- StringArray dirList = (StringArray)tDirTable.getColumn(0);
- ShortArray ftDirIndex = (ShortArray)tFileTable.getColumn(0);
- StringArray ftFileList = (StringArray)tFileTable.getColumn(1);
- LongArray ftLastMod = (LongArray)tFileTable.getColumn(2);
- LongArray ftSize = (LongArray)tFileTable.getColumn(3);
- DoubleArray ftSortedSpacing = (DoubleArray)tFileTable.getColumn(4);
-
-
- //no need to further prune constraints.
- //minMaxTable and testing each file (below) deal with constraints.
- //sourceCanConstrainNumericData = CONSTRAIN_PARTIAL; //all partially handled
- //sourceCanConstrainStringData = CONSTRAIN_PARTIAL; //all partially handled
- //sourceCanConstrainStringRegex = PrimitiveArray.REGEX_OP; //partially
-
- //is request for just columnNameForExtract?
+ StringArray dirList = (StringArray) tDirTable.getColumn(0);
+ ShortArray ftDirIndex = (ShortArray) tFileTable.getColumn(0);
+ StringArray ftFileList = (StringArray) tFileTable.getColumn(1);
+ LongArray ftLastMod = (LongArray) tFileTable.getColumn(2);
+ LongArray ftSize = (LongArray) tFileTable.getColumn(3);
+ DoubleArray ftSortedSpacing = (DoubleArray) tFileTable.getColumn(4);
+
+ // no need to further prune constraints.
+ // minMaxTable and testing each file (below) deal with constraints.
+ // sourceCanConstrainNumericData = CONSTRAIN_PARTIAL; //all partially handled
+ // sourceCanConstrainStringData = CONSTRAIN_PARTIAL; //all partially handled
+ // sourceCanConstrainStringRegex = PrimitiveArray.REGEX_OP; //partially
+
+ // is request for just columnNameForExtract?
if (columnNameForExtract.length() > 0 &&
- resultsVariables.size() == 1 &&
- resultsVariables.get(0).equals(columnNameForExtract)) {
+ resultsVariables.size() == 1 &&
+ resultsVariables.get(0).equals(columnNameForExtract)) {
Table table = new Table();
- PrimitiveArray names = (StringArray)(tFileTable.getColumn(
- dv0 + extractedColNameIndex*3 + 0).clone());
- PrimitiveArray unique = names.makeIndices(new IntArray()); //it returns unique values, sorted
+ PrimitiveArray names = (StringArray) (tFileTable.getColumn(
+ dv0 + extractedColNameIndex * 3 + 0).clone());
+ PrimitiveArray unique = names.makeIndices(new IntArray()); // it returns unique values, sorted
table.addColumn(columnNameForExtract, unique);
- //standardizeResultsTable applies all constraints
- preStandardizeResultsTable(loggedInAs, table);
+ // standardizeResultsTable applies all constraints
+ preStandardizeResultsTable(loggedInAs, table);
standardizeResultsTable(language, requestUrl, userDapQuery, table);
tableWriter.writeAllAndFinish(table);
@@ -3492,40 +3861,45 @@ public void getDataForDapQuery(int language, String loggedInAs, String requestUr
return;
}
- //find dvi for each resultsVariable and make resultsTypes
- int dvi[] = new int[resultsVariables.size()]; //store var indexes in dataVariables
- String resultsTypes[] = new String[resultsVariables.size()];
- //String2.log("dataVariableSourceNames=" + String2.toCSSVString(dataVariableSourceNames()));
+ // find dvi for each resultsVariable and make resultsTypes
+ int dvi[] = new int[resultsVariables.size()]; // store var indexes in dataVariables
+ String resultsTypes[] = new String[resultsVariables.size()];
+ // String2.log("dataVariableSourceNames=" +
+ // String2.toCSSVString(dataVariableSourceNames()));
for (int rv = 0; rv < resultsVariables.size(); rv++) {
String sourceName = resultsVariables.get(rv);
dvi[rv] = String2.indexOf(dataVariableSourceNames(), sourceName);
EDV edv = dataVariables[dvi[rv]];
- resultsTypes[rv] = edv.isBoolean()? "boolean" : edv.sourceDataType();
- //String2.log("rv=" + rv + ": " + sourceName + " dv=" + dvi[rv] + " " + resultsTypes[rv]);
+ resultsTypes[rv] = edv.isBoolean() ? "boolean" : edv.sourceDataType();
+ // String2.log("rv=" + rv + ": " + sourceName + " dv=" + dvi[rv] + " " +
+ // resultsTypes[rv]);
}
- //find cdvi (index in dataVariables) for each conVar
+ // find cdvi (index in dataVariables) for each conVar
int nCon = conVars.size();
int cdvi[] = new int[nCon];
double conValuesD[] = new double[nCon];
for (int con = 0; con < nCon; con++) {
cdvi[con] = String2.indexOf(dataVariableSourceNames(), conVars.get(con));
conValuesD[con] = String2.parseDouble(conValues.get(con));
- //op may be PrimitiveArray.REGEX_OP (won't be sourceCanConstrainStringRegex)
+ // op may be PrimitiveArray.REGEX_OP (won't be sourceCanConstrainStringRegex)
}
- //String2.log(">> 1 conValuesD=" + String2.toCSSVString(conValuesD));
+ // String2.log(">> 1 conValuesD=" + String2.toCSSVString(conValuesD));
- //distinct? sometimes minMaxTable indicates there is only 1 value in the file
- String[] parts = Table.getDapQueryParts(userDapQuery); //decoded.
+ // distinct? sometimes minMaxTable indicates there is only 1 value in the file
+ String[] parts = Table.getDapQueryParts(userDapQuery); // decoded.
boolean distinct = String2.indexOf(parts, "distinct()") >= 0;
- if (reallyVerbose && distinct) String2.log(" query includes \"distinct()\"");
+ if (reallyVerbose && distinct)
+ String2.log(" query includes \"distinct()\"");
- //check if constraints can't be met by this dataset (by checking minMaxTable)
- //(this is just an expedient. checking each file below has same result, but slower.)
+ // check if constraints can't be met by this dataset (by checking minMaxTable)
+ // (this is just an expedient. checking each file below has same result, but
+ // slower.)
String reasonNotOk = null;
- //and make true sourceCon where timeStamp constraints are sourceValues (and not regex)
- StringArray sourceConVars = new StringArray();
- StringArray sourceConOps = new StringArray();
+ // and make true sourceCon where timeStamp constraints are sourceValues (and not
+ // regex)
+ StringArray sourceConVars = new StringArray();
+ StringArray sourceConOps = new StringArray();
StringArray sourceConValues = new StringArray();
for (int con = 0; con < nCon; con++) {
int dv = cdvi[con];
@@ -3533,129 +3907,136 @@ public void getDataForDapQuery(int language, String loggedInAs, String requestUr
boolean isTimeStamp = edv instanceof EDVTimeStamp;
PAType conEdvSourcePAType = edv.sourceDataPAType();
String tOp = conOps.get(con);
- //tValue initially: usually a source val, but time is epochSeconds
- String tValue = conValues.get(con);
- //String2.log(">> con=" + con + " sourcePAType=" + conEdvSourcePAType + " " + edv.destinationName() + tOp + tValue);
+ // tValue initially: usually a source val, but time is epochSeconds
+ String tValue = conValues.get(con);
+ // String2.log(">> con=" + con + " sourcePAType=" + conEdvSourcePAType + " " +
+ // edv.destinationName() + tOp + tValue);
- //it EDVTimeStamp, convert tValue epochSeconds into source time string
+ // if EDVTimeStamp, convert tValue epochSeconds into source time string
if (isTimeStamp && !tOp.equals(PrimitiveArray.REGEX_OP)) {
double epSec = conValuesD[con];
- //2018-07-05 removed: now, given updateEveryNMillis, assume that info about files is perfectly up-to-data
+ // 2018-07-05 removed: now, given updateEveryNMillis, assume that info about
+ // files is perfectly up-to-date
/*
- //when testing whole dataset, ignore any constraints for today+/-2 days
- double currentEpSec = System.currentTimeMillis() / 1000.0;
- //String2.log(">> currentEpSec=" + currentEpSec + " - constraintEpSec=" + epSec + " = diffDays=" + ((currentEpSec - epSec)/Calendar2.SECONDS_PER_DAY));
- if (!isFromHttpGet && //it is assumed to be perfectly precise and needs e.g., timestamp constraint
- Math.abs(epSec - currentEpSec) < 2 * Calendar2.SECONDS_PER_DAY)
- continue;
- */
-
- //convert any remaining time constraints to source time (includes scaleAddOffset)
- tValue = ((EDVTimeStamp)edv).epochSecondsToSourceTimeString(epSec);
- if (debugMode) String2.log(">>source var=" + conVars.get(con) + " constraint=" + tValue + " value=" + tValue);
+ * //when testing whole dataset, ignore any constraints for today+/-2 days
+ * double currentEpSec = System.currentTimeMillis() / 1000.0;
+ * //String2.log(">> currentEpSec=" + currentEpSec + " - constraintEpSec=" +
+ * epSec + " = diffDays=" + ((currentEpSec - epSec)/Calendar2.SECONDS_PER_DAY));
+ * if (!isFromHttpGet && //it is assumed to be perfectly precise and needs e.g.,
+ * timestamp constraint
+ * Math.abs(epSec - currentEpSec) < 2 * Calendar2.SECONDS_PER_DAY)
+ * continue;
+ */
+
+ // convert any remaining time constraints to source time (includes
+ // scaleAddOffset)
+ tValue = ((EDVTimeStamp) edv).epochSecondsToSourceTimeString(epSec);
+ if (debugMode)
+ String2.log(">>source var=" + conVars.get(con) + " constraint=" + tValue + " value=" + tValue);
}
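A small worked example of the conversion above (java.time is used only for illustration; the code above goes through EDVTimeStamp and Calendar2):

    import java.time.Instant;

    public class EpochToSourceTimeDemo {
        public static void main(String[] args) {
            double epSec = 1.0e9; // the constraint arrives as epochSeconds
            // For a source that stores times as ISO 8601 strings, the constraint
            // must be re-expressed that way before comparing to file values:
            System.out.println(Instant.ofEpochSecond((long) epSec));
            // -> 2001-09-09T01:46:40Z (this string, not 1.0E9, is compared)
        }
    }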
if (tOp.equals(PrimitiveArray.REGEX_OP)) {
- //don't check regex vs minMaxTable. too simplistic.
+ // don't check regex vs minMaxTable. too simplistic.
} else if (conEdvSourcePAType == PAType.STRING) {
if (isTimeStamp &&
- (!((EDVTimeStamp)edv).sourceTimeFormat().toLowerCase().startsWith("yyyy") || //so string sort order is correct order
- tValue.equals(edv.safeStringMissingValue()))) {
- //don't check
- //because minMaxTable has simplistic/misleading min and max strings
- // because of string time format like MM/dd/yyyy
- //or because quick tests for mv are unreliable
+ (!((EDVTimeStamp) edv).sourceTimeFormat().toLowerCase().startsWith("yyyy") || // so string sort order is correct
+ tValue.equals(edv.safeStringMissingValue()))) {
+ // don't check
+ // because minMaxTable has simplistic/misleading min and max strings
+ // because of string time format like MM/dd/yyyy
+ // or because quick tests for mv are unreliable
} else {
- String dsMin = minMaxTable.getStringData(dv, 0);
- String dsMax = minMaxTable.getStringData(dv, 1);
- int dsHasNaN = minMaxTable.getIntData( dv, 2);
+ String dsMin = minMaxTable.getStringData(dv, 0);
+ String dsMax = minMaxTable.getStringData(dv, 1);
+ int dsHasNaN = minMaxTable.getIntData(dv, 2);
if (!isOK(dsMin, dsMax, dsHasNaN, tOp, tValue)) {
reasonNotOk = "No data matches " +
- edv.destinationName() + tOp + String2.toJson(conValues.get(con), 65536) +
- " because the String variable's source min=" +
- String2.toJson(dsMin, 65536) +
- ", max=" + String2.toJson(dsMax, 65536) +
- ", and hasNaN=" + (dsHasNaN != 0) + ".";
- if (reallyVerbose) String2.log(reasonNotOk);
+ edv.destinationName() + tOp + String2.toJson(conValues.get(con), 65536) +
+ " because the String variable's source min=" +
+ String2.toJson(dsMin, 65536) +
+ ", max=" + String2.toJson(dsMax, 65536) +
+ ", and hasNaN=" + (dsHasNaN != 0) + ".";
+ if (reallyVerbose)
+ String2.log(reasonNotOk);
break;
}
}
} else {
- //numeric variables (and not PrimitiveArray.REGEX_OP)
- double dsMin = minMaxTable.getDoubleData(dv, 0); //a source value
- double dsMax = minMaxTable.getDoubleData(dv, 1);
- int dsHasNaN = minMaxTable.getIntData( dv, 2);
- double conValD = String2.parseDouble(conValues.get(con)); //if time, conValD is epochSeconds
- double tValueD = conEdvSourcePAType == PAType.CHAR?
- (tValue.length() == 0? Double.NaN : (double)tValue.charAt(0)) :
- String2.parseDouble(tValue); //if time, tValueD is a numeric source time
- if (!isOK(isTimeStamp? PAType.LONG : conEdvSourcePAType,
- dsMin, dsMax, dsHasNaN, tOp, tValueD)) {
- reasonNotOk =
- "No data matches " +
- edv.destinationName() + tOp +
- (edv instanceof EDVTimeStamp ets?
- Calendar2.epochSecondsToLimitedIsoStringT(
- ets.time_precision(), conValD, "NaN") :
- conValues.get(con)) +
- " because the numeric variable's source min=" +
- edv.destinationMinString() + //works well with numbers and numeric EDVTimeStamp
- ", max=" + edv.destinationMaxString() +
- ", and hasNaN=" + (dsHasNaN != 0) + "." +
- (debugMode? "\nconValD=" + conValD + " tValueD=" + tValueD +
+ // numeric variables (and not PrimitiveArray.REGEX_OP)
+ double dsMin = minMaxTable.getDoubleData(dv, 0); // a source value
+ double dsMax = minMaxTable.getDoubleData(dv, 1);
+ int dsHasNaN = minMaxTable.getIntData(dv, 2);
+ double conValD = String2.parseDouble(conValues.get(con)); // if time, conValD is epochSeconds
+ double tValueD = conEdvSourcePAType == PAType.CHAR
+ ? (tValue.length() == 0 ? Double.NaN : (double) tValue.charAt(0))
+ : String2.parseDouble(tValue); // if time, tValueD is a numeric source time
+ if (!isOK(isTimeStamp ? PAType.LONG : conEdvSourcePAType,
+ dsMin, dsMax, dsHasNaN, tOp, tValueD)) {
+ reasonNotOk = "No data matches " +
+ edv.destinationName() + tOp +
+ (edv instanceof EDVTimeStamp ets ? Calendar2.epochSecondsToLimitedIsoStringT(
+ ets.time_precision(), conValD, "NaN") : conValues.get(con)) +
+ " because the numeric variable's source min=" +
+ edv.destinationMinString() + // works well with numbers and numeric EDVTimeStamp
+ ", max=" + edv.destinationMaxString() +
+ ", and hasNaN=" + (dsHasNaN != 0) + "." +
+ (debugMode ? "\nconValD=" + conValD + " tValueD=" + tValueD +
" dsMin=" + dsMin + " " + dsMax : "");
- if (reallyVerbose) String2.log(reasonNotOk);
+ if (reallyVerbose)
+ String2.log(reasonNotOk);
break;
}
}
- //make true sourceCon (even time constraint values are source values)
+ // make true sourceCon (even time constraint values are source values)
boolean keepCon = false;
- boolean sourceIsNumeric =
- edv.sourceDataPAType() != PAType.STRING &&
- edv.sourceDataPAType() != PAType.CHAR;
-
- if (isTimeStamp && (standardizeWhat & (2 + 1024 + 2048)) != 0) {
- //Standardized times always appear as numeric.
- //In general, use of standardizeWhat implies that the source is variable,
- // so there is no generally applicable "source constraint".
- //So remove constraints for
- // 2 standardizes numeric times
- // 1024,2048 cause string times to appear as numeric times
+ boolean sourceIsNumeric = edv.sourceDataPAType() != PAType.STRING &&
+ edv.sourceDataPAType() != PAType.CHAR;
+
+ if (isTimeStamp && (standardizeWhat & (2 + 1024 + 2048)) != 0) {
+ // Standardized times always appear as numeric.
+ // In general, use of standardizeWhat implies that the source is variable,
+ // so there is no generally applicable "source constraint".
+ // So remove constraints for
+ // 2 standardizes numeric times
+ // 1024,2048 cause string times to appear as numeric times
keepCon = false;
} else if (sourceIsNumeric && (standardizeWhat & (1 + 256)) != 0) {
- //In general, use of standardizeWhat implies that the source is variable.
- //So the unpacked values or mv values will differ in defferent files.
- //So remove constraints for
- // 1 unpack numeric values
- // 256 find numeric mv
+ // In general, use of standardizeWhat implies that the source is variable.
+ // So the unpacked values or mv values will differ in different files.
+ // So remove constraints for
+ // 1 unpack numeric values
+ // 256 find numeric mv
keepCon = false;
} else if (!sourceIsNumeric && (standardizeWhat & (4 + 512)) != 0) {
- //In general, use of standardizeWhat implies that the source is variable.
- //So the unpacked values or mv values will differ in defferent files.
- //So remove constraints for
- // 4 applies string mv
- // 512 allows a wide variety of string missing values
+ // In general, use of standardizeWhat implies that the source is variable.
+ // So the unpacked values or mv values will differ in different files.
+ // So remove constraints for
+ // 4 applies string mv
+ // 512 allows a wide variety of string missing values
keepCon = false;
} else if (tOp.equals(PrimitiveArray.REGEX_OP)) {
keepCon = sourceCanConstrainStringRegex.length() > 0 &&
- edv.destValuesEqualSourceValues();
+ edv.destValuesEqualSourceValues();
} else if (isTimeStamp) {
- keepCon = ((EDVTimeStamp)edv).sourceTimeIsNumeric(); //just keep numeric time constraints
-
+ keepCon = ((EDVTimeStamp) edv).sourceTimeIsNumeric(); // just keep numeric time constraints
+
} else {
keepCon = true;
}
if (keepCon) {
- //keep this con
+ // keep this con
sourceConVars.add(conVars.get(con));
- sourceConOps.add(tOp); //regex is always PrimitiveArray.REGEX_OP
+ sourceConOps.add(tOp); // regex is always PrimitiveArray.REGEX_OP
sourceConValues.add(tValue);
}
}
@@ -3663,11 +4044,11 @@ public void getDataForDapQuery(int language, String loggedInAs, String requestUr
cumNNotRead += tFileTable.nRows();
throw new SimpleException(MustBe.THERE_IS_NO_DATA + " (" + reasonNotOk + ")");
}
- //String2.log(">> 1 sourceConValues=" + sourceConValues.toString());
+ // String2.log(">> 1 sourceConValues=" + sourceConValues.toString());
- //if dataset has sortedColumnName, look for min,max constraints for it.
- //if sortedDVI is time, min/maxSorted are source values.
- double minSorted = -Double.MAX_VALUE, maxSorted = Double.MAX_VALUE; //get everything
+ // if dataset has sortedColumnName, look for min,max constraints for it.
+ // if sortedDVI is time, min/maxSorted are source values.
+ double minSorted = -Double.MAX_VALUE, maxSorted = Double.MAX_VALUE; // get everything
if (sortedDVI >= 0) {
for (int con = 0; con < nCon; con++) {
if (conVars.get(con).equals(sortedColumnSourceName)) {
@@ -3678,22 +4059,25 @@ public void getDataForDapQuery(int language, String loggedInAs, String requestUr
continue;
double valD = String2.parseDouble(conValues.get(con));
- //convert time constraints from epochSeconds to source values
+ // convert time constraints from epochSeconds to source values
if (edv instanceof EDVTimeStamp) {
- EDVTimeStamp edvts = (EDVTimeStamp)dataVariables[sortedDVI];
+ EDVTimeStamp edvts = (EDVTimeStamp) dataVariables[sortedDVI];
if (!edvts.sourceTimeIsNumeric())
continue;
valD = edvts.epochSecondsToSourceTimeDouble(valD);
}
- //0"!=", 1REGEX_OP, 2"<=", 3">=", 4"=", 5"<", 6">"};
- //It is possible that multiple ops conflict (e.g., multiple < and/or =).
- //Do some things to deal with it.
- if (op.charAt(0) == '<') maxSorted = Math.min(maxSorted, valD); //lowest is most important
- else if (op.charAt(0) == '>') minSorted = Math.max(minSorted, valD); //highest is most important
+ // 0"!=", 1REGEX_OP, 2"<=", 3">=", 4"=", 5"<", 6">"};
+ // It is possible that multiple ops conflict (e.g., multiple < and/or =).
+ // Do some things to deal with it.
+ if (op.charAt(0) == '<')
+ maxSorted = Math.min(maxSorted, valD); // lowest is most important
+ else if (op.charAt(0) == '>')
+ minSorted = Math.max(minSorted, valD); // highest is most important
else if (op.charAt(0) == '=') {
- minSorted = valD; maxSorted = valD;
- break; //so that it is last found (trumps others)
+ minSorted = valD;
+ maxSorted = valD;
+ break; // so that it is last found (trumps others)
}
}
}
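A minimal sketch of the folding rules above, with made-up constraint values:

    public class SortedRangeFoldDemo {
        public static void main(String[] args) {
            double minSorted = -Double.MAX_VALUE, maxSorted = Double.MAX_VALUE;
            minSorted = Math.max(minSorted, 100.0); // time>=100: highest '>' bound wins
            maxSorted = Math.min(maxSorted, 200.0); // time<=200
            maxSorted = Math.min(maxSorted, 150.0); // time<150: lowest '<' bound wins
            // a time=120 constraint pins both ends and ends the scan (the break above):
            minSorted = 120.0;
            maxSorted = 120.0;
            System.out.println("[" + minSorted + ", " + maxSorted + "]"); // [120.0, 120.0]
        }
    }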
@@ -3702,17 +4086,18 @@ else if (op.charAt(0) == '=') {
minSorted = Double.NaN;
maxSorted = Double.NaN;
}
- if (reallyVerbose) String2.log("minSorted=" + minSorted + " maxSorted=" + maxSorted);
- //String2.log(">> 2 sourceConValues=" + sourceConValues.toString());
+ if (reallyVerbose)
+ String2.log("minSorted=" + minSorted + " maxSorted=" + maxSorted);
+ // String2.log(">> 2 sourceConValues=" + sourceConValues.toString());
- //go though files in tFileTable
+ // go though files in tFileTable
int nFiles = tFileTable.nRows();
Table distinctTable = null;
- int task = 0; //number for next task to be created
- long nNotRead = 0; //either don't have matching data or do ('distinct' and 1 value matches)
+ int task = 0; // number for next task to be created
+ long nNotRead = 0; // either don't have matching data or do ('distinct' and 1 value matches)
ResultStatisticsAccumulator accumulator = new ResultStatisticsAccumulator();
- int tnThreads = nThreads >= 1 && nThreads < Integer.MAX_VALUE? nThreads : EDStatic.nTableThreads;
- //reduce tnThreads based on memory available
+ int tnThreads = nThreads >= 1 && nThreads < Integer.MAX_VALUE ? nThreads : EDStatic.nTableThreads;
+ // reduce tnThreads based on memory available
tnThreads = adjustNThreads(tnThreads);
ThreadedWorkManager<Table> workManager = new ThreadedWorkManager<>(
tnThreads,
@@ -3722,26 +4107,26 @@ else if (op.charAt(0) == '=') {
} else {
accumulator.incrementMatch();
if (debugMode) {
- String2.log(">> task #" + (accumulator.getMatch() + accumulator.getNoMatch() -1) + " is writing to tableWriter.");
+ String2.log(">> task #" + (accumulator.getMatch() + accumulator.getNoMatch() - 1)
+ + " is writing to tableWriter.");
}
tableWriter.writeSome(result);
- //if exception, will be caught below
+ // if exception, will be caught below
if (tableWriter.noMoreDataPlease) {
throw new NoMoreDataPleaseException();
}
}
});
-
+
try {
- FILE_LOOP:
- for (int f = 0; f < nFiles; f++) {
- if (Thread.interrupted()) {
+ FILE_LOOP: for (int f = 0; f < nFiles; f++) {
+ if (Thread.interrupted()) {
if (workManager != null)
workManager.forceShutdown();
- throw new InterruptedException(); //consume the interrupted status
+ throw new InterruptedException(); // consume the interrupted status
}
- //can file be rejected based on constraints?
+ // can file be rejected based on constraints?
boolean ok = true;
for (int con = 0; con < nCon; con++) {
String op = conOps.get(con);
@@ -3749,110 +4134,114 @@ else if (op.charAt(0) == '=') {
EDV edv = dataVariables[dv];
PAType conEdvSourcePAType = edv.sourceDataPAType();
if (op.equals(PrimitiveArray.REGEX_OP)) {
- //only reject a file based on regex_op if file has just one String value
- //(since numbers may be further processed)
+ // only reject a file based on regex_op if file has just one String value
+ // (since numbers may be further processed)
if (conEdvSourcePAType == PAType.CHAR ||
- conEdvSourcePAType == PAType.STRING) {
- String tMin = tFileTable.getStringData(dv0 + dv*3 + 0, f);
- String tMax = tFileTable.getStringData(dv0 + dv*3 + 1, f);
- int tNaN = tFileTable.getIntData( dv0 + dv*3 + 2, f);
- if (//tMin.equals(tMax) && //this is tested by isOK for regex
- !isOK(tMin, tMax, tNaN, op, conValues.get(con))) {
- if (debugMode)
- String2.log("file " + f +
- " rejected because failed String test " +
- conVars.get(con) + op + String2.toJson(conValues.get(con), 256) +
- " when file min=" + String2.toJson(tMin, 256) +
- ", max=" + String2.toJson(tMax, 256) +
- ", hasNaN=" + (tNaN != 0) + ".");
+ conEdvSourcePAType == PAType.STRING) {
+ String tMin = tFileTable.getStringData(dv0 + dv * 3 + 0, f);
+ String tMax = tFileTable.getStringData(dv0 + dv * 3 + 1, f);
+ int tNaN = tFileTable.getIntData(dv0 + dv * 3 + 2, f);
+ if (// tMin.equals(tMax) && //this is tested by isOK for regex
+ !isOK(tMin, tMax, tNaN, op, conValues.get(con))) {
+ if (debugMode)
+ String2.log("file " + f +
+ " rejected because failed String test " +
+ conVars.get(con) + op + String2.toJson(conValues.get(con), 256) +
+ " when file min=" + String2.toJson(tMin, 256) +
+ ", max=" + String2.toJson(tMax, 256) +
+ ", hasNaN=" + (tNaN != 0) + ".");
ok = false;
break;
}
- } //else don't reject based on this constraint
-
+ } // else don't reject based on this constraint
} else if (edv instanceof EDVTimeStamp tdv) {
- //conValue is epochSeconds (not source time units), so convert fMin,fMax to epSeconds
+ // conValue is epochSeconds (not source time units), so convert fMin,fMax to
+ // epSeconds
if (conEdvSourcePAType == PAType.STRING &&
- (!tdv.sourceTimeFormat().toLowerCase().startsWith("yyyy") ||
- Double.isNaN(conValuesD[con]))) {
- //strings are sorted incorrectly by e.g., MM/dd/yyyy
- // so tests vs min,max are invalid
- //test comparing to NaN are unreliable
- //so test all files
+ (!tdv.sourceTimeFormat().toLowerCase().startsWith("yyyy") ||
+ Double.isNaN(conValuesD[con]))) {
+ // strings are sorted incorrectly by e.g., MM/dd/yyyy
+ // so tests vs min,max are invalid
+ // tests comparing to NaN are unreliable
+ // so test all files
} else {
- //numeric timestamp, or yyyy... String timestamp
- double fMin = tdv.sourceTimeToEpochSeconds(tFileTable.getStringData(dv0 + dv*3 + 0, f));
- double fMax = tdv.sourceTimeToEpochSeconds(tFileTable.getStringData(dv0 + dv*3 + 1, f));
- int fNaN = tFileTable.getIntData(dv0 + dv*3 + 2, f);
- //if min/max String times are invalid, act as if far in past/future
- if (Double.isNaN(fMin))
+ // numeric timestamp, or yyyy... String timestamp
+ double fMin = tdv.sourceTimeToEpochSeconds(tFileTable.getStringData(dv0 + dv * 3 + 0, f));
+ double fMax = tdv.sourceTimeToEpochSeconds(tFileTable.getStringData(dv0 + dv * 3 + 1, f));
+ int fNaN = tFileTable.getIntData(dv0 + dv * 3 + 2, f);
+ // if min/max String times are invalid, act as if far in past/future
+ if (Double.isNaN(fMin))
fMin = -1e100;
if (Double.isNaN(fMax))
fMax = 1e100;
- //2018-07-05 removed: now, given updateEveryNMillis, assume that info about files is perfectly up-to-data
+ // 2018-07-05 removed: now, given updateEveryNMillis, assume that info about
+ // files is perfectly up-to-date
/*
- //if fMax is in 20 hours ago to 4 hours in future, set fMax to Now+4hours)
- // 4hrs avoids clock drift problems
- double secondsNowP4 = System.currentTimeMillis() / 1000 + 4 * Calendar2.SECONDS_PER_HOUR;
- if (!Double.isNaN(fMax) &&
- secondsNowP4 - fMax > 0 &&
- secondsNowP4 - fMax < Calendar2.SECONDS_PER_DAY &&
- !isFromHttpGet) { //but HttpGet is assumed to be perfectly up-to-date
- if (reallyVerbose)
- String2.log("file " + tdv.destinationName() +
- " maxTime is within last 24hrs, so ERDDAP is pretending file maxTime is now+4hours.");
- fMax = secondsNowP4;
- }
- */
-
- //String2.log(">> test file=" + f + " fMin=" + fMin + " fMax=" + fMax + " op='" + op + "' conValuesD[con]=" + conValuesD[con]);
- if (!isOK(PAType.STRING, //test all times with full precision
- fMin, fMax, fNaN, op, conValuesD[con])) { //test w epochSeconds
- if (debugMode)
- String2.log("file " + f +
- " rejected because failed time test " +
- conVars.get(con) + op + conValues.get(con) +
- "(" + Calendar2.safeEpochSecondsToIsoStringTZ(conValuesD[con], "\"\"") +
- ") when file min=" + fMin + " max=" + fMax +
- ", hasNaN=" + (fNaN != 0) + ".");
+ * //if fMax is in 20 hours ago to 4 hours in future, set fMax to Now+4hours)
+ * // 4hrs avoids clock drift problems
+ * double secondsNowP4 = System.currentTimeMillis() / 1000 + 4 *
+ * Calendar2.SECONDS_PER_HOUR;
+ * if (!Double.isNaN(fMax) &&
+ * secondsNowP4 - fMax > 0 &&
+ * secondsNowP4 - fMax < Calendar2.SECONDS_PER_DAY &&
+ * !isFromHttpGet) { //but HttpGet is assumed to be perfectly up-to-date
+ * if (reallyVerbose)
+ * String2.log("file " + tdv.destinationName() +
+ * " maxTime is within last 24hrs, so ERDDAP is pretending file maxTime is now+4hours."
+ * );
+ * fMax = secondsNowP4;
+ * }
+ */
+
+ // String2.log(">> test file=" + f + " fMin=" + fMin + " fMax=" + fMax + " op='"
+ // + op + "' conValuesD[con]=" + conValuesD[con]);
+ if (!isOK(PAType.STRING, // test all times with full precision
+ fMin, fMax, fNaN, op, conValuesD[con])) { // test w epochSeconds
+ if (debugMode)
+ String2.log("file " + f +
+ " rejected because failed time test " +
+ conVars.get(con) + op + conValues.get(con) +
+ "(" + Calendar2.safeEpochSecondsToIsoStringTZ(conValuesD[con], "\"\"") +
+ ") when file min=" + fMin + " max=" + fMax +
+ ", hasNaN=" + (fNaN != 0) + ".");
ok = false;
break;
- }
+ }
}
} else if (conEdvSourcePAType == PAType.CHAR ||
- conEdvSourcePAType == PAType.STRING) {
- //String variables
- String fMin = tFileTable.getStringData(dv0 + dv*3 + 0, f);
- String fMax = tFileTable.getStringData(dv0 + dv*3 + 1, f);
- int fNaN = tFileTable.getIntData( dv0 + dv*3 + 2, f);
+ conEdvSourcePAType == PAType.STRING) {
+ // String variables
+ String fMin = tFileTable.getStringData(dv0 + dv * 3 + 0, f);
+ String fMax = tFileTable.getStringData(dv0 + dv * 3 + 1, f);
+ int fNaN = tFileTable.getIntData(dv0 + dv * 3 + 2, f);
if (!isOK(fMin, fMax, fNaN, op, conValues.get(con))) {
- if (debugMode)
- String2.log("file " + f +
- " rejected because failed String test " +
- conVars.get(con) + op + String2.toJson(conValues.get(con), 256) +
- " when file min=" + String2.toJson(fMin, 256) +
- ", max=" + String2.toJson(fMax, 256) +
- ", hasNaN=" + (fNaN != 0) + ".");
+ if (debugMode)
+ String2.log("file " + f +
+ " rejected because failed String test " +
+ conVars.get(con) + op + String2.toJson(conValues.get(con), 256) +
+ " when file min=" + String2.toJson(fMin, 256) +
+ ", max=" + String2.toJson(fMax, 256) +
+ ", hasNaN=" + (fNaN != 0) + ".");
ok = false;
break;
}
} else {
- //numeric variables (and not PrimitiveArray.REGEX_OP)
- double fMin = tFileTable.getDoubleData(dv0 + dv*3 + 0, f);
- double fMax = tFileTable.getDoubleData(dv0 + dv*3 + 1, f);
- int fNaN = tFileTable.getIntData( dv0 + dv*3 + 2, f);
+ // numeric variables (and not PrimitiveArray.REGEX_OP)
+ double fMin = tFileTable.getDoubleData(dv0 + dv * 3 + 0, f);
+ double fMax = tFileTable.getDoubleData(dv0 + dv * 3 + 1, f);
+ int fNaN = tFileTable.getIntData(dv0 + dv * 3 + 2, f);
if (!isOK(conEdvSourcePAType, fMin, fMax, fNaN, op, conValuesD[con])) {
- if (debugMode)
- String2.log("file " + f +
- " rejected because failed numeric test " +
- conVars.get(con) + op + conValues.get(con) +
- " when file min=" + fMin + ", max=" + fMax +
- ", hasNaN=" + (fNaN != 0) + ".");
+ if (debugMode)
+ String2.log("file " + f +
+ " rejected because failed numeric test " +
+ conVars.get(con) + op + conValues.get(con) +
+ " when file min=" + fMin + ", max=" + fMax +
+ ", hasNaN=" + (fNaN != 0) + ".");
ok = false;
break;
}
@@ -3863,60 +4252,63 @@ else if (op.charAt(0) == '=') {
continue;
}
- //if request is for distinct() values and this file just has 1 value for all requested variables,
- //then no need to even look in the file
+ // if request is for distinct() values and this file just has 1 value for all
+ // requested variables,
+ // then no need to even look in the file
if (distinct) {
- //FUTURE: this could be sped up by keeping the table for a run of qualified files
- // then calling standardizeResultsTable (instead of 1 row at a time).
+ // FUTURE: this could be sped up by keeping the table for a run of qualified
+ // files
+ // then calling standardizeResultsTable (instead of 1 row at a time).
boolean allDistinct = true;
for (int rvi = 0; rvi < dvi.length; rvi++) {
int dv = dvi[rvi];
if (resultsTypes[rvi].equals("String")) {
- String fMin = tFileTable.getStringData(dv0 + dv*3 + 0, f);
- String fMax = tFileTable.getStringData(dv0 + dv*3 + 1, f);
+ String fMin = tFileTable.getStringData(dv0 + dv * 3 + 0, f);
+ String fMax = tFileTable.getStringData(dv0 + dv * 3 + 1, f);
if (!fMin.equals(fMax)) {
allDistinct = false;
break;
}
} else {
- double fMin = tFileTable.getNiceDoubleData(dv0 + dv*3 + 0, f);
- double fMax = tFileTable.getNiceDoubleData(dv0 + dv*3 + 1, f);
+ double fMin = tFileTable.getNiceDoubleData(dv0 + dv * 3 + 0, f);
+ double fMax = tFileTable.getNiceDoubleData(dv0 + dv * 3 + 1, f);
if (fMin != fMax) {
allDistinct = false;
- break;
+ break;
}
}
}
-
- //if all requested columns are distinct, add a row to distinctTable
+
+ // if all requested columns are distinct, add a row to distinctTable
if (allDistinct) {
- //if (reallyVerbose) String2.log("file " + f + " is allDistinct");
+ // if (reallyVerbose) String2.log("file " + f + " is allDistinct");
boolean newDistinctTable = distinctTable == null;
if (newDistinctTable)
distinctTable = new Table();
for (int rvi = 0; rvi < dvi.length; rvi++) {
int dv = dvi[rvi];
- String tVal = tFileTable.getStringData(dv0 + dv*3 + 0, f);
+ String tVal = tFileTable.getStringData(dv0 + dv * 3 + 0, f);
if (newDistinctTable) {
EDV edv = dataVariables[dv];
- distinctTable.addColumn(edv.sourceName(),
- PrimitiveArray.factory(edv.sourceDataPAType(), 1, tVal));
+ distinctTable.addColumn(edv.sourceName(),
+ PrimitiveArray.factory(edv.sourceDataPAType(), 1, tVal));
} else {
distinctTable.getColumn(rvi).addString(tVal);
}
}
- //if (newDistinctTable) String2.log(" initial distinctTable=\n" + distinctTable.dataToString());
+ // if (newDistinctTable) String2.log(" initial distinctTable=\n" +
+ // distinctTable.dataToString());
nNotRead++;
- continue; //to next file;
+ continue; // to next file;
}
}
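A minimal sketch of the distinct() shortcut above, with hypothetical per-file statistics:

    public class DistinctShortcutDemo {
        public static void main(String[] args) {
            // per-file stats, as would come from tFileTable (hypothetical values):
            String sMin = "46026", sMax = "46026"; // String column has one distinct value
            double nMin = 12.5, nMax = 12.5;       // numeric column has one distinct value
            boolean allDistinct = sMin.equals(sMax) && nMin == nMax;
            if (allDistinct)
                System.out.println("skip the read; add row (46026, 12.5) to distinctTable");
        }
    }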
- //end of run for files which added info to distinctTable
- //so empty out distinctTable
+ // end of run for files which added info to distinctTable
+ // so empty out distinctTable
if (distinctTable != null) {
- //standardizeResultsTable applies all constraints
- preStandardizeResultsTable(loggedInAs, distinctTable);
+ // standardizeResultsTable applies all constraints
+ preStandardizeResultsTable(loggedInAs, distinctTable);
if (distinctTable.nRows() > 0) {
standardizeResultsTable(language, requestUrl, userDapQuery, distinctTable);
tableWriter.writeSome(distinctTable);
@@ -3928,32 +4320,34 @@ else if (op.charAt(0) == '=') {
distinctTable = null;
}
- //Read all data from file within minSorted to maxSorted.
- //This throws Throwable if trouble. I think that's appropriate.
+ // Read all data from file within minSorted to maxSorted.
+ // This throws Throwable if trouble. I think that's appropriate.
Table table;
int tDirIndex = ftDirIndex.get(f);
String tDir = dirList.get(tDirIndex);
String tName = ftFileList.get(f);
- String tExtractValue = extractedColNameIndex >= 0?
- tFileTable.getStringData(dv0 + extractedColNameIndex*3 + 0, f) :
- null;
-
- if (reallyVerbose) String2.log("#" + f + " get data from " + tDir + tName);
-
- //*** The new parallelized version of reading data files
- workManager.addTask(new EDDTableFromFilesCallable(language,
- ">> " + className + " " + datasetID + " nThreads=" + tnThreads +
- //parent thread's name (so in ERDDAP I can distinguish different user requests)
- " thread=" + Thread.currentThread().getName() +
- " task=" + task,
- this, loggedInAs, requestUrl, userDapQuery,
+ String tExtractValue = extractedColNameIndex >= 0
+ ? tFileTable.getStringData(dv0 + extractedColNameIndex * 3 + 0, f)
+ : null;
+
+ if (reallyVerbose)
+ String2.log("#" + f + " get data from " + tDir + tName);
+
+ // *** The new parallelized version of reading data files
+ workManager.addTask(new EDDTableFromFilesCallable(language,
+ ">> " + className + " " + datasetID + " nThreads=" + tnThreads +
+ // parent thread's name (so in ERDDAP I can distinguish different user requests)
+ " thread=" + Thread.currentThread().getName() +
+ " task=" + task,
+ this, loggedInAs, requestUrl, userDapQuery,
tDirIndex, tDir, tName, ftLastMod.get(f),
- resultsVariables, resultsTypes,
- ftSortedSpacing.get(f), minSorted, maxSorted,
+ resultsVariables, resultsTypes,
+ ftSortedSpacing.get(f), minSorted, maxSorted,
sourceConVars, sourceConOps, sourceConValues));
task++;
- } //end of FILE_LOOP
- if (debugMode) String2.log(">> File loop is done.");
+ } // end of FILE_LOOP
+ if (debugMode)
+ String2.log(">> File loop is done.");
workManager.finishedEnqueing();
// all is well. process all pending tasks
@@ -3961,83 +4355,89 @@ else if (op.charAt(0) == '=') {
} catch (Throwable t) {
- while (t instanceof ExecutionException) //may be doubly wrapped
+ while (t instanceof ExecutionException) // may be doubly wrapped
t = t.getCause();
- EDStatic.rethrowClientAbortException(t); //first throwable type handled
+ EDStatic.rethrowClientAbortException(t); // first throwable type handled
- //if interrupted, OutOfMemoryError or too much data, rethrow t
+ // if interrupted, OutOfMemoryError or too much data, rethrow t
String tToString = t.toString();
if (Thread.currentThread().isInterrupted() ||
- t instanceof InterruptedException ||
- t instanceof OutOfMemoryError ||
- tToString.indexOf(Math2.memoryTooMuchData) >= 0 ||
- tToString.indexOf(Math2.TooManyOpenFiles) >= 0)
+ t instanceof InterruptedException ||
+ t instanceof OutOfMemoryError ||
+ tToString.indexOf(Math2.memoryTooMuchData) >= 0 ||
+ tToString.indexOf(Math2.TooManyOpenFiles) >= 0)
throw t;
- if (!(t instanceof NoMoreDataPleaseException)) { //the only exception to keep going
+ if (!(t instanceof NoMoreDataPleaseException)) { // the only exception to keep going
String2.log(MustBe.throwableToString(t));
throw t;
- //throw t instanceof WaitThenTryAgainException? t :
- // new WaitThenTryAgainException(
- // EDStatic.simpleBilingual(language, EDStatic.waitThenTryAgainAr) +
- // "\n(" + EDStatic.errorFromDataSource + tToString + ")", t);
+ // throw t instanceof WaitThenTryAgainException? t :
+ // new WaitThenTryAgainException(
+ // EDStatic.simpleBilingual(language, EDStatic.waitThenTryAgainAr) +
+ // "\n(" + EDStatic.errorFromDataSource + tToString + ")", t);
}
} finally {
- //shut everything down
+ // shut everything down
if (workManager != null) {
- try {workManager.forceShutdown();} catch (Exception e) {}
+ try {
+ workManager.forceShutdown();
+ } catch (Exception e) {
+ // ignore: forced shutdown during cleanup is best-effort
+ }
workManager = null;
}
}
- //flush distinctTable
+ // flush distinctTable
if (distinctTable != null) {
- //standardizeResultsTable applies all constraints
- preStandardizeResultsTable(loggedInAs, distinctTable);
+ // standardizeResultsTable applies all constraints
+ preStandardizeResultsTable(loggedInAs, distinctTable);
if (distinctTable.nRows() > 0) {
standardizeResultsTable(language, requestUrl, userDapQuery, distinctTable);
tableWriter.writeSome(distinctTable);
}
distinctTable = null;
}
- cumNNotRead += nNotRead;
+ cumNNotRead += nNotRead;
int nReadHaveMatch = accumulator.getMatch();
int nReadNoMatch = accumulator.getNoMatch();
cumNReadHaveMatch += nReadHaveMatch;
- cumNReadNoMatch += nReadNoMatch;
- if (reallyVerbose) {
+ cumNReadNoMatch += nReadNoMatch;
+ if (reallyVerbose) {
long total = Math.max(1, nNotRead + nReadHaveMatch + nReadNoMatch);
- String2.log(" notRead=" + String2.right("" + (nNotRead * 100 / total), 3) +
- "% readHaveMatch=" + String2.right("" + (nReadHaveMatch * 100 / total), 3) +
- "% readNoMatch=" + String2.right("" + (nReadNoMatch * 100 / total), 3) +
- "% total=" + total);
- long cumTotal = Math.max(1, cumNNotRead + cumNReadHaveMatch + cumNReadNoMatch);
- String2.log(" cumNotRead=" + String2.right("" + (cumNNotRead * 100 / cumTotal), 3) +
- "% cumReadHaveMatch=" + String2.right("" + (cumNReadHaveMatch * 100 / cumTotal), 3) +
- "% cumReadNoMatch=" + String2.right("" + (cumNReadNoMatch * 100 / cumTotal), 3) +
- "% cumTotal=" + cumTotal + " " + datasetID);
+ String2.log(" notRead=" + String2.right("" + (nNotRead * 100 / total), 3) +
+ "% readHaveMatch=" + String2.right("" + (nReadHaveMatch * 100 / total), 3) +
+ "% readNoMatch=" + String2.right("" + (nReadNoMatch * 100 / total), 3) +
+ "% total=" + total);
+ long cumTotal = Math.max(1, cumNNotRead + cumNReadHaveMatch + cumNReadNoMatch);
+ String2.log(" cumNotRead=" + String2.right("" + (cumNNotRead * 100 / cumTotal), 3) +
+ "% cumReadHaveMatch=" + String2.right("" + (cumNReadHaveMatch * 100 / cumTotal), 3) +
+ "% cumReadNoMatch=" + String2.right("" + (cumNReadNoMatch * 100 / cumTotal), 3) +
+ "% cumTotal=" + cumTotal + " " + datasetID);
}
- //done
+ // done
tableWriter.finish();
}
-
+
private class ResultStatisticsAccumulator {
private int nReadHaveMatch = 0;
private int nReadNoMatch = 0;
-
+
public void incrementMatch() {
nReadHaveMatch++;
}
+
public void incrementNoMatch() {
nReadNoMatch++;
}
+
public int getMatch() {
return nReadHaveMatch;
}
+
public int getNoMatch() {
return nReadNoMatch;
}
@@ -4048,60 +4448,69 @@ public int getNoMatch() {
* EDDTableFromPostNcFiles uses this to remove data not accessible to this user.
*/
public void preStandardizeResultsTable(String loggedInAs, Table table) {
- //this base version does nothing
+ // this base version does nothing
}
-
/**
- * For String variables (or numeric variables and PrimitiveArray.REGEX_OP),
+ * For String variables (or numeric variables and PrimitiveArray.REGEX_OP),
* given a min, max, hasNaN value for a given file (or the whole dataset),
* this returns true if the file *may* have data matching opIndex,opValue.
*
- * See PrimitiveArray.testValueOpValue: Note that "" is not treated specially. "" isn't like NaN.
- *
testValueOpValue("a" > "") will return true.
- *
testValueOpValue("a" < "") will return false.
- *
Having min here be exclusive of "" allows better testing
- *
e.g., it can say that a file with "", "a", "z", !isOK for ="A",
- * which is an important type of test (given that .subset generates "=" constraints).
+ *
+ * See PrimitiveArray.testValueOpValue: Note that "" is not treated specially.
+ * "" isn't like NaN.
+ *
+ * testValueOpValue("a" > "") will return true.
+ *
+ * testValueOpValue("a" < "") will return false.
+ *
+ * Having min here be exclusive of "" allows better testing
+ *
+ * e.g., it can say that a file with "", "a", "z" is !isOK for ="A",
+ * which is an important type of test (given that .subset generates "="
+ * constraints).
*
- * @param min exclusive of "". If no valid values, min and max should be ""
- * @param max
- * @param hasNaN 0=false 1=true (has "" values)
+ * @param min exclusive of "". If no valid values, min and max should be ""
+ * @param max
+ * @param hasNaN 0=false 1=true (has "" values)
* @param conOp
* @param conValue the constraintValue
*/
public static boolean isOK(String min, String max, int hasNaN, String conOp, String conValue) {
- //THE SPECIAL TESTS REQUIRE LOTS OF THOUGHT!!!
+ // THE SPECIAL TESTS REQUIRE LOTS OF THOUGHT!!!
- //deal with special tests when hasNaN (where hasNaN=1 makes a difference)
+ // deal with special tests when hasNaN (where hasNaN=1 makes a difference)
if (hasNaN == 1) {
- if (conValue.equals("") && // ""="" returns true
- (conOp.equals(PrimitiveArray.REGEX_OP) || conOp.equals("=") ||
- conOp.equals(">=") || conOp.equals("<=")))
+ if (conValue.equals("") && // ""="" returns true
+ (conOp.equals(PrimitiveArray.REGEX_OP) || conOp.equals("=") ||
+ conOp.equals(">=") || conOp.equals("<=")))
return true;
- else if (conOp.equals("<"))
- return !conValue.equals(""); // ""<"a" returns true
- // ""<"" returns false
+ else if (conOp.equals("<"))
+ return !conValue.equals(""); // ""<"a" returns true
+ // ""<"" returns false
}
- //below here, hasNaN is irrelevant (think as if hasNaN=0; tests done with min=someValue)
+ // below here, hasNaN is irrelevant (think as if hasNaN=0; tests done with
+ // min=someValue)
- int minC = min.compareTo(conValue); //<0 indicates min < conValue; >0 indicates min > conValue
+ int minC = min.compareTo(conValue); // <0 indicates min < conValue; >0 indicates min > conValue
int maxC = max.compareTo(conValue);
- //0"!=", 1REGEX_OP, 2"<=", 3">=", 4"=", 5"<", 6">"};
+ // 0"!=", 1REGEX_OP, 2"<=", 3">=", 4"=", 5"<", 6">"};
if (conOp.equals("!=")) {
- if (min.equals(max) && min.equals(conValue)) return false;
+ if (min.equals(max) && min.equals(conValue))
+ return false;
} else if (conOp.equals(PrimitiveArray.REGEX_OP)) {
- if (min.equals(max) && !min.matches(conValue)) return false;
+ if (min.equals(max) && !min.matches(conValue))
+ return false;
} else if (conOp.equals("<=")) {
- return minC <= 0;
+ return minC <= 0;
} else if (conOp.equals(">=")) {
return maxC >= 0;
- } else if (conOp.equals("=")) {
+ } else if (conOp.equals("=")) {
return minC <= 0 && maxC >= 0;
} else if (conOp.equals("<")) {
- return minC < 0;
+ return minC < 0;
} else if (conOp.equals(">")) {
return maxC > 0;
}
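Usage sketch for the String test above (illustrative values; the comments give the expected results):

    // file spans "a".."z" with no "" values: ="m" may match, so read the file
    isOK("a", "z", 0, "=", "m"); // true
    // single-valued file ("b".."b"): ="A" can never match, so skip the file
    isOK("b", "b", 0, "=", "A"); // false
    // hasNaN=1 means the file contains "" values, so ="" may match
    isOK("a", "z", 1, "=", "");  // true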
@@ -4114,58 +4523,62 @@ else if (conOp.equals("<"))
* given a min and a max value for a given file (or the whole dataset),
* this returns true if the file may have data matching opIndex,opValue.
*
- * @param tPAType tPAType is only used as in indicator of precision.
- * If you want the full precision, use PAType.LONG.
- * @param min if no valid values, this should be NaN
- * @param max if no valid values, this should be NaN
- * @param hasNaN 0=false 1=true
- * @param conOp Must *not* be PrimitiveArray.REGEX_OP
+ * @param tPAType tPAType is only used as an indicator of precision.
+ * If you want the full precision, use PAType.LONG.
+ * @param min if no valid values, this should be NaN
+ * @param max if no valid values, this should be NaN
+ * @param hasNaN 0=false 1=true
+ * @param conOp Must *not* be PrimitiveArray.REGEX_OP
* @param conValue the constraint value
*/
- public static boolean isOK(PAType tPAType, double min, double max, int hasNaN,
- String conOp, double conValue) {
- //THE SPECIAL TESTS REQUIRE LOTS OF THOUGHT!!!
- //String2.log(">> isOK file min=" + min + " max=" + max + " op=" + conOp + " conVal=" + conValue);
+ public static boolean isOK(PAType tPAType, double min, double max, int hasNaN,
+ String conOp, double conValue) {
+ // THE SPECIAL TESTS REQUIRE LOTS OF THOUGHT!!!
+ // String2.log(">> isOK file min=" + min + " max=" + max + " op=" + conOp + "
+ // conVal=" + conValue);
- //conValue=NaN tests
+ // conValue=NaN tests
if (Double.isNaN(conValue)) {
- if (conOp.equals("=") || conOp.equals("<=") || conOp.equals(">="))
- // NaN=NaN returns true
- return hasNaN == 1; // 5 =NaN returns false
- else if (conOp.equals("!=")) // 5 !=NaN returns true
+ if (conOp.equals("=") || conOp.equals("<=") || conOp.equals(">="))
+ // NaN=NaN returns true
+ return hasNaN == 1; // 5 =NaN returns false
+ else if (conOp.equals("!=")) // 5 !=NaN returns true
return !Double.isNaN(max); // NaN!=NaN returns false
- else return false; //NaN tests other than = != return false
+ else
+ return false; // NaN tests other than = != return false
}
- //file has just NaN
- if (Double.isNaN(min) && Double.isNaN(max)) { //and we know conValue isn't NaN
- if (conOp.equals("!="))
- return true; //always: NaN != 5
- else return false; //never: NaN = 5 and other ops, too
+ // file has just NaN
+ if (Double.isNaN(min) && Double.isNaN(max)) { // and we know conValue isn't NaN
+ if (conOp.equals("!="))
+ return true; // always: NaN != 5
+ else
+ return false; // never: NaN = 5 and other ops, too
}
- //0"!=", 1REGEX_OP, 2"<=", 3">=", 4"=", 5"<", 6">"};
- //precision=5, 9, or 18 (full) significant figures
- int p = tPAType == PAType.FLOAT? 5 :
- tPAType == PAType.DOUBLE? 9 : 18; //18 (full) for integer types, char and String (dates)
+ // 0"!=", 1REGEX_OP, 2"<=", 3">=", 4"=", 5"<", 6">"};
+ // precision=5, 9, or 18 (full) significant figures
+ int p = tPAType == PAType.FLOAT ? 5 : tPAType == PAType.DOUBLE ? 9 : 18; // 18 (full) for integer types, char
+ // and String (dates)
if (tPAType == PAType.LONG) {
- //notably for time, round down min, max to nearest second to avoid
- // request to nearest second and actual more precise
- //if data is really longs, min and max will already be integers
+ // notably for time, round down min, max to nearest second to avoid
+ // request to nearest second and actual more precise
+ // if data is really longs, min and max will already be integers
min = Math.floor(min);
- max = Math.ceil(max);
+ max = Math.ceil(max);
}
if (conOp.equals("!=")) {
- if (min == max && min == conValue) return false; //be strict to reject
- //PrimitiveArray.REGEX_OP is handled by String isOK
+ if (min == max && min == conValue)
+ return false; // be strict to reject
+ // PrimitiveArray.REGEX_OP is handled by String isOK
} else if (conOp.equals("<=")) {
- return Math2.lessThanAE(p, min, conValue);
+ return Math2.lessThanAE(p, min, conValue);
} else if (conOp.equals(">=")) {
return Math2.greaterThanAE(p, max, conValue);
- } else if (conOp.equals("=")) {
+ } else if (conOp.equals("=")) {
return Math2.lessThanAE(p, min, conValue) && Math2.greaterThanAE(p, max, conValue);
} else if (conOp.equals("<")) {
- return min < conValue;
+ return min < conValue;
} else if (conOp.equals(">")) {
return max > conValue;
}
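The numeric version works the same way, except that "<=", ">=", and "=" are tested with Math2's almost-equal comparisons at the precision selected above. A few worked calls (the values are hypothetical):

    isOK(PAType.DOUBLE, 10.0, 20.0, 0, "<", 5.0);   // false: min 10 isn't < 5, so the file can be skipped
    isOK(PAType.DOUBLE, 10.0, 20.0, 0, "=", 15.0);  // true: 10 <= 15 <= 20 (to 9 significant figures)
    isOK(PAType.FLOAT, Double.NaN, Double.NaN, 1, "!=", 5.0);  // true: a file of all NaNs still satisfies !=5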
diff --git a/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFiles.java b/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFiles.java
index 6eedcdb4..def3ab7c 100644
--- a/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFiles.java
+++ b/WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFiles.java
@@ -17,6 +17,7 @@
import gov.noaa.pfel.coastwatch.griddata.NcHelper;
import gov.noaa.pfel.coastwatch.pointdata.Table;
+import gov.noaa.pfel.coastwatch.pointdata.TableFromMultidimNcFile;
import gov.noaa.pfel.coastwatch.util.FileVisitorDNLS;
import gov.noaa.pfel.erddap.util.EDStatic;
@@ -116,7 +117,9 @@ public Table lowGetSourceDataFromFile(String tFileDir, String tFileName,
tFileDir + tFileName, fileDir, decompressedDirectory(),
EDStatic.decompressedCacheMaxGB, true); //reuseExisting
if (mustGetData) {
- table.readMultidimNc(decompFullName, sourceDataNames, null,
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(decompFullName, sourceDataNames, null,
treatDimensionsAs,
getMetadata, standardizeWhat, removeMVRows,
sourceConVars, sourceConOps, sourceConValues);
@@ -231,7 +234,8 @@ public static String generateDatasetsXml(
//read the sample file
tStandardizeWhat = tStandardizeWhat < 0 || tStandardizeWhat == Integer.MAX_VALUE?
DEFAULT_STANDARDIZEWHAT : tStandardizeWhat;
- dataSourceTable.readMultidimNc(sampleFileName, null, useDimensions,
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(dataSourceTable);
+ reader.readMultidimNc(sampleFileName, null, useDimensions,
tDimAs, //treatDimensionsAs
true, tStandardizeWhat, tRemoveMVRows, //getMetadata, standardizeWhat, removeMVRows
null, null, null); //conVars, conOps, conVals
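The pattern introduced here, and repeated in the callers below, is that Table no longer reads multidimensional .nc files itself: the destination Table is wrapped in the new TableFromMultidimNcFile reader, which fills it. A minimal sketch (the file name and variable list are hypothetical):

    Table table = new Table();
    TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
    reader.readMultidimNc("/data/sample.nc",  // hypothetical local .nc file
        StringArray.fromCSV("TIME,TEMP"),     // loadVarNames
        null, null,                           // loadDimNames, treatDimensionsAs
        true, 0, false,                       // getMetadata, standardizeWhat, removeMVRows
        null, null, null);                    // sourceConVars, sourceConOps, sourceConValues
    // table now holds the flattened data from the file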
diff --git a/download/setupDatasetsXml.html b/download/setupDatasetsXml.html
index d78debbc..5facf082 100644
--- a/download/setupDatasetsXml.html
+++ b/download/setupDatasetsXml.html
@@ -172,6 +172,7 @@
units
+ <removeMVRows>
@@ -15755,7 +15756,20 @@ <removeMVRows>
+ <removeMVRows> is an OPTIONAL tag within a <dataset> tag in datasets.xml for EDDTableFromFiles (including all subclasses) datasets, though it is only used by EDDTableFromMultidimNcFiles. It can have a value of true or false. For example,
+ <removeMVRows>true</removeMVRows>
+
+ This removes any block of rows at the end of a group where all the values are missing_value, _FillValue, or the CoHort ...Array native missing value (or char=#32 for CharArrays).
+ This is for the CF DSG Multidimensional Array file type and similar files.
+ If true, this does the full test, which always loads all the max-dimension variables, so it may take extra time.
+
+ The default value of <removeMVRows> is false.
+
+ Recommendation -- If possible for your dataset, set removeMVRows to false: true can significantly slow down requests, though it may be needed for some datasets.
+
+
+
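The <removeMVRows> tag maps directly onto the removeMVRows argument of TableFromMultidimNcFile.readMultidimNc shown earlier. A sketch of the equivalent direct call, reusing the reader pattern above (the file name is hypothetical):

    // removeMVRows=true drops each group's trailing all-missing-value rows,
    // at the cost of always loading the max-dimension variables
    reader.readMultidimNc("/data/profiles.nc",  // hypothetical file
        null, null, null,   // default loadVarNames/loadDimNames, no treatDimensionsAs
        true, 0, true,      // getMetadata, standardizeWhat, removeMVRows=true
        null, null, null);  // no constraints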
diff --git a/pom.xml b/pom.xml
index fc8c802d..f606fac4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,7 +46,7 @@
v2.23
1.0.0
5.5.3
- test1.01
+ test1.02
12.0.10
diff --git a/src/test/java/gov/noaa/pfel/coastwatch/pointdata/TableTests.java b/src/test/java/gov/noaa/pfel/coastwatch/pointdata/TableTests.java
index f260b7d3..5c09ea0a 100644
--- a/src/test/java/gov/noaa/pfel/coastwatch/pointdata/TableTests.java
+++ b/src/test/java/gov/noaa/pfel/coastwatch/pointdata/TableTests.java
@@ -371,11 +371,13 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
null, // all vars
0, // standardizeWhat=0
StringArray.fromCSV("time"), StringArray.fromCSV(">"), StringArray.fromCSV("3426.69"));
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
null, null, null, // read default dimensions
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
StringArray.fromCSV("time"), StringArray.fromCSV(">"), StringArray.fromCSV("3426.69"));
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString();
expected =
@@ -401,13 +403,15 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
StringArray.fromCSV("station,latitude,longitude"),
0, // standardizeWhat=0
null, null, null);
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("station,latitude,longitude"),
null, // dimensions
null, // treatDimensionsAs
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
null, null, null);
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString();
expected = "station,latitude,longitude\n" +
@@ -440,12 +444,14 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
StringArray.fromCSV("station,latitude,longitude"),
0, // standardizeWhat=0
StringArray.fromCSV("latitude"), StringArray.fromCSV("<"), StringArray.fromCSV("39.1"));
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("station,latitude,longitude"),
null, null, // dimensions, treatDimensionsAs
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
StringArray.fromCSV("latitude"), StringArray.fromCSV("<"), StringArray.fromCSV("39.1"));
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString();
expected = "station,latitude,longitude\n" +
@@ -461,12 +467,15 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
StringArray.fromCSV("time,discharge"),
0, // standardizeWhat=0
StringArray.fromCSV("discharge"), StringArray.fromCSV(">"), StringArray.fromCSV("5400"));
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("time,discharge"),
null, null, // dimensions, treatDimensionsAs
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
StringArray.fromCSV("discharge"), StringArray.fromCSV(">"), StringArray.fromCSV("5400"));
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString();
expected = "time,discharge\n" +
@@ -492,12 +501,14 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
StringArray.fromCSV("station,latitude,longitude,time,discharge"),
0, // standardizeWhat=0
StringArray.fromCSV("station"), StringArray.fromCSV("="), StringArray.fromCSV("1463500.0"));
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("station,latitude,longitude,time,discharge"),
null, null, // dimensions, treatDimensionsAs
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
StringArray.fromCSV("station"), StringArray.fromCSV("="), StringArray.fromCSV("1463500.0"));
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString();
expected =
@@ -537,11 +548,13 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
table.readNcCF(fileName,
null, 0, // standardizeWhat=0
null, null, null);
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
null, null, null, // read all dimensions
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
null, null, null);
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString(10);
expected = readAsNcCF ? "discharge,station,time,longitude,latitude\n" +
@@ -579,12 +592,14 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
StringArray.fromCSV("station,latitude,longitude,time,discharge"),
0, // standardizeWhat=0
StringArray.fromCSV("discharge"), StringArray.fromCSV(">"), StringArray.fromCSV("5400"));
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("station,latitude,longitude,time,discharge"),
null, null, // dimensions, treatDimensionsAs
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
StringArray.fromCSV("discharge"), StringArray.fromCSV(">"), StringArray.fromCSV("5400"));
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString();
expected = "station,latitude,longitude,time,discharge\n" +
@@ -611,13 +626,15 @@ void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws Exception {
0, // standardizeWhat=0
StringArray.fromCSV("station,discharge"), StringArray.fromCSV("=,>"),
StringArray.fromCSV("1463500.0,5400"));
- else
- table.readMultidimNc(fileName,
+ else {
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("station,latitude,longitude,time,discharge"),
null, null, // dimensions, treatDimensionsAs
true, 0, true, // getMetadata, standardizeWhat, removeMVRows,
StringArray.fromCSV("station,discharge"), StringArray.fromCSV("=,>"),
StringArray.fromCSV("1463500.0,5400"));
+ }
String2.log("time=" + (System.currentTimeMillis() - time) + "ms");
results = table.dataToString();
expected = "station,latitude,longitude,time,discharge\n" +
@@ -2916,7 +2933,8 @@ void testReadMultidimNc() throws Exception {
/* */
// ** don't specify varNames or dimNames -- it finds the vars with the most dims
- table.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
true, 0, false, // readMetadata, standardizeWhat=0, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.dataToString(3);
@@ -2936,7 +2954,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(table.nRows(), 762, "nRows"); // 254*3
// * same but quick reject based on constraint
- table.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
+ reader.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
true, 0, false, // readMetadata, standardizeWhat=0, removeMVRows
StringArray.fromCSV("FORMAT_VERSION,FORMAT_VERSION"), // conVars
StringArray.fromCSV("=,="), // conOps
@@ -2944,7 +2962,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(table.nRows(), 0, "nRows");
// * test don't removeMVRows
- table.readMultidimNc(fiName, null,
+ reader.readMultidimNc(fiName, null,
StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"),
null,
true, 0, false, // readMetadata, standardizeWhat=0, removeMVRows
@@ -3223,7 +3241,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(results, expectedEnd, "results=\n" + results);
// * test do removeMVRows
- table.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), null,
+ reader.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.dataToString(3);
@@ -3244,7 +3262,7 @@ void testReadMultidimNc() throws Exception {
// * same but quick reject based on constraint LAT,LON 26.587,154.853
// *** this takes 9ms while test above takes 99ms!
- table.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), null,
+ reader.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
StringArray.fromCSV("LATITUDE"), // conVars
StringArray.fromCSV("="), // conOps
@@ -3252,7 +3270,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(table.nRows(), 0, "nRows");
// * test different dim order (should be rearranged so the same)
- table.readMultidimNc(fiName, null, StringArray.fromCSV("N_LEVELS, ZZTOP, N_PROF"), null,
+ reader.readMultidimNc(fiName, null, StringArray.fromCSV("N_LEVELS, ZZTOP, N_PROF"), null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.dataToString(3);
@@ -3269,7 +3287,7 @@ void testReadMultidimNc() throws Exception {
// "Returning an empty table because var=PLATFORM_NUMBER failed its constraints,
// including =2901175. time=0"
- table.readMultidimNc(fiName, StringArray.fromCSV(
+ reader.readMultidimNc(fiName, StringArray.fromCSV(
"DATA_TYPE, FORMAT_VERSION, HANDBOOK_VERSION, REFERENCE_DATE_TIME, DATE_CREATION, " +
"DATE_UPDATE, PLATFORM_NUMBER, PROJECT_NAME, PI_NAME, CYCLE_NUMBER, DIRECTION, " +
"DATA_CENTRE, DC_REFERENCE, DATA_STATE_INDICATOR, DATA_MODE, PLATFORM_TYPE, " +
@@ -3317,7 +3335,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(table.nRows(), 17266, "nRows"); // same as when all variables were explicitly loaded
// * test different varNames
- table.readMultidimNc(fiName,
+ reader.readMultidimNc(fiName,
StringArray.fromCSV(
"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR"),
null, null,
@@ -3334,7 +3352,7 @@ void testReadMultidimNc() throws Exception {
// * test do removeMVRows when loadVariables is limited (to ensure all are
// loaded for the test)
- table.readMultidimNc(fiName, StringArray.fromCSV("LONGITUDE,PRES,PSAL_ADJUSTED_ERROR"),
+ reader.readMultidimNc(fiName, StringArray.fromCSV("LONGITUDE,PRES,PSAL_ADJUSTED_ERROR"),
null, null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
@@ -3357,7 +3375,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(results, expectedEnd, "results=\n" + results);
// * test read JULD
- table.readMultidimNc(fiName, StringArray.fromCSV("JULD"), null, null,
+ reader.readMultidimNc(fiName, StringArray.fromCSV("JULD"), null, null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.dataToString(3);
@@ -3378,7 +3396,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(results, expectedStart, "results=\n" + results);
// * test read JULD && PRES
- table.readMultidimNc(fiName, StringArray.fromCSV("JULD,PRES"), null, null,
+ reader.readMultidimNc(fiName, StringArray.fromCSV("JULD,PRES"), null, null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.dataToString(3);
@@ -3399,7 +3417,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(results, expectedStart, "results=\n" + results);
// * test read just static vars, in a different order
- table.readMultidimNc(fiName,
+ reader.readMultidimNc(fiName,
StringArray.fromCSV("HANDBOOK_VERSION,FORMAT_VERSION,DATA_TYPE"),
null, null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
@@ -3410,7 +3428,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(results, expectedStart, "results=\n" + results);
// * test read 0 dim variable -> empty table
- table.readMultidimNc(fiName,
+ reader.readMultidimNc(fiName,
StringArray.fromCSV("HISTORY_INSTITUTION"),
null, null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
@@ -3419,7 +3437,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(table.nColumns(), 0, "");
// * test read non-existent dim -> just scalar vars
- table.readMultidimNc(fiName,
+ reader.readMultidimNc(fiName,
null, StringArray.fromCSV("ZZTOP"), null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
@@ -3429,7 +3447,7 @@ void testReadMultidimNc() throws Exception {
Test.ensureEqual(results, expectedStart, "results=\n" + results);
// * test read non-existent Var -> empty table
- table.readMultidimNc(fiName,
+ reader.readMultidimNc(fiName,
StringArray.fromCSV("ZZTOP"), null, null,
true, 0, true, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
@@ -4682,7 +4700,8 @@ void testHardReadMultidimNc() throws Exception {
// Table.debugMode = true;
Table table = new Table();
// String2.log(NcHelper.ncdump(fileName, "-h"));
- table.readMultidimNc(fileName,
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("TIME,LATITUDE,LONGITUDE,DEPTH,TEMP,TEMP_DM"), // loadVarNames,
null, // loadDimNames,
new String[][] { { "LATITUDE", "LONGITUDE", "TIME" } }, // treatDimensionsAs
@@ -4723,7 +4742,7 @@ void testHardReadMultidimNc() throws Exception {
// standardizeWhat
table.clear();
- table.readMultidimNc(fileName,
+ reader.readMultidimNc(fileName,
StringArray.fromCSV("TIME,LATITUDE,LONGITUDE,DEPTH,TEMP,TEMP_DM"), // loadVarNames,
null, // loadDimNames,
new String[][] { { "LATITUDE", "LONGITUDE", "TIME" } }, // treatDimensionsAs
@@ -4779,7 +4798,8 @@ void testUnpack() throws Exception {
String results, expected;
// ** test the original packed format for comparison
- table.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
true, 0, false, // readMetadata, unpack, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.toString(3);
@@ -4957,7 +4977,7 @@ void testUnpack() throws Exception {
Test.ensureEqual(results, expected, "results=\n" + results);
// ** don't specify varNames or dimNames -- it finds the vars with the most dims
- table.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
+ reader.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
true, 3, false, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.toString(3);
@@ -5163,7 +5183,8 @@ void testReadVlenNc() throws Exception {
/* */
// ** don't specify varNames or dimNames -- it finds the vars with the most dims
- table.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
+ TableFromMultidimNcFile reader = new TableFromMultidimNcFile(table);
+ reader.readMultidimNc(fiName, new StringArray(), new StringArray(), null,
true, 0, false, // readMetadata, standardizeWhat, removeMVRows
null, null, null); // conVars, conOps, conVals
results = table.dataToString(3);
diff --git a/src/test/java/gov/noaa/pfel/erddap/dataset/EDDGridFromNcFilesUnpackedTests.java b/src/test/java/gov/noaa/pfel/erddap/dataset/EDDGridFromNcFilesUnpackedTests.java
index 0857ab0d..1138f562 100644
--- a/src/test/java/gov/noaa/pfel/erddap/dataset/EDDGridFromNcFilesUnpackedTests.java
+++ b/src/test/java/gov/noaa/pfel/erddap/dataset/EDDGridFromNcFilesUnpackedTests.java
@@ -1,11 +1,5 @@
package gov.noaa.pfel.erddap.dataset;
-import java.nio.file.Path;
-
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.ValueSource;
-
import com.cohort.array.Attributes;
import com.cohort.array.PrimitiveArray;
import com.cohort.util.Calendar2;
@@ -14,11 +8,14 @@
import com.cohort.util.String2;
import com.cohort.util.Test;
import com.cohort.util.Units2;
-
import gov.noaa.pfel.coastwatch.griddata.NcHelper;
import gov.noaa.pfel.erddap.GenerateDatasetsXml;
import gov.noaa.pfel.erddap.util.EDStatic;
import gov.noaa.pfel.erddap.variable.EDV;
+import java.nio.file.Path;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
import tags.TagImageComparison;
import tags.TagIncompleteTest;
import testDataset.EDDTestDataset;
@@ -29,2129 +26,2211 @@
import ucar.nc2.Variable;
class EDDGridFromNcFilesUnpackedTests {
- @BeforeAll
- static void init() {
- Initialization.edStatic();
- }
-
- /**
- * This tests generateDatasetsXml.
- *
- * @throws Throwable if touble
- */
- @org.junit.jupiter.api.Test
- @TagIncompleteTest
- void testGenerateDatasetsXml() throws Throwable {
-
- // String2.log("\n*** EDDGridFromNcFilesUnpacked.testGenerateDatasetsXml");
-
- String sampleDir = Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/data/nc/").toURI()).toString()
- + "/";
- String sampleRegex = "scale_factor\\.nc";
- String sampleName = sampleDir + "scale_factor.nc";
-
- // test that sample file has short analysed_sst with scale_factor and add_offset
- String results = NcHelper.ncdump(sampleName, ""); // short data
- String expected = "netcdf scale_factor.nc {\n" +
- " dimensions:\n" +
- " time = 2;\n" +
- " lat = 10;\n" +
- " lon = 10;\n" +
- " variables:\n" +
- " short analysed_sst(time=2, lat=10, lon=10);\n" +
- " :long_name = \"analysed sea surface temperature\";\n" +
- " :standard_name = \"sea_surface_foundation_temperature\";\n" +
- " :units = \"kelvin\";\n" +
- " :_FillValue = -32768S; // short\n" +
- " :add_offset = 298.15; // double\n" +
- " :scale_factor = 0.001; // double\n" +
- " :valid_min = -32767S; // short\n" +
- " :valid_max = 32767S; // short\n" +
- " :comment = \"Interim near-real-time (nrt) version; to be replaced by Final version\";\n" +
- " :coordinates = \"time lat lon \";\n" +
- "\n" +
- " int time(time=2);\n" +
- " :long_name = \"reference time of sst field\";\n" +
- " :standard_name = \"time\";\n" +
- " :axis = \"T\";\n" +
- " :units = \"seconds since 1981-01-01 00:00:00 UTC\";\n" +
- " :_CoordinateAxisType = \"Time\";\n" +
- "\n" +
- " float lat(lat=10);\n" +
- " :long_name = \"latitude\";\n" +
- " :standard_name = \"latitude\";\n" +
- " :axis = \"Y\";\n" +
- " :units = \"degrees_north\";\n" +
- " :valid_min = -90.0f; // float\n" +
- " :valid_max = 90.0f; // float\n" +
- " :_CoordinateAxisType = \"Lat\";\n" +
- "\n" +
- " float lon(lon=10);\n" +
- " :long_name = \"longitude\";\n" +
- " :standard_name = \"longitude\";\n" +
- " :axis = \"X\";\n" +
- " :units = \"degrees_east\";\n" +
- " :valid_min = -180.0f; // float\n" +
- " :valid_max = 180.0f; // float\n" +
- " :_CoordinateAxisType = \"Lon\";\n" +
- "\n" +
- " // global attributes:\n" +
- " :title = \"Daily MUR SST, Interim near-real-time (nrt) product\";\n" +
- " :comment = \"Interim-MUR(nrt) will be replaced by MUR-Final in about 3 days; MUR = \\\"Multi-scale Ultra-high Reolution\\\"; produced under NASA MEaSUREs program.\";\n"
- +
- " :Conventions = \"CF-1.0\";\n" +
- " :DSD_entry_id = \"JPL-L4UHfnd-GLOB-MUR\";\n" +
- " :references = \"ftp://mariana.jpl.nasa.gov/mur_sst/tmchin/docs/ATBD/\";\n" +
- " :source_data = \"AVHRR19_G-NAVO, AVHRR_METOP_A-EUMETSAT, MODIS_A-JPL, MODIS_T-JPL, WSAT-REMSS, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAF\";\n"
- +
- " :institution = \"Jet Propulsion Laboratory\";\n" +
- " :contact = \"ghrsst@podaac.jpl.nasa.gov\";\n" +
- " :GDS_version_id = \"GDS-v1.0-rev1.6\";\n" +
- " :netcdf_version_id = \"3.5\";\n" +
- " :creation_date = \"2015-10-06\";\n" +
- " :product_version = \"04nrt\";\n" +
- " :history = \"Interim near-real-time (nrt) version created at nominal 1-day latency.\";\n" +
- " :spatial_resolution = \"0.011 degrees\";\n" +
- " :start_date = \"2015-10-05\";\n" +
- " :start_time = \"09:00:00 UTC\";\n" +
- " :stop_date = \"2015-10-05\";\n" +
- " :stop_time = \"09:00:00 UTC\";\n" +
- " :southernmost_latitude = -90.0f; // float\n" +
- " :northernmost_latitude = 90.0f; // float\n" +
- " :westernmost_longitude = -180.0f; // float\n" +
- " :easternmost_longitude = 180.0f; // float\n" +
- " :file_quality_index = \"0\";\n" +
- " :History = \"Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n" +
- "Original Dataset = satellite/MUR/ssta/1day; Translation Date = Thu Oct 08 09:39:01 PDT 2015\";\n" +
- "\n" +
- " data:\n" +
- " analysed_sst = \n" +
- " {\n" +
- " {\n" +
- " {1779, 1790, 1802, 1815, 1827, 1839, 1851, 1862, 1874, 1886},\n" +
- " {1782, 1792, 1804, 1816, 1828, 1840, 1851, 1863, 1875, 1887},\n" +
- " {1786, 1795, 1805, 1817, 1828, 1839, 1851, 1862, 1874, 1885},\n" +
- " {1789, 1798, 1807, 1817, 1828, 1838, 1849, 1860, 1871, 1882},\n" +
- " {1793, 1800, 1808, 1817, 1827, 1836, 1846, 1856, 1866, 1876},\n" +
- " {1795, 1801, 1809, 1816, 1825, 1833, 1842, 1851, 1859, 1868},\n" +
- " {1796, 1802, 1808, 1815, 1822, 1829, 1836, 1844, 1851, 1858},\n" +
- " {1797, 1801, 1807, 1812, 1818, 1824, 1830, 1836, 1842, 1848},\n" +
- " {1796, 1800, 1804, 1809, 1813, 1818, 1822, 1827, 1832, 1836},\n" +
- " {1794, 1797, 1801, 1804, 1807, 1811, 1814, 1817, 1821, 1824}\n" +
- " },\n" +
- " {\n" +
- " {1773, 1777, 1782, 1787, 1792, 1798, 1803, 1809, 1815, 1821},\n" +
- " {1776, 1780, 1784, 1789, 1794, 1800, 1805, 1811, 1817, 1823},\n" +
- " {1778, 1782, 1787, 1792, 1797, 1802, 1807, 1813, 1819, 1825},\n" +
- " {1781, 1785, 1789, 1794, 1799, 1804, 1809, 1815, 1820, 1826},\n" +
- " {1783, 1787, 1791, 1796, 1800, 1805, 1810, 1816, 1821, 1826},\n" +
- " {1786, 1789, 1793, 1798, 1802, 1806, 1811, 1816, 1821, 1826},\n" +
- " {1788, 1791, 1795, 1799, 1803, 1807, 1812, 1816, 1821, 1825},\n" +
- " {1790, 1793, 1796, 1800, 1804, 1807, 1811, 1815, 1820, 1824},\n" +
- " {1791, 1794, 1797, 1800, 1804, 1807, 1811, 1814, 1818, 1822},\n" +
- " {1792, 1795, 1797, 1800, 1803, 1806, 1809, 1812, 1816, 1819}\n" +
- " }\n" +
- " }\n" +
- " time = \n" +
- " {1096880400, 1096966800}\n" +
- " lat = \n" +
- " {20.0006, 20.0116, 20.0226, 20.0336, 20.0446, 20.0555, 20.0665, 20.0775, 20.0885, 20.0995}\n" +
- " lon = \n" +
- " {-134.995, -134.984, -134.973, -134.962, -134.951, -134.94, -134.929, -134.918, -134.907, -134.896}\n"
- +
- "}\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // scale_factor.nc analysed_sst is short with scale_factor and add offset.
- // Loading via openDataset tells netcdf-java to unpack the variable
- // so here it appears not to be double var with no scale_factor or add_offset
- results = EDDGridFromNcFilesUnpacked.generateDatasetsXml(
- sampleDir, sampleRegex, sampleName,
- "", // group
- "", EDDGridFromNcFilesUnpacked.DEFAULT_RELOAD_EVERY_N_MINUTES, null, null) + "\n"; // dimensionsCSV,
- // reloadMinutes,
- // cacheFromUrl
- String suggDatasetID = EDDGridFromNcFilesUnpacked.suggestDatasetID(sampleDir + sampleRegex);
-
- // GenerateDatasetsXml
- String gdxResults = (new GenerateDatasetsXml()).doIt(new String[] { "-verbose",
- "EDDGridFromNcFilesUnpacked",
- sampleDir, sampleRegex, sampleName,
+ @BeforeAll
+ static void init() {
+ Initialization.edStatic();
+ }
+
+ /**
+ * This tests generateDatasetsXml.
+ *
+ * @throws Throwable if trouble
+ */
+ @org.junit.jupiter.api.Test
+ @TagIncompleteTest
+ void testGenerateDatasetsXml() throws Throwable {
+
+ // String2.log("\n*** EDDGridFromNcFilesUnpacked.testGenerateDatasetsXml");
+
+ String sampleDir =
+ Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/data/nc/").toURI()).toString()
+ + "/";
+ String sampleRegex = "scale_factor\\.nc";
+ String sampleName = sampleDir + "scale_factor.nc";
+
+ // test that sample file has short analysed_sst with scale_factor and add_offset
+ String results = NcHelper.ncdump(sampleName, ""); // short data
+ String expected =
+ "netcdf scale_factor.nc {\n"
+ + " dimensions:\n"
+ + " time = 2;\n"
+ + " lat = 10;\n"
+ + " lon = 10;\n"
+ + " variables:\n"
+ + " short analysed_sst(time=2, lat=10, lon=10);\n"
+ + " :long_name = \"analysed sea surface temperature\";\n"
+ + " :standard_name = \"sea_surface_foundation_temperature\";\n"
+ + " :units = \"kelvin\";\n"
+ + " :_FillValue = -32768S; // short\n"
+ + " :add_offset = 298.15; // double\n"
+ + " :scale_factor = 0.001; // double\n"
+ + " :valid_min = -32767S; // short\n"
+ + " :valid_max = 32767S; // short\n"
+ + " :comment = \"Interim near-real-time (nrt) version; to be replaced by Final version\";\n"
+ + " :coordinates = \"time lat lon \";\n"
+ + "\n"
+ + " int time(time=2);\n"
+ + " :long_name = \"reference time of sst field\";\n"
+ + " :standard_name = \"time\";\n"
+ + " :axis = \"T\";\n"
+ + " :units = \"seconds since 1981-01-01 00:00:00 UTC\";\n"
+ + " :_CoordinateAxisType = \"Time\";\n"
+ + "\n"
+ + " float lat(lat=10);\n"
+ + " :long_name = \"latitude\";\n"
+ + " :standard_name = \"latitude\";\n"
+ + " :axis = \"Y\";\n"
+ + " :units = \"degrees_north\";\n"
+ + " :valid_min = -90.0f; // float\n"
+ + " :valid_max = 90.0f; // float\n"
+ + " :_CoordinateAxisType = \"Lat\";\n"
+ + "\n"
+ + " float lon(lon=10);\n"
+ + " :long_name = \"longitude\";\n"
+ + " :standard_name = \"longitude\";\n"
+ + " :axis = \"X\";\n"
+ + " :units = \"degrees_east\";\n"
+ + " :valid_min = -180.0f; // float\n"
+ + " :valid_max = 180.0f; // float\n"
+ + " :_CoordinateAxisType = \"Lon\";\n"
+ + "\n"
+ + " // global attributes:\n"
+ + " :title = \"Daily MUR SST, Interim near-real-time (nrt) product\";\n"
+ + " :comment = \"Interim-MUR(nrt) will be replaced by MUR-Final in about 3 days; MUR = \\\"Multi-scale Ultra-high Reolution\\\"; produced under NASA MEaSUREs program.\";\n"
+ + " :Conventions = \"CF-1.0\";\n"
+ + " :DSD_entry_id = \"JPL-L4UHfnd-GLOB-MUR\";\n"
+ + " :references = \"ftp://mariana.jpl.nasa.gov/mur_sst/tmchin/docs/ATBD/\";\n"
+ + " :source_data = \"AVHRR19_G-NAVO, AVHRR_METOP_A-EUMETSAT, MODIS_A-JPL, MODIS_T-JPL, WSAT-REMSS, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAF\";\n"
+ + " :institution = \"Jet Propulsion Laboratory\";\n"
+ + " :contact = \"ghrsst@podaac.jpl.nasa.gov\";\n"
+ + " :GDS_version_id = \"GDS-v1.0-rev1.6\";\n"
+ + " :netcdf_version_id = \"3.5\";\n"
+ + " :creation_date = \"2015-10-06\";\n"
+ + " :product_version = \"04nrt\";\n"
+ + " :history = \"Interim near-real-time (nrt) version created at nominal 1-day latency.\";\n"
+ + " :spatial_resolution = \"0.011 degrees\";\n"
+ + " :start_date = \"2015-10-05\";\n"
+ + " :start_time = \"09:00:00 UTC\";\n"
+ + " :stop_date = \"2015-10-05\";\n"
+ + " :stop_time = \"09:00:00 UTC\";\n"
+ + " :southernmost_latitude = -90.0f; // float\n"
+ + " :northernmost_latitude = 90.0f; // float\n"
+ + " :westernmost_longitude = -180.0f; // float\n"
+ + " :easternmost_longitude = 180.0f; // float\n"
+ + " :file_quality_index = \"0\";\n"
+ + " :History = \"Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n"
+ + "Original Dataset = satellite/MUR/ssta/1day; Translation Date = Thu Oct 08 09:39:01 PDT 2015\";\n"
+ + "\n"
+ + " data:\n"
+ + " analysed_sst = \n"
+ + " {\n"
+ + " {\n"
+ + " {1779, 1790, 1802, 1815, 1827, 1839, 1851, 1862, 1874, 1886},\n"
+ + " {1782, 1792, 1804, 1816, 1828, 1840, 1851, 1863, 1875, 1887},\n"
+ + " {1786, 1795, 1805, 1817, 1828, 1839, 1851, 1862, 1874, 1885},\n"
+ + " {1789, 1798, 1807, 1817, 1828, 1838, 1849, 1860, 1871, 1882},\n"
+ + " {1793, 1800, 1808, 1817, 1827, 1836, 1846, 1856, 1866, 1876},\n"
+ + " {1795, 1801, 1809, 1816, 1825, 1833, 1842, 1851, 1859, 1868},\n"
+ + " {1796, 1802, 1808, 1815, 1822, 1829, 1836, 1844, 1851, 1858},\n"
+ + " {1797, 1801, 1807, 1812, 1818, 1824, 1830, 1836, 1842, 1848},\n"
+ + " {1796, 1800, 1804, 1809, 1813, 1818, 1822, 1827, 1832, 1836},\n"
+ + " {1794, 1797, 1801, 1804, 1807, 1811, 1814, 1817, 1821, 1824}\n"
+ + " },\n"
+ + " {\n"
+ + " {1773, 1777, 1782, 1787, 1792, 1798, 1803, 1809, 1815, 1821},\n"
+ + " {1776, 1780, 1784, 1789, 1794, 1800, 1805, 1811, 1817, 1823},\n"
+ + " {1778, 1782, 1787, 1792, 1797, 1802, 1807, 1813, 1819, 1825},\n"
+ + " {1781, 1785, 1789, 1794, 1799, 1804, 1809, 1815, 1820, 1826},\n"
+ + " {1783, 1787, 1791, 1796, 1800, 1805, 1810, 1816, 1821, 1826},\n"
+ + " {1786, 1789, 1793, 1798, 1802, 1806, 1811, 1816, 1821, 1826},\n"
+ + " {1788, 1791, 1795, 1799, 1803, 1807, 1812, 1816, 1821, 1825},\n"
+ + " {1790, 1793, 1796, 1800, 1804, 1807, 1811, 1815, 1820, 1824},\n"
+ + " {1791, 1794, 1797, 1800, 1804, 1807, 1811, 1814, 1818, 1822},\n"
+ + " {1792, 1795, 1797, 1800, 1803, 1806, 1809, 1812, 1816, 1819}\n"
+ + " }\n"
+ + " }\n"
+ + " time = \n"
+ + " {1096880400, 1096966800}\n"
+ + " lat = \n"
+ + " {20.0006, 20.0116, 20.0226, 20.0336, 20.0446, 20.0555, 20.0665, 20.0775, 20.0885, 20.0995}\n"
+ + " lon = \n"
+ + " {-134.995, -134.984, -134.973, -134.962, -134.951, -134.94, -134.929, -134.918, -134.907, -134.896}\n"
+ + "}\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // scale_factor.nc analysed_sst is short with scale_factor and add_offset.
+ // Loading via openDataset tells netcdf-java to unpack the variable,
+ // so here it appears to be a double var with no scale_factor or add_offset
+ // (see the sketch just below).
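+ // (A sketch of that behavior, independent of this test, assuming netcdf-java 5.x:
+ //    NetcdfDataset ds = NetcdfDatasets.openDataset(sampleName);
+ //    ds.findVariable("analysed_sst").read() yields doubles with scale_factor
+ //    and add_offset already applied, because openDataset enhances variables.)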
+ results =
+ EDDGridFromNcFilesUnpacked.generateDatasetsXml(
+ sampleDir,
+ sampleRegex,
+ sampleName,
"", // group
- "", "" + EDDGridFromNcFilesUnpacked.DEFAULT_RELOAD_EVERY_N_MINUTES, "" }, // dimensionsCSV,
- // reloadMinutes, cacheFromUrl
+ "",
+ EDDGridFromNcFilesUnpacked.DEFAULT_RELOAD_EVERY_N_MINUTES,
+ null,
+ null)
+ + "\n"; // dimensionsCSV,
+ // reloadMinutes,
+ // cacheFromUrl
+ String suggDatasetID = EDDGridFromNcFilesUnpacked.suggestDatasetID(sampleDir + sampleRegex);
+
+ // GenerateDatasetsXml
+ String gdxResults =
+ (new GenerateDatasetsXml())
+ .doIt(
+ new String[] {
+ "-verbose",
+ "EDDGridFromNcFilesUnpacked",
+ sampleDir,
+ sampleRegex,
+ sampleName,
+ "", // group
+ "",
+ "" + EDDGridFromNcFilesUnpacked.DEFAULT_RELOAD_EVERY_N_MINUTES,
+ ""
+ }, // dimensionsCSV,
+ // reloadMinutes, cacheFromUrl
false); // doIt loop?
- Test.ensureEqual(gdxResults, results, "Unexpected results from GenerateDatasetsXml.doIt. " +
- gdxResults.length() + " " + results.length());
-
- expected = "\n"
- +
- " 10080\n" +
- " 10000\n" +
- " " + sampleDir + "\n" +
- " scale_factor\\.nc\n" +
- " true\n" +
- " .*\n" +
- " last\n" +
- " 20\n" +
- " false\n" +
- " \n" +
- " \n" +
- " Grid\n" +
- " CF-1.10, COARDS, ACDD-1.3\n" +
- " ghrsst@podaac.jpl.nasa.gov\n" +
- " GHRSST\n" +
- " group\n" +
- " https://podaac.jpl.nasa.gov/\n" +
- " null\n" +
- " null\n" +
- " https://podaac.jpl.nasa.gov/\n" +
- " analysed, analysed_sst, daily, data, day, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, environments, foundation, high, interim, jet, laboratory, making, measures, multi, multi-scale, mur, near, near real time, near-real-time, nrt, ocean, oceans, product, propulsion, real, records, research, resolution, scale, science, sea, sea_surface_foundation_temperature, sst, surface, system, temperature, time, ultra, ultra-high, use\n"
- +
- " GCMD Science Keywords\n" +
- " [standard]\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " CF Standard Name Table v70\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " Interim-Multi-scale Ultra-high Resolution (MUR)(nrt) will be replaced by MUR-Final in about 3 days; MUR = "Multi-scale Ultra-high Reolution"; produced under NASA Making Earth System Data Records for Use in Research Environments (MEaSUREs) program.\n"
- +
- " null\n" +
- " \n" +
- " \n" +
- " time\n" +
- " time\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " lat\n" +
- " latitude\n" +
- " \n" +
- " \n" +
- " Location\n" +
- " Latitude\n" +
- " \n" +
- " \n" +
- " \n" +
- " lon\n" +
- " longitude\n" +
- " \n" +
- " \n" +
- " Location\n" +
- " Longitude\n" +
- " \n" +
- " \n" +
- " \n" +
- " analysed_sst\n" +
- " analysed_sst\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 305.0\n" +
- " 273.0\n" +
- " null\n" +
- " Temperature\n" +
- " \n" +
- " \n" +
- "\n" +
- "\n\n";
- Test.ensureEqual(results, expected,
- "results.length=" + results.length() + " expected.length=" + expected.length() +
- "\nresults=\n" + results);
-
- // ensure it is ready-to-use by making a dataset from it
- String tDatasetID = suggDatasetID;
- EDD.deleteCachedDatasetInfo(tDatasetID);
- EDD edd = EDDGridFromNcFilesUnpacked.oneFromXmlFragment(null, results);
- Test.ensureEqual(edd.datasetID(), tDatasetID, "");
- Test.ensureEqual(edd.title(), "Daily MUR SST, Interim near-real-time (nrt) product", "");
- Test.ensureEqual(String2.toCSSVString(edd.dataVariableDestinationNames()),
- "analysed_sst", "");
-
- String2.log("\nEDDGridFromNcFilesUnpacked.testGenerateDatasetsXml passed the test.");
- }
-
- /**
- *
- * @throws Throwable if trouble
- */
- @ParameterizedTest
- @ValueSource(booleans = { false, true })
- void testBasic(boolean deleteCachedDatasetInfo) throws Throwable {
- // String2.log("\n*** EDDGridFromNcFilesUnpacked.testBasic()\n");
- // testVerboseOn();
- int language = 0;
- String name, tName, results, tResults, expected, userDapQuery, tQuery;
- String error = "";
- EDV edv;
- String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10);
-
- // generateDatasetsXml
- String id = "testEDDGridFromNcFilesUnpacked";
- if (deleteCachedDatasetInfo)
- EDDGridFromNcFilesUnpacked.deleteCachedDatasetInfo(id);
- EDDGrid eddGrid = (EDDGrid) EDDTestDataset.gettestEDDGridFromNcFilesUnpacked();
-
- // *** test getting das for entire dataset
- String2.log("\n*** test das dds for entire dataset\n");
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "", EDStatic.fullTestCacheDirectory,
- eddGrid.className(), ".das");
- results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
- // String2.log(results);
- expected = "Attributes {\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.4440356e+9, 1.444122e+9;\n" +
- " String axis \"T\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"reference time of sst field\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float32 actual_range 20.0006, 20.0995;\n" +
- " String axis \"Y\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude\";\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " Float32 valid_max 90.0;\n" +
- " Float32 valid_min -90.0;\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float32 actual_range -134.995, -134.896;\n" +
- " String axis \"X\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude\";\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " Float32 valid_max 180.0;\n" +
- " Float32 valid_min -180.0;\n" +
- " }\n" +
- " analysed_sst {\n" +
- " Float64 _FillValue NaN;\n" +
- " Float64 colorBarMaximum 305.0;\n" +
- " Float64 colorBarMinimum 273.0;\n" +
- " String comment \"Interim near-real-time (nrt) version; to be replaced by Final version\";\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"analysed sea surface temperature\";\n" +
- " String standard_name \"sea_surface_foundation_temperature\";\n" +
- " String units \"degree_K\";\n" +
- " Float64 valid_max 330.917;\n" +
- " Float64 valid_min 265.383;\n" +
- " }\n" +
- " NC_GLOBAL {\n" +
- " String cdm_data_type \"Grid\";\n" +
- " String comment \"Interim-MUR(nrt) will be replaced by MUR-Final in about 3 days; MUR = \\\"Multi-scale Ultra-high Resolution\\\"; produced under NASA MEaSUREs program.\";\n"
- +
- " String contact \"ghrsst@podaac.jpl.nasa.gov\";\n" +
- " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n" +
- " String creation_date \"2015-10-06\";\n" +
- " String creator_email \"ghrsst@podaac.jpl.nasa.gov\";\n" +
- " String creator_name \"GHRSST\";\n" +
- " String creator_url \"https://podaac.jpl.nasa.gov/\";\n" +
- " String DSD_entry_id \"JPL-L4UHfnd-GLOB-MUR\";\n" +
- " Float64 Easternmost_Easting -134.896;\n" +
- " String file_quality_index \"0\";\n" +
- " String GDS_version_id \"GDS-v1.0-rev1.6\";\n" +
- " Float64 geospatial_lat_max 20.0995;\n" +
- " Float64 geospatial_lat_min 20.0006;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max -134.896;\n" +
- " Float64 geospatial_lon_min -134.995;\n" +
- " Float64 geospatial_lon_resolution 0.011000000000001996;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " String history \"Interim near-real-time (nrt) version created at nominal 1-day latency.\n" +
- today;
- tResults = results.substring(0, Math.min(results.length(), expected.length()));
- Test.ensureEqual(tResults, expected, "results=\n" + results);
-
- // T22:27:15Z (local files)
- // 2015-10-08T22:27:15Z
-
- expected = "/griddap/testEDDGridFromNcFilesUnpacked.das\";\n" +
- " String infoUrl \"https://podaac.jpl.nasa.gov/\";\n" +
- " String institution \"Jet Propulsion Laboratory\";\n" +
- " String keywords \"analysed, analysed_sst, daily, data, day, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, environments, foundation, high, interim, jet, laboratory, making, measures, multi, multi-scale, mur, near, near real time, near-real-time, nrt, ocean, oceans, product, propulsion, real, records, research, resolution, scale, sea, sea_surface_foundation_temperature, sst, surface, system, temperature, time, ultra, ultra-high, use\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String license \"The data may be used and redistributed for free but is not intended\n" +
- "for legal use, since it may contain inaccuracies. Neither the data\n" +
- "Contributor, ERD, NOAA, nor the United States Government, nor any\n" +
- "of their employees or contractors, makes any warranty, express or\n" +
- "implied, including warranties of merchantability and fitness for a\n" +
- "particular purpose, or assumes any legal liability for the accuracy,\n" +
- "completeness, or usefulness, of this information.\";\n" +
- " String netcdf_version_id \"3.5\";\n" +
- " Float64 Northernmost_Northing 20.0995;\n" +
- " String product_version \"04nrt\";\n" +
- " String references \"ftp://mariana.jpl.nasa.gov/mur_sst/tmchin/docs/ATBD/\";\n" +
- " String source_data \"AVHRR19_G-NAVO, AVHRR_METOP_A-EUMETSAT, MODIS_A-JPL, MODIS_T-JPL, WSAT-REMSS, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAF\";\n"
- +
- " String sourceUrl \"(local files)\";\n" +
- " Float64 Southernmost_Northing 20.0006;\n" +
- " String spatial_resolution \"0.011 degrees\";\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v70\";\n" +
- " String summary \"Interim-Multi-scale Ultra-high Resolution (MUR)(nrt) will be replaced by MUR-Final in about 3 days; MUR = \\\"Multi-scale Ultra-high Resolution\\\"; produced under NASA Making Earth System Data Records for Use in Research Environments (MEaSUREs) program.\";\n"
- +
- " String time_coverage_end \"2015-10-06T09:00:00Z\";\n" +
- " String time_coverage_start \"2015-10-05T09:00:00Z\";\n" +
- " String title \"Daily MUR SST, Interim near-real-time (nrt) product\";\n" +
- " Float64 Westernmost_Easting -134.995;\n" +
- " }\n" +
- "}\n";
- int tPo = results.indexOf(expected.substring(0, 17));
- Test.ensureTrue(tPo >= 0, "tPo=-1 results=\n" + results);
- tResults = results.substring(tPo, Math.min(results.length(), tPo + expected.length()));
- Test.ensureEqual(tResults, expected, "results=\n" + results);
-
- // *** test getting dds for entire dataset
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "", EDStatic.fullTestCacheDirectory,
- eddGrid.className(), ".dds");
- results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
- // String2.log(results);
- expected = "Dataset {\n" +
- " Float64 time[time = 2];\n" +
- " Float32 latitude[latitude = 10];\n" +
- " Float32 longitude[longitude = 10];\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float64 analysed_sst[time = 2][latitude = 10][longitude = 10];\n" +
- " MAPS:\n" +
- " Float64 time[time = 2];\n" +
- " Float32 latitude[latitude = 10];\n" +
- " Float32 longitude[longitude = 10];\n" +
- " } analysed_sst;\n" +
- "} testEDDGridFromNcFilesUnpacked;\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // .csv with data from one file
- String2.log("\n*** test read from one file\n");
- userDapQuery = "analysed_sst[0][0:2:6][0:2:6]";
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, userDapQuery, EDStatic.fullTestCacheDirectory,
- eddGrid.className(), ".csv");
- results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
- // String2.log(results);
- expected = "time,latitude,longitude,analysed_sst\n" +
- "UTC,degrees_north,degrees_east,degree_K\n" +
- "2015-10-05T09:00:00Z,20.0006,-134.995,299.929\n" + // note double values (Kelvin)
- "2015-10-05T09:00:00Z,20.0006,-134.973,299.952\n" +
- "2015-10-05T09:00:00Z,20.0006,-134.951,299.977\n" +
- "2015-10-05T09:00:00Z,20.0006,-134.929,300.001\n" +
- "2015-10-05T09:00:00Z,20.0226,-134.995,299.936\n" +
- "2015-10-05T09:00:00Z,20.0226,-134.973,299.955\n" +
- "2015-10-05T09:00:00Z,20.0226,-134.951,299.97799999999995\n" +
- "2015-10-05T09:00:00Z,20.0226,-134.929,300.001\n" +
- "2015-10-05T09:00:00Z,20.0446,-134.995,299.943\n" +
- "2015-10-05T09:00:00Z,20.0446,-134.973,299.95799999999997\n" +
- "2015-10-05T09:00:00Z,20.0446,-134.951,299.977\n" +
- "2015-10-05T09:00:00Z,20.0446,-134.929,299.996\n" +
- "2015-10-05T09:00:00Z,20.0665,-134.995,299.94599999999997\n" +
- "2015-10-05T09:00:00Z,20.0665,-134.973,299.95799999999997\n" +
- "2015-10-05T09:00:00Z,20.0665,-134.951,299.972\n" +
- "2015-10-05T09:00:00Z,20.0665,-134.929,299.986\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // */
- }
-
- /**
- * Test file created from
- * https://thredds.jpl.nasa.gov/thredds/ncss/grid/ncml_aggregation/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml/dataset.html
- * and stored in /erddapTest/unsigned/
- *
- * @throws Throwable if trouble
- */
- @org.junit.jupiter.api.Test
- @TagImageComparison
- void testUInt16File() throws Throwable {
- // String2.log("\n*** EDDGridFromNcFilesUnpacked.testUInt16File()");
- // testVerboseOn();
- int language = 0;
- String name, tName, results, tResults, expected, userDapQuery;
- String today = Calendar2.getCurrentISODateTimeStringZulu() + "Z";
- String fileDir = Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/data/unsigned/").toURI())
+ Test.ensureEqual(
+ gdxResults,
+ results,
+ "Unexpected results from GenerateDatasetsXml.doIt. "
+ + gdxResults.length()
+ + " "
+ + results.length());
+
+ expected =
+ "\n"
+ + " 10080\n"
+ + " 10000\n"
+ + " "
+ + sampleDir
+ + "\n"
+ + " scale_factor\\.nc\n"
+ + " true\n"
+ + " .*\n"
+ + " last\n"
+ + " 20\n"
+ + " false\n"
+ + " \n"
+ + " \n"
+ + " Grid\n"
+ + " CF-1.10, COARDS, ACDD-1.3\n"
+ + " ghrsst@podaac.jpl.nasa.gov\n"
+ + " GHRSST\n"
+ + " group\n"
+ + " https://podaac.jpl.nasa.gov/\n"
+ + " null\n"
+ + " null\n"
+ + " https://podaac.jpl.nasa.gov/\n"
+ + " analysed, analysed_sst, daily, data, day, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, environments, foundation, high, interim, jet, laboratory, making, measures, multi, multi-scale, mur, near, near real time, near-real-time, nrt, ocean, oceans, product, propulsion, real, records, research, resolution, scale, science, sea, sea_surface_foundation_temperature, sst, surface, system, temperature, time, ultra, ultra-high, use\n"
+ + " GCMD Science Keywords\n"
+ + " [standard]\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " CF Standard Name Table v70\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " Interim-Multi-scale Ultra-high Resolution (MUR)(nrt) will be replaced by MUR-Final in about 3 days; MUR = "Multi-scale Ultra-high Reolution"; produced under NASA Making Earth System Data Records for Use in Research Environments (MEaSUREs) program.\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " time\n"
+ + " time\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " lat\n"
+ + " latitude\n"
+ + " \n"
+ + " \n"
+ + " Location\n"
+ + " Latitude\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " lon\n"
+ + " longitude\n"
+ + " \n"
+ + " \n"
+ + " Location\n"
+ + " Longitude\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " analysed_sst\n"
+ + " analysed_sst\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 305.0\n"
+ + " 273.0\n"
+ + " null\n"
+ + " Temperature\n"
+ + " \n"
+ + " \n"
+ + "\n"
+ + "\n\n";
+ Test.ensureEqual(
+ results,
+ expected,
+ "results.length="
+ + results.length()
+ + " expected.length="
+ + expected.length()
+ + "\nresults=\n"
+ + results);
+
+ // ensure it is ready-to-use by making a dataset from it
+ String tDatasetID = suggDatasetID;
+ EDD.deleteCachedDatasetInfo(tDatasetID);
+ EDD edd = EDDGridFromNcFilesUnpacked.oneFromXmlFragment(null, results);
+ Test.ensureEqual(edd.datasetID(), tDatasetID, "");
+ Test.ensureEqual(edd.title(), "Daily MUR SST, Interim near-real-time (nrt) product", "");
+ Test.ensureEqual(String2.toCSSVString(edd.dataVariableDestinationNames()), "analysed_sst", "");
+
+ String2.log("\nEDDGridFromNcFilesUnpacked.testGenerateDatasetsXml passed the test.");
+ }
+
+ /**
+ * @throws Throwable if trouble
+ */
+ @ParameterizedTest
+ @ValueSource(booleans = {false, true})
+ void testBasic(boolean deleteCachedDatasetInfo) throws Throwable {
+ // String2.log("\n*** EDDGridFromNcFilesUnpacked.testBasic()\n");
+ // testVerboseOn();
+ int language = 0;
+ String name, tName, results, tResults, expected, userDapQuery, tQuery;
+ String error = "";
+ EDV edv;
+ String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10);
+
+ // generateDatasetsXml
+ String id = "testEDDGridFromNcFilesUnpacked";
+ if (deleteCachedDatasetInfo) EDDGridFromNcFilesUnpacked.deleteCachedDatasetInfo(id);
+ EDDGrid eddGrid = (EDDGrid) EDDTestDataset.gettestEDDGridFromNcFilesUnpacked();
+
+ // *** test getting das for entire dataset
+ String2.log("\n*** test das dds for entire dataset\n");
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language, null, null, "", EDStatic.fullTestCacheDirectory, eddGrid.className(), ".das");
+ results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
+ // String2.log(results);
+ expected =
+ "Attributes {\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.4440356e+9, 1.444122e+9;\n"
+ + " String axis \"T\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"reference time of sst field\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float32 actual_range 20.0006, 20.0995;\n"
+ + " String axis \"Y\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude\";\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " Float32 valid_max 90.0;\n"
+ + " Float32 valid_min -90.0;\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float32 actual_range -134.995, -134.896;\n"
+ + " String axis \"X\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude\";\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " Float32 valid_max 180.0;\n"
+ + " Float32 valid_min -180.0;\n"
+ + " }\n"
+ + " analysed_sst {\n"
+ + " Float64 _FillValue NaN;\n"
+ + " Float64 colorBarMaximum 305.0;\n"
+ + " Float64 colorBarMinimum 273.0;\n"
+ + " String comment \"Interim near-real-time (nrt) version; to be replaced by Final version\";\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"analysed sea surface temperature\";\n"
+ + " String standard_name \"sea_surface_foundation_temperature\";\n"
+ + " String units \"degree_K\";\n"
+ + " Float64 valid_max 330.917;\n"
+ + " Float64 valid_min 265.383;\n"
+ + " }\n"
+ + " NC_GLOBAL {\n"
+ + " String cdm_data_type \"Grid\";\n"
+ + " String comment \"Interim-MUR(nrt) will be replaced by MUR-Final in about 3 days; MUR = \\\"Multi-scale Ultra-high Resolution\\\"; produced under NASA MEaSUREs program.\";\n"
+ + " String contact \"ghrsst@podaac.jpl.nasa.gov\";\n"
+ + " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n"
+ + " String creation_date \"2015-10-06\";\n"
+ + " String creator_email \"ghrsst@podaac.jpl.nasa.gov\";\n"
+ + " String creator_name \"GHRSST\";\n"
+ + " String creator_url \"https://podaac.jpl.nasa.gov/\";\n"
+ + " String DSD_entry_id \"JPL-L4UHfnd-GLOB-MUR\";\n"
+ + " Float64 Easternmost_Easting -134.896;\n"
+ + " String file_quality_index \"0\";\n"
+ + " String GDS_version_id \"GDS-v1.0-rev1.6\";\n"
+ + " Float64 geospatial_lat_max 20.0995;\n"
+ + " Float64 geospatial_lat_min 20.0006;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max -134.896;\n"
+ + " Float64 geospatial_lon_min -134.995;\n"
+ + " Float64 geospatial_lon_resolution 0.011000000000001996;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " String history \"Interim near-real-time (nrt) version created at nominal 1-day latency.\n"
+ + today;
+ tResults = results.substring(0, Math.min(results.length(), expected.length()));
+ Test.ensureEqual(tResults, expected, "results=\n" + results);
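+    // (EDDGridFromNcFilesUnpacked unpacks packed variables and converts _FillValue /
+    // missing_value to NaN at read time, which is why analysed_sst appears above as
+    // Float64 degree_K with _FillValue NaN.)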
+
+ // T22:27:15Z (local files)
+ // 2015-10-08T22:27:15Z
+
+ expected =
+ "/griddap/testEDDGridFromNcFilesUnpacked.das\";\n"
+ + " String infoUrl \"https://podaac.jpl.nasa.gov/\";\n"
+ + " String institution \"Jet Propulsion Laboratory\";\n"
+ + " String keywords \"analysed, analysed_sst, daily, data, day, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, environments, foundation, high, interim, jet, laboratory, making, measures, multi, multi-scale, mur, near, near real time, near-real-time, nrt, ocean, oceans, product, propulsion, real, records, research, resolution, scale, sea, sea_surface_foundation_temperature, sst, surface, system, temperature, time, ultra, ultra-high, use\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String license \"The data may be used and redistributed for free but is not intended\n"
+ + "for legal use, since it may contain inaccuracies. Neither the data\n"
+ + "Contributor, ERD, NOAA, nor the United States Government, nor any\n"
+ + "of their employees or contractors, makes any warranty, express or\n"
+ + "implied, including warranties of merchantability and fitness for a\n"
+ + "particular purpose, or assumes any legal liability for the accuracy,\n"
+ + "completeness, or usefulness, of this information.\";\n"
+ + " String netcdf_version_id \"3.5\";\n"
+ + " Float64 Northernmost_Northing 20.0995;\n"
+ + " String product_version \"04nrt\";\n"
+ + " String references \"ftp://mariana.jpl.nasa.gov/mur_sst/tmchin/docs/ATBD/\";\n"
+ + " String source_data \"AVHRR19_G-NAVO, AVHRR_METOP_A-EUMETSAT, MODIS_A-JPL, MODIS_T-JPL, WSAT-REMSS, iQUAM-NOAA/NESDIS, Ice_Conc-OSISAF\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " Float64 Southernmost_Northing 20.0006;\n"
+ + " String spatial_resolution \"0.011 degrees\";\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v70\";\n"
+ + " String summary \"Interim-Multi-scale Ultra-high Resolution (MUR)(nrt) will be replaced by MUR-Final in about 3 days; MUR = \\\"Multi-scale Ultra-high Resolution\\\"; produced under NASA Making Earth System Data Records for Use in Research Environments (MEaSUREs) program.\";\n"
+ + " String time_coverage_end \"2015-10-06T09:00:00Z\";\n"
+ + " String time_coverage_start \"2015-10-05T09:00:00Z\";\n"
+ + " String title \"Daily MUR SST, Interim near-real-time (nrt) product\";\n"
+ + " Float64 Westernmost_Easting -134.995;\n"
+ + " }\n"
+ + "}\n";
+ int tPo = results.indexOf(expected.substring(0, 17));
+ Test.ensureTrue(tPo >= 0, "tPo=-1 results=\n" + results);
+ tResults = results.substring(tPo, Math.min(results.length(), tPo + expected.length()));
+ Test.ensureEqual(tResults, expected, "results=\n" + results);
+
+ // *** test getting dds for entire dataset
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language, null, null, "", EDStatic.fullTestCacheDirectory, eddGrid.className(), ".dds");
+ results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
+ // String2.log(results);
+ expected =
+ "Dataset {\n"
+ + " Float64 time[time = 2];\n"
+ + " Float32 latitude[latitude = 10];\n"
+ + " Float32 longitude[longitude = 10];\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float64 analysed_sst[time = 2][latitude = 10][longitude = 10];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 2];\n"
+ + " Float32 latitude[latitude = 10];\n"
+ + " Float32 longitude[longitude = 10];\n"
+ + " } analysed_sst;\n"
+ + "} testEDDGridFromNcFilesUnpacked;\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ // .csv with data from one file
+ String2.log("\n*** test read from one file\n");
+ userDapQuery = "analysed_sst[0][0:2:6][0:2:6]";
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddGrid.className(),
+ ".csv");
+ results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
+ // String2.log(results);
+ expected =
+ "time,latitude,longitude,analysed_sst\n"
+ + "UTC,degrees_north,degrees_east,degree_K\n"
+ + "2015-10-05T09:00:00Z,20.0006,-134.995,299.929\n"
+ + // note double values (Kelvin)
+ "2015-10-05T09:00:00Z,20.0006,-134.973,299.952\n"
+ + "2015-10-05T09:00:00Z,20.0006,-134.951,299.977\n"
+ + "2015-10-05T09:00:00Z,20.0006,-134.929,300.001\n"
+ + "2015-10-05T09:00:00Z,20.0226,-134.995,299.936\n"
+ + "2015-10-05T09:00:00Z,20.0226,-134.973,299.955\n"
+ + "2015-10-05T09:00:00Z,20.0226,-134.951,299.97799999999995\n"
+ + "2015-10-05T09:00:00Z,20.0226,-134.929,300.001\n"
+ + "2015-10-05T09:00:00Z,20.0446,-134.995,299.943\n"
+ + "2015-10-05T09:00:00Z,20.0446,-134.973,299.95799999999997\n"
+ + "2015-10-05T09:00:00Z,20.0446,-134.951,299.977\n"
+ + "2015-10-05T09:00:00Z,20.0446,-134.929,299.996\n"
+ + "2015-10-05T09:00:00Z,20.0665,-134.995,299.94599999999997\n"
+ + "2015-10-05T09:00:00Z,20.0665,-134.973,299.95799999999997\n"
+ + "2015-10-05T09:00:00Z,20.0665,-134.951,299.972\n"
+ + "2015-10-05T09:00:00Z,20.0665,-134.929,299.986\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
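+    // (The long decimals, e.g. 299.97799999999995, are ordinary binary floating-point
+    // artifacts of unpacking to double; they are expected, not errors.)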
+
+ // */
+ }
+
+ /**
+ * Test file created from
+ * https://thredds.jpl.nasa.gov/thredds/ncss/grid/ncml_aggregation/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml/dataset.html
+ * and stored in /erddapTest/unsigned/
+ *
+ * @throws Throwable if trouble
+ */
+ @org.junit.jupiter.api.Test
+ @TagImageComparison
+ void testUInt16File() throws Throwable {
+ // String2.log("\n*** EDDGridFromNcFilesUnpacked.testUInt16File()");
+ // testVerboseOn();
+ int language = 0;
+ String name, tName, results, tResults, expected, userDapQuery;
+ String today = Calendar2.getCurrentISODateTimeStringZulu() + "Z";
+ String fileDir =
+ Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/data/unsigned/").toURI())
.toString()
- + "/";
- String fileName = "9km_aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.nc";
- boolean oNcDebugMode = NcHelper.debugMode;
- NcHelper.debugMode = true;
- boolean oAttDebugMode = Attributes.debugMode;
- Attributes.debugMode = true;
- String tDir = EDStatic.fullTestCacheDirectory;
-
- // DumpString
- results = NcHelper.ncdump(fileDir + fileName, "-h");
- expected = "netcdf 9km_aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.nc {\n" +
- " dimensions:\n" +
- " time = 1;\n" +
- " lat = 2160;\n" +
- " lon = 25;\n" +
- " variables:\n" +
- " short l3m_data(time=1, lat=2160, lon=25);\n" +
- " :_Unsigned = \"true\";\n" +
- " :long_name = \"l3m_data\";\n" +
- " :scale_factor = 7.17185E-4f; // float\n" + // halfway=32768-> 23.50071808, so, many data values
- // are higher
- " :add_offset = -2.0f; // float\n" +
- " :_FillValue = -1S; // short\n" + // In a sense wrong: cf says it should be actual value:
- // 65535(ushort)
- " :Scaling = \"linear\";\n" +
- " :Scaling_Equation = \"(Slope*l3m_data) + Intercept = Parameter value\";\n" +
- " :Slope = 7.17185E-4f; // float\n" +
- " :Intercept = -2.0f; // float\n" +
- " :coordinates = \"time Number_of_Lines Number_of_Columns lat lon\";\n" +
- "\n" +
- " int time(time=1);\n" +
- " :standard_name = \"time\";\n" +
- " :axis = \"T\";\n" +
- " :units = \"days since 2002-01-01\";\n" +
- " :_CoordinateAxisType = \"Time\";\n" +
- "\n" +
- " float Number_of_Lines(lat=2160);\n" + // note that ncss knows this is lat, but didn't rename it
- " :long_name = \"latitude\";\n" +
- " :units = \"degrees_north\";\n" +
- " :_CoordinateAxisType = \"Lat\";\n" +
- " :standard_name = \"latitude\";\n" +
- "\n" +
- " float Number_of_Columns(lon=25);\n" + // note that ncss knows this is lon, but didn't rename it
- " :long_name = \"longitude\";\n" +
- " :units = \"degrees_east\";\n" +
- " :_CoordinateAxisType = \"Lon\";\n" +
- " :standard_name = \"longitude\";\n" +
- "\n" +
- " byte l3m_qual(time=1, lat=2160, lon=25);\n" +
- " :_Unsigned = \"true\";\n" +
- " :long_name = \"l3m_qual\";\n" +
- " :scale_factor = 7.17185E-4f; // float\n" + // I suspect that's wrong
- " :add_offset = -2.0f; // float\n" + // I suspect that's wrong
- " :valid_range = 0, 2; // int\n" +
- " :coordinates = \"time Number_of_Lines Number_of_Columns lat lon\";\n" + // that's bizarre
- "\n" +
- " // global attributes:\n" +
- " :Product_Name = \"A20092652009272.L3m_8D_SST_9\";\n" +
- " :Sensor_Name = \"MODISA\";\n" +
- " :Sensor = \"\";\n" +
- " :Title = \"MODISA Level-3 Standard Mapped Image\";\n" +
- " :Data_Center = \"\";\n" +
- " :Station_Name = \"\";\n" +
- " :Station_Latitude = 0.0f; // float\n" +
- " :Station_Longitude = 0.0f; // float\n" +
- " :Mission = \"\";\n" +
- " :Mission_Characteristics = \"\";\n" +
- " :Sensor_Characteristics = \"\";\n" +
- " :Product_Type = \"8-day\";\n" +
- " :Replacement_Flag = \"ORIGINAL\";\n" +
- " :Software_Name = \"smigen\";\n" +
- " :Software_Version = \"4.0\";\n" +
- " :Processing_Time = \"2009282201111000\";\n" +
- " :Input_Files = \"A20092652009272.L3b_8D_SST.main\";\n" +
- " :Processing_Control = \"smigen par=A20092652009272.L3m_8D_SST_9.param\";\n" +
- " :Input_Parameters = \"IFILE = /data3/sdpsoper/vdc/vpu2/workbuf/A20092652009272.L3b_8D_SST.main|OFILE = A20092652009272.L3m_8D_SST_9|PFILE = |PROD = sst|PALFILE = DEFAULT|RFLAG = ORIGINAL|MEAS = 1|STYPE = 0|DATAMIN = 0.000000|DATAMAX = 0.000000|LONWEST = -180.000000|LONEAST = 180.000000|LATSOUTH = -90.000000|LATNORTH = 90.000000|RESOLUTION = 9km|PROJECTION = RECT|GAP_FILL = 0|SEAM_LON = -180.000000|PRECISION=I\";\n"
- +
- " :L2_Flag_Names = \"LAND,HISOLZ\";\n" +
- " :Period_Start_Year = 2009S; // short\n" +
- " :Period_Start_Day = 265S; // short\n" +
- " :Period_End_Year = 2009S; // short\n" +
- " :Period_End_Day = 270S; // short\n" +
- " :Start_Time = \"2009265000008779\";\n" +
- " :End_Time = \"2009271030006395\";\n" +
- " :Start_Year = 2009S; // short\n" +
- " :Start_Day = 265S; // short\n" +
- " :Start_Millisec = 8779; // int\n" +
- " :End_Year = 2009S; // short\n" +
- " :End_Day = 271S; // short\n" +
- " :End_Millisec = 10806395; // int\n" +
- " :Start_Orbit = 0; // int\n" +
- " :End_Orbit = 0; // int\n" +
- " :Orbit = 0; // int\n" +
- " :Map_Projection = \"Equidistant Cylindrical\";\n" +
- " :Latitude_Units = \"degrees North\";\n" +
- " :Longitude_Units = \"degrees East\";\n" +
- " :Northernmost_Latitude = 90.0f; // float\n" +
- " :Southernmost_Latitude = -90.0f; // float\n" +
- " :Westernmost_Longitude = -180.0f; // float\n" +
- " :Easternmost_Longitude = 180.0f; // float\n" +
- " :Latitude_Step = 0.083333336f; // float\n" +
- " :Longitude_Step = 0.083333336f; // float\n" +
- " :SW_Point_Latitude = -89.958336f; // float\n" +
- " :SW_Point_Longitude = -179.95833f; // float\n" +
- " :Data_Bins = 14234182; // int\n" +
- " :Number_of_Lines = 2160; // int\n" +
- " :Number_of_Columns = 4320; // int\n" +
- " :Parameter = \"Sea Surface Temperature\";\n" +
- " :Measure = \"Mean\";\n" +
- " :Units = \"deg-C\";\n" +
- " :Scaling = \"linear\";\n" +
- " :Scaling_Equation = \"(Slope*l3m_data) + Intercept = Parameter value\";\n" +
- " :Slope = 7.17185E-4f; // float\n" +
- " :Intercept = -2.0f; // float\n" +
- " :Scaled_Data_Minimum = -2.0f; // float\n" +
- " :Scaled_Data_Maximum = 45.0f; // float\n" +
- " :Data_Minimum = -1.999999f; // float\n" +
- " :Data_Maximum = 36.915f; // float\n" +
- " :start_date = \"2002-07-04 UTC\";\n" +
- " :start_time = \"00:00:00 UTC\";\n" +
- " :stop_date = \"2015-03-06 UTC\";\n" +
- " :stop_time = \"23:59:59 UTC\";\n" +
- " :Conventions = \"CF-1.0\";\n" +
- " :History = \"Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n" +
- "Original Dataset = file:/usr/ftp/ncml/catalog_ncml/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml; Translation Date = Fri Oct 30 09:44:07 GMT-08:00 2015\";\n"
- +
- " :geospatial_lat_min = -89.95833587646484; // double\n" +
- " :geospatial_lat_max = 89.95833587646484; // double\n" +
- " :geospatial_lon_min = -136.04165649414062; // double\n" +
- " :geospatial_lon_max = -134.04165649414062; // double\n" +
- "}\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- String suggDatasetID = EDDGridFromNcFilesUnpacked.suggestDatasetID(fileDir + fileName);
-
- // generateDatasetsXml
- results = EDDGridFromNcFilesUnpacked.generateDatasetsXml(fileDir, fileName, "",
- "", // group
- "", -1, "", null);
- expected = "\n"
- + // same id as EDDGridFromNcFiles !!!
- " 1440\n" +
- " 10000\n" +
- " " + fileDir + "\n" +
- " 9km_aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.nc\n" +
- " true\n" +
- " .*\n" +
- " last\n" +
- " 20\n" +
- " false\n" +
- " \n" +
- " \n" +
- " Grid\n" +
- " CF-1.10, COARDS, ACDD-1.3\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n" +
- "Original Dataset = file:/usr/ftp/ncml/catalog_ncml/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml; Translation Date = Fri Oct 30 09:44:07 GMT-08:00 2015\n"
- +
- " ???\n" +
- " null\n" +
- " ???\n" +
- " null\n" +
- " aqua, data, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, image, imaging, L3, l3m_data, l3m_qual, mapped, moderate, modis, modisa, ocean, oceans, quality, resolution, science, sea, sea_surface_temperature, smi, spectroradiometer, standard, surface, temperature, time\n"
- +
- " GCMD Science Keywords\n" +
- " null\n" +
- " null\n" +
- " [standard]\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " CF Standard Name Table v70\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " null\n" +
- " Moderate Resolution Imaging Spectroradiometer on Aqua (MODISA) Level-3 Standard Mapped Image\n"
- +
- " null\n" +
- " null\n" +
- " null\n" +
- " MODISA L3 SMI,\n" +
- " null\n" +
- " null\n" +
- " \n" +
- " \n" +
- " time\n" +
- " time\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " lat\n" + // correct based on what is in the file, but needs to be
- // Number_of_Lines
- " latitude\n" +
- " \n" +
- " \n" +
- " Location\n" +
- " Latitude\n" +
- " latitude\n" +
- " degrees_north\n" +
- " \n" +
- " \n" +
- " \n" +
- " lon\n" + // correct based on what is in the file, but needs to be
- // Number_of_Columns
- " longitude\n" +
- " \n" +
- " \n" +
- " Location\n" +
- " Longitude\n" +
- " longitude\n" +
- " degrees_east\n" +
- " \n" +
- " \n" +
- " \n" +
- " l3m_data\n" +
- " sst\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 32.0\n" +
- " 0.0\n" +
- " null\n" +
- " Temperature\n" +
- " Sea Surface Temperature\n" +
- " sea_surface_temperature\n" +
- " deg_C\n" +
- " \n" +
- " \n" +
- " \n" +
- " l3m_qual\n" +
- " sst_quality\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " null\n" +
- " Quality\n" +
- " Sea Surface Temperature Quality\n" +
- " \n" +
- " \n" +
- "\n" +
- "\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // ensure files are reread
- // NOTE that testUInt16FileUnpacked uses
- // Number_of_Lines (the 1D var which uses lat dimension) and
- // Number_of_Columns (the 1D var which uses lon dimension)
- File2.deleteAllFiles(EDDGridFromNcFilesUnpacked.datasetDir("testUInt16FileUnpacked"));
- EDDGrid eddGrid = (EDDGrid) EDDTestDataset.gettestUInt16FileUnpacked();
- // in uaf erddap, this is nasa_jpl_c688_be2f_cf9d
-
- // re-pack apparent missing value
- // 45.000717 +2=> 47.000717 /7.17185E-4=> 65535
-
- // .das das isn't affected by userDapQuery
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "",
- tDir, eddGrid.className(), ".das");
- results = File2.readFromFile88591(tDir + tName)[1];
- expected = "Attributes {\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.0257408e+9, 1.0257408e+9;\n" +
- " String axis \"T\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Time\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float32 actual_range -89.95834, 89.95834;\n" + // a test of descending lat axis
- " String axis \"Y\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude\";\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float32 actual_range -136.0417, -134.0417;\n" +
- " String axis \"X\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude\";\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " }\n" +
- " sst {\n" +
- " Float32 _FillValue NaN;\n" + // important test of UInt16 and Unpacked
- " Float64 colorBarMaximum 32.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Sea Surface Temperature\";\n" +
- " String standard_name \"sea_surface_temperature\";\n" +
- " String units \"deg_C\";\n" +
- " }\n" +
- " sst_quality {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Sea Surface Temperature Quality\";\n" +
- " Float32 valid_range -2.0, -1.9985657;\n" + // unpacking did that
- " }\n" +
- " NC_GLOBAL {\n" +
- " String cdm_data_type \"Grid\";\n" +
- " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n" +
- " Float64 Easternmost_Easting -134.0417;\n" +
- " Float64 geospatial_lat_max 89.95834;\n" +
- " Float64 geospatial_lat_min -89.95834;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max -134.0417;\n" +
- " Float64 geospatial_lon_min -136.0417;\n" +
- " Float64 geospatial_lon_resolution 0.08333333333333333;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " String history \"Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n" +
- "Original Dataset = file:/usr/ftp/ncml/catalog_ncml/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml; Translation Date = Fri Oct 30 09:44:07 GMT-08:00 2015\n";
- tResults = results.substring(0, Math.min(results.length(), expected.length()));
- Test.ensureEqual(tResults, expected, "\nresults=\n" + results);
-
- expected =
- // "2015-10-30T18:17:10Z (local files)
- // 2015-10-30T18:17:10Z
- // http://localhost:8080/cwexperimental/griddap/testUInt16File.das";
- " String infoUrl \"???\";\n" +
- " String Input_Parameters \"IFILE = /data3/sdpsoper/vdc/vpu2/workbuf/A20092652009272.L3b_8D_SST.main|OFILE = A20092652009272.L3m_8D_SST_9|PFILE = |PROD = sst|PALFILE = DEFAULT|RFLAG = ORIGINAL|MEAS = 1|STYPE = 0|DATAMIN = 0.000000|DATAMAX = 0.000000|LONWEST = -180.000000|LONEAST = 180.000000|LATSOUTH = -90.000000|LATNORTH = 90.000000|RESOLUTION = 9km|PROJECTION = RECT|GAP_FILL = 0|SEAM_LON = -180.000000|PRECISION=I\";\n"
- +
- " String institution \"???\";\n" +
- " String keywords \"aqua, data, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, image, imaging, L3, l3m_data, l3m_qual, mapped, moderate, modis, modisa, ocean, oceans, quality, resolution, science, sea, sea_surface_temperature, smi, spectroradiometer, standard, surface, temperature, time\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String L2_Flag_Names \"LAND,HISOLZ\";\n" +
- " String license \"The data may be used and redistributed for free but is not intended\n" +
- "for legal use, since it may contain inaccuracies. Neither the data\n" +
- "Contributor, ERD, NOAA, nor the United States Government, nor any\n" +
- "of their employees or contractors, makes any warranty, express or\n" +
- "implied, including warranties of merchantability and fitness for a\n" +
- "particular purpose, or assumes any legal liability for the accuracy,\n" +
- "completeness, or usefulness, of this information.\";\n" +
- " String Map_Projection \"Equidistant Cylindrical\";\n" +
- " String Measure \"Mean\";\n" +
- " Float64 Northernmost_Northing 89.95834;\n" +
- " String Processing_Control \"smigen par=A20092652009272.L3m_8D_SST_9.param\";\n" +
- " String Processing_Time \"2009282201111000\";\n" +
- " String Product_Name \"A20092652009272.L3m_8D_SST_9\";\n" +
- " String Product_Type \"8-day\";\n" +
- " String Replacement_Flag \"ORIGINAL\";\n" +
- " Float32 Scaled_Data_Maximum 45.0;\n" +
- " Float32 Scaled_Data_Minimum -2.0;\n" +
- " String Sensor_Name \"MODISA\";\n" +
- " String Software_Name \"smigen\";\n" +
- " String Software_Version \"4.0\";\n" +
- " String sourceUrl \"(local files)\";\n" +
- " Float64 Southernmost_Northing -89.95834;\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v70\";\n" +
- " String summary \"Moderate Resolution Imaging Spectroradiometer on Aqua (MODISA) Level-3 Standard Mapped Image\";\n"
- +
- " String time_coverage_end \"2002-07-04T00:00:00Z\";\n" +
- " String time_coverage_start \"2002-07-04T00:00:00Z\";\n" +
- " String title \"MODISA L3 SMI,\";\n" +
- " Float64 Westernmost_Easting -136.0417;\n" +
- " }\n" +
- "}\n";
- int tpo = results.indexOf(expected.substring(0, 17));
- Test.ensureTrue(tpo >= 0, "tpo=-1 results=\n" + results);
- Test.ensureEqual(
- results.substring(tpo, Math.min(results.length(), tpo + expected.length())),
- expected, "results=\n" + results);
-
- // .dds dds isn't affected by userDapQuery
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "",
- tDir, eddGrid.className(), ".dds");
- results = File2.directReadFrom88591File(
- tDir + tName);
- expected = // difference from testUInt16Dap: lat lon are float here, not double
- "Dataset {\n" +
- " Float64 time[time = 1];\n" +
- " Float32 latitude[latitude = 2160];\n" +
- " Float32 longitude[longitude = 25];\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 sst[time = 1][latitude = 2160][longitude = 25];\n" +
- " MAPS:\n" +
- " Float64 time[time = 1];\n" +
- " Float32 latitude[latitude = 2160];\n" +
- " Float32 longitude[longitude = 25];\n" +
- " } sst;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 sst_quality[time = 1][latitude = 2160][longitude = 25];\n" +
- " MAPS:\n" +
- " Float64 time[time = 1];\n" +
- " Float32 latitude[latitude = 2160];\n" +
- " Float32 longitude[longitude = 25];\n" +
- " } sst_quality;\n" +
- "} testUInt16FileUnpacked;\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // .csv data values
- userDapQuery = "sst[0][0:100:2159][(-134.95833513)],sst_quality[0][0:100:2159][(-134.95833513)]";
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, userDapQuery,
- tDir, eddGrid.className(), ".csv");
- results = File2.directReadFrom88591File(
- tDir + tName);
- String2.log(results);
- expected = // difference from testUInt16Dap: lat lon are float here, not double
- "time,latitude,longitude,sst,sst_quality\n" +
- "UTC,degrees_north,degrees_east,deg_C,\n" + // quality values should be 0, 1, 2, 3, 4, 5
- "2002-07-04T00:00:00Z,89.958336,-134.95833,-0.84102905,-1.9992828\n" + // quality values are
- // wrong because file
- // incorrectly has
- // scale_factor,
- // add_offset for them
- "2002-07-04T00:00:00Z,81.62501,-134.95833,-1.6371044,-2.0\n" +
- "2002-07-04T00:00:00Z,73.291664,-134.95833,-0.11021753,-2.0\n" +
- "2002-07-04T00:00:00Z,64.958336,-134.95833,NaN,-1.8171178\n" + // NaN shows _FillValue=-1S was
- // correctly caught
- "2002-07-04T00:00:00Z,56.625008,-134.95833,NaN,-1.8171178\n" +
- "2002-07-04T00:00:00Z,48.291664,-134.95833,12.6406145,-2.0\n" +
- "2002-07-04T00:00:00Z,39.958336,-134.95833,17.95137,-2.0\n" +
- "2002-07-04T00:00:00Z,31.625,-134.95833,20.432829,-2.0\n" +
- "2002-07-04T00:00:00Z,23.291664,-134.95833,19.664007,-1.9985657\n" +
- "2002-07-04T00:00:00Z,14.958336,-134.95833,24.482773,-2.0\n" + // sst>23.5 shows unsigned values
- // correctly caught
- "2002-07-04T00:00:00Z,6.625,-134.95833,29.068455,-2.0\n" +
- "2002-07-04T00:00:00Z,-1.7083359,-134.95833,27.240349,-2.0\n" +
- "2002-07-04T00:00:00Z,-10.041664,-134.95833,27.210228,-2.0\n" +
- "2002-07-04T00:00:00Z,-18.375,-134.95833,26.713936,-2.0\n" +
- "2002-07-04T00:00:00Z,-26.708336,-134.95833,21.580326,-2.0\n" +
- "2002-07-04T00:00:00Z,-35.041668,-134.95833,15.789774,-2.0\n" +
- "2002-07-04T00:00:00Z,-43.375,-134.95833,NaN,-1.8171178\n" +
- "2002-07-04T00:00:00Z,-51.708336,-134.95833,6.1673026,-1.9985657\n" +
- "2002-07-04T00:00:00Z,-60.041668,-134.95833,0.40400413,-2.0\n" +
- "2002-07-04T00:00:00Z,-68.375,-134.95833,NaN,-1.8171178\n" +
- "2002-07-04T00:00:00Z,-76.708336,-134.95833,NaN,-1.8171178\n" +
- "2002-07-04T00:00:00Z,-85.04167,-134.95833,NaN,-1.8171178\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results +
- "\nFIX THIS problem with unsigned values.");
-
- // display the image
- String2.log("\n\n* PNG ");
- String baseName = eddGrid.className() + "_UInt16_Map";
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "sst[0][][]&.land=under",
- Image2Tests.urlToAbsolutePath(Image2Tests.OBS_DIR), baseName, ".png");
- // Test.displayInBrowser("file://" + tDir + tName);
- Image2Tests.testImagesIdentical(
- tName,
- baseName + ".png",
- baseName + "_diff.png");
-
- NcHelper.debugMode = oNcDebugMode;
- Attributes.debugMode = oAttDebugMode;
- }
-
- /**
- * Test file from Yibo Jiang (PODAAC) and stored in /erddapTest/nc/ .
- *
- * @throws Throwable if trouble
- */
- @org.junit.jupiter.api.Test
- void testSuperPreciseTimeUnits() throws Throwable {
- // String2.log("\n*** EDDGridFromNcFilesUnpacked.testSuperPreciseTimeUnits");
- // testVerboseOn();
- int language = 0;
- String name, tName, results, tResults, expected, userDapQuery;
- String fileDir = Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/largeFiles/nc/").toURI())
+ + "/";
+ String fileName = "9km_aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.nc";
+ boolean oNcDebugMode = NcHelper.debugMode;
+ NcHelper.debugMode = true;
+ boolean oAttDebugMode = Attributes.debugMode;
+ Attributes.debugMode = true;
+ String tDir = EDStatic.fullTestCacheDirectory;
+
+ // DumpString
+ results = NcHelper.ncdump(fileDir + fileName, "-h");
+ expected =
+ "netcdf 9km_aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.nc {\n"
+ + " dimensions:\n"
+ + " time = 1;\n"
+ + " lat = 2160;\n"
+ + " lon = 25;\n"
+ + " variables:\n"
+ + " short l3m_data(time=1, lat=2160, lon=25);\n"
+ + " :_Unsigned = \"true\";\n"
+ + " :long_name = \"l3m_data\";\n"
+ + " :scale_factor = 7.17185E-4f; // float\n"
+            + // halfway=32768 -> 32768*7.17185E-4 = 23.50071808 (before add_offset -2.0);
+            // so, many data values are higher and need the unsigned interpretation
+ " :add_offset = -2.0f; // float\n"
+ + " :_FillValue = -1S; // short\n"
+            + // In a sense wrong: CF says it should be the actual value:
+            // 65535 (ushort)
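+            // ((short) -1 is bit pattern 0xFFFF, i.e., 65535 when read as unsigned,
+            // so both describe the same stored bits.)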
+ " :Scaling = \"linear\";\n"
+ + " :Scaling_Equation = \"(Slope*l3m_data) + Intercept = Parameter value\";\n"
+ + " :Slope = 7.17185E-4f; // float\n"
+ + " :Intercept = -2.0f; // float\n"
+ + " :coordinates = \"time Number_of_Lines Number_of_Columns lat lon\";\n"
+ + "\n"
+ + " int time(time=1);\n"
+ + " :standard_name = \"time\";\n"
+ + " :axis = \"T\";\n"
+ + " :units = \"days since 2002-01-01\";\n"
+ + " :_CoordinateAxisType = \"Time\";\n"
+ + "\n"
+ + " float Number_of_Lines(lat=2160);\n"
+ + // note that ncss knows this is lat, but didn't rename it
+ " :long_name = \"latitude\";\n"
+ + " :units = \"degrees_north\";\n"
+ + " :_CoordinateAxisType = \"Lat\";\n"
+ + " :standard_name = \"latitude\";\n"
+ + "\n"
+ + " float Number_of_Columns(lon=25);\n"
+ + // note that ncss knows this is lon, but didn't rename it
+ " :long_name = \"longitude\";\n"
+ + " :units = \"degrees_east\";\n"
+ + " :_CoordinateAxisType = \"Lon\";\n"
+ + " :standard_name = \"longitude\";\n"
+ + "\n"
+ + " byte l3m_qual(time=1, lat=2160, lon=25);\n"
+ + " :_Unsigned = \"true\";\n"
+ + " :long_name = \"l3m_qual\";\n"
+ + " :scale_factor = 7.17185E-4f; // float\n"
+ + // I suspect that's wrong
+ " :add_offset = -2.0f; // float\n"
+ + // I suspect that's wrong
+ " :valid_range = 0, 2; // int\n"
+ + " :coordinates = \"time Number_of_Lines Number_of_Columns lat lon\";\n"
+ + // that's bizarre
+ "\n"
+ + " // global attributes:\n"
+ + " :Product_Name = \"A20092652009272.L3m_8D_SST_9\";\n"
+ + " :Sensor_Name = \"MODISA\";\n"
+ + " :Sensor = \"\";\n"
+ + " :Title = \"MODISA Level-3 Standard Mapped Image\";\n"
+ + " :Data_Center = \"\";\n"
+ + " :Station_Name = \"\";\n"
+ + " :Station_Latitude = 0.0f; // float\n"
+ + " :Station_Longitude = 0.0f; // float\n"
+ + " :Mission = \"\";\n"
+ + " :Mission_Characteristics = \"\";\n"
+ + " :Sensor_Characteristics = \"\";\n"
+ + " :Product_Type = \"8-day\";\n"
+ + " :Replacement_Flag = \"ORIGINAL\";\n"
+ + " :Software_Name = \"smigen\";\n"
+ + " :Software_Version = \"4.0\";\n"
+ + " :Processing_Time = \"2009282201111000\";\n"
+ + " :Input_Files = \"A20092652009272.L3b_8D_SST.main\";\n"
+ + " :Processing_Control = \"smigen par=A20092652009272.L3m_8D_SST_9.param\";\n"
+ + " :Input_Parameters = \"IFILE = /data3/sdpsoper/vdc/vpu2/workbuf/A20092652009272.L3b_8D_SST.main|OFILE = A20092652009272.L3m_8D_SST_9|PFILE = |PROD = sst|PALFILE = DEFAULT|RFLAG = ORIGINAL|MEAS = 1|STYPE = 0|DATAMIN = 0.000000|DATAMAX = 0.000000|LONWEST = -180.000000|LONEAST = 180.000000|LATSOUTH = -90.000000|LATNORTH = 90.000000|RESOLUTION = 9km|PROJECTION = RECT|GAP_FILL = 0|SEAM_LON = -180.000000|PRECISION=I\";\n"
+ + " :L2_Flag_Names = \"LAND,HISOLZ\";\n"
+ + " :Period_Start_Year = 2009S; // short\n"
+ + " :Period_Start_Day = 265S; // short\n"
+ + " :Period_End_Year = 2009S; // short\n"
+ + " :Period_End_Day = 270S; // short\n"
+ + " :Start_Time = \"2009265000008779\";\n"
+ + " :End_Time = \"2009271030006395\";\n"
+ + " :Start_Year = 2009S; // short\n"
+ + " :Start_Day = 265S; // short\n"
+ + " :Start_Millisec = 8779; // int\n"
+ + " :End_Year = 2009S; // short\n"
+ + " :End_Day = 271S; // short\n"
+ + " :End_Millisec = 10806395; // int\n"
+ + " :Start_Orbit = 0; // int\n"
+ + " :End_Orbit = 0; // int\n"
+ + " :Orbit = 0; // int\n"
+ + " :Map_Projection = \"Equidistant Cylindrical\";\n"
+ + " :Latitude_Units = \"degrees North\";\n"
+ + " :Longitude_Units = \"degrees East\";\n"
+ + " :Northernmost_Latitude = 90.0f; // float\n"
+ + " :Southernmost_Latitude = -90.0f; // float\n"
+ + " :Westernmost_Longitude = -180.0f; // float\n"
+ + " :Easternmost_Longitude = 180.0f; // float\n"
+ + " :Latitude_Step = 0.083333336f; // float\n"
+ + " :Longitude_Step = 0.083333336f; // float\n"
+ + " :SW_Point_Latitude = -89.958336f; // float\n"
+ + " :SW_Point_Longitude = -179.95833f; // float\n"
+ + " :Data_Bins = 14234182; // int\n"
+ + " :Number_of_Lines = 2160; // int\n"
+ + " :Number_of_Columns = 4320; // int\n"
+ + " :Parameter = \"Sea Surface Temperature\";\n"
+ + " :Measure = \"Mean\";\n"
+ + " :Units = \"deg-C\";\n"
+ + " :Scaling = \"linear\";\n"
+ + " :Scaling_Equation = \"(Slope*l3m_data) + Intercept = Parameter value\";\n"
+ + " :Slope = 7.17185E-4f; // float\n"
+ + " :Intercept = -2.0f; // float\n"
+ + " :Scaled_Data_Minimum = -2.0f; // float\n"
+ + " :Scaled_Data_Maximum = 45.0f; // float\n"
+ + " :Data_Minimum = -1.999999f; // float\n"
+ + " :Data_Maximum = 36.915f; // float\n"
+ + " :start_date = \"2002-07-04 UTC\";\n"
+ + " :start_time = \"00:00:00 UTC\";\n"
+ + " :stop_date = \"2015-03-06 UTC\";\n"
+ + " :stop_time = \"23:59:59 UTC\";\n"
+ + " :Conventions = \"CF-1.0\";\n"
+ + " :History = \"Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n"
+ + "Original Dataset = file:/usr/ftp/ncml/catalog_ncml/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml; Translation Date = Fri Oct 30 09:44:07 GMT-08:00 2015\";\n"
+ + " :geospatial_lat_min = -89.95833587646484; // double\n"
+ + " :geospatial_lat_max = 89.95833587646484; // double\n"
+ + " :geospatial_lon_min = -136.04165649414062; // double\n"
+ + " :geospatial_lon_max = -134.04165649414062; // double\n"
+ + "}\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
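+    // The crux of this test: l3m_data is stored as signed short with _Unsigned = "true",
+    // so values must be read as ushort (0..65535) before scale_factor/add_offset are applied.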
+
+ String suggDatasetID = EDDGridFromNcFilesUnpacked.suggestDatasetID(fileDir + fileName);
+
+ // generateDatasetsXml
+ results =
+ EDDGridFromNcFilesUnpacked.generateDatasetsXml(
+ fileDir, fileName, "", "", // group
+ "", -1, "", null);
+ expected =
+ "\n"
+ + // same id as EDDGridFromNcFiles !!!
+ " 1440\n"
+ + " 10000\n"
+ + " "
+ + fileDir
+ + "\n"
+ + " 9km_aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.nc\n"
+ + " true\n"
+ + " .*\n"
+ + " last\n"
+ + " 20\n"
+ + " false\n"
+ + " \n"
+ + " \n"
+ + " Grid\n"
+ + " CF-1.10, COARDS, ACDD-1.3\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n"
+ + "Original Dataset = file:/usr/ftp/ncml/catalog_ncml/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml; Translation Date = Fri Oct 30 09:44:07 GMT-08:00 2015\n"
+ + " ???\n"
+ + " null\n"
+ + " ???\n"
+ + " null\n"
+ + " aqua, data, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, image, imaging, L3, l3m_data, l3m_qual, mapped, moderate, modis, modisa, ocean, oceans, quality, resolution, science, sea, sea_surface_temperature, smi, spectroradiometer, standard, surface, temperature, time\n"
+ + " GCMD Science Keywords\n"
+ + " null\n"
+ + " null\n"
+ + " [standard]\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " CF Standard Name Table v70\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " Moderate Resolution Imaging Spectroradiometer on Aqua (MODISA) Level-3 Standard Mapped Image\n"
+ + " null\n"
+ + " null\n"
+ + " null\n"
+ + " MODISA L3 SMI,\n"
+ + " null\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " time\n"
+ + " time\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " lat\n"
+ + // correct based on what is in the file, but needs to be
+ // Number_of_Lines
+ " latitude\n"
+ + " \n"
+ + " \n"
+ + " Location\n"
+ + " Latitude\n"
+ + " latitude\n"
+ + " degrees_north\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " lon\n"
+ + // correct based on what is in the file, but needs to be
+ // Number_of_Columns
+ " longitude\n"
+ + " \n"
+ + " \n"
+ + " Location\n"
+ + " Longitude\n"
+ + " longitude\n"
+ + " degrees_east\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " l3m_data\n"
+ + " sst\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 32.0\n"
+ + " 0.0\n"
+ + " null\n"
+ + " Temperature\n"
+ + " Sea Surface Temperature\n"
+ + " sea_surface_temperature\n"
+ + " deg_C\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " l3m_qual\n"
+ + " sst_quality\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " null\n"
+ + " Quality\n"
+ + " Sea Surface Temperature Quality\n"
+ + " \n"
+ + " \n"
+ + "\n"
+ + "\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // ensure files are reread
+ // NOTE that testUInt16FileUnpacked uses
+ // Number_of_Lines (the 1D var which uses lat dimension) and
+ // Number_of_Columns (the 1D var which uses lon dimension)
+ File2.deleteAllFiles(EDDGridFromNcFilesUnpacked.datasetDir("testUInt16FileUnpacked"));
+ EDDGrid eddGrid = (EDDGrid) EDDTestDataset.gettestUInt16FileUnpacked();
+ // in uaf erddap, this is nasa_jpl_c688_be2f_cf9d
+
+ // re-pack apparent missing value
+ // 45.000717 +2=> 47.000717 /7.17185E-4=> 65535
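+    // i.e., unpacked = packed * scale_factor + add_offset. A quick sanity check of that
+    // arithmetic (sketch only, not executed by the test):
+    //   double unpacked = 65535 * 7.17185E-4 - 2.0;                // = 45.000717...
+    //   long repacked = Math.round((unpacked + 2.0) / 7.17185E-4); // = 65535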
+
+ // .das das isn't affected by userDapQuery
+ tName =
+ eddGrid.makeNewFileForDapQuery(language, null, null, "", tDir, eddGrid.className(), ".das");
+ results = File2.readFromFile88591(tDir + tName)[1];
+ expected =
+ "Attributes {\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.0257408e+9, 1.0257408e+9;\n"
+ + " String axis \"T\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Time\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float32 actual_range -89.95834, 89.95834;\n"
+ + // a test of descending lat axis
+ " String axis \"Y\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude\";\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float32 actual_range -136.0417, -134.0417;\n"
+ + " String axis \"X\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude\";\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " }\n"
+ + " sst {\n"
+ + " Float32 _FillValue NaN;\n"
+ + // important test of UInt16 and Unpacked
+ " Float64 colorBarMaximum 32.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Sea Surface Temperature\";\n"
+ + " String standard_name \"sea_surface_temperature\";\n"
+ + " String units \"deg_C\";\n"
+ + " }\n"
+ + " sst_quality {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Sea Surface Temperature Quality\";\n"
+ + " Float32 valid_range -2.0, -1.9985657;\n"
+ + // unpacking did that
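+            // (the file's valid_range 0, 2 went through the same scale/offset:
+            // 0 -> -2.0 and 2 -> 2*7.17185E-4 - 2.0 = -1.9985657)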
+ " }\n"
+ + " NC_GLOBAL {\n"
+ + " String cdm_data_type \"Grid\";\n"
+ + " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n"
+ + " Float64 Easternmost_Easting -134.0417;\n"
+ + " Float64 geospatial_lat_max 89.95834;\n"
+ + " Float64 geospatial_lat_min -89.95834;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max -134.0417;\n"
+ + " Float64 geospatial_lon_min -136.0417;\n"
+ + " Float64 geospatial_lon_resolution 0.08333333333333333;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " String history \"Translated to CF-1.0 Conventions by Netcdf-Java CDM (NetcdfCFWriter)\n"
+ + "Original Dataset = file:/usr/ftp/ncml/catalog_ncml/OceanTemperature/modis/aqua/11um/9km/aggregate__MODIS_AQUA_L3_SST_THERMAL_8DAY_9KM_DAYTIME.ncml; Translation Date = Fri Oct 30 09:44:07 GMT-08:00 2015\n";
+ tResults = results.substring(0, Math.min(results.length(), expected.length()));
+ Test.ensureEqual(tResults, expected, "\nresults=\n" + results);
+
+ expected =
+ // "2015-10-30T18:17:10Z (local files)
+ // 2015-10-30T18:17:10Z
+ // http://localhost:8080/cwexperimental/griddap/testUInt16File.das";
+ " String infoUrl \"???\";\n"
+ + " String Input_Parameters \"IFILE = /data3/sdpsoper/vdc/vpu2/workbuf/A20092652009272.L3b_8D_SST.main|OFILE = A20092652009272.L3m_8D_SST_9|PFILE = |PROD = sst|PALFILE = DEFAULT|RFLAG = ORIGINAL|MEAS = 1|STYPE = 0|DATAMIN = 0.000000|DATAMAX = 0.000000|LONWEST = -180.000000|LONEAST = 180.000000|LATSOUTH = -90.000000|LATNORTH = 90.000000|RESOLUTION = 9km|PROJECTION = RECT|GAP_FILL = 0|SEAM_LON = -180.000000|PRECISION=I\";\n"
+ + " String institution \"???\";\n"
+ + " String keywords \"aqua, data, earth, Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, image, imaging, L3, l3m_data, l3m_qual, mapped, moderate, modis, modisa, ocean, oceans, quality, resolution, science, sea, sea_surface_temperature, smi, spectroradiometer, standard, surface, temperature, time\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String L2_Flag_Names \"LAND,HISOLZ\";\n"
+ + " String license \"The data may be used and redistributed for free but is not intended\n"
+ + "for legal use, since it may contain inaccuracies. Neither the data\n"
+ + "Contributor, ERD, NOAA, nor the United States Government, nor any\n"
+ + "of their employees or contractors, makes any warranty, express or\n"
+ + "implied, including warranties of merchantability and fitness for a\n"
+ + "particular purpose, or assumes any legal liability for the accuracy,\n"
+ + "completeness, or usefulness, of this information.\";\n"
+ + " String Map_Projection \"Equidistant Cylindrical\";\n"
+ + " String Measure \"Mean\";\n"
+ + " Float64 Northernmost_Northing 89.95834;\n"
+ + " String Processing_Control \"smigen par=A20092652009272.L3m_8D_SST_9.param\";\n"
+ + " String Processing_Time \"2009282201111000\";\n"
+ + " String Product_Name \"A20092652009272.L3m_8D_SST_9\";\n"
+ + " String Product_Type \"8-day\";\n"
+ + " String Replacement_Flag \"ORIGINAL\";\n"
+ + " Float32 Scaled_Data_Maximum 45.0;\n"
+ + " Float32 Scaled_Data_Minimum -2.0;\n"
+ + " String Sensor_Name \"MODISA\";\n"
+ + " String Software_Name \"smigen\";\n"
+ + " String Software_Version \"4.0\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " Float64 Southernmost_Northing -89.95834;\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v70\";\n"
+ + " String summary \"Moderate Resolution Imaging Spectroradiometer on Aqua (MODISA) Level-3 Standard Mapped Image\";\n"
+ + " String time_coverage_end \"2002-07-04T00:00:00Z\";\n"
+ + " String time_coverage_start \"2002-07-04T00:00:00Z\";\n"
+ + " String title \"MODISA L3 SMI,\";\n"
+ + " Float64 Westernmost_Easting -136.0417;\n"
+ + " }\n"
+ + "}\n";
+ int tpo = results.indexOf(expected.substring(0, 17));
+ Test.ensureTrue(tpo >= 0, "tpo=-1 results=\n" + results);
+ Test.ensureEqual(
+ results.substring(tpo, Math.min(results.length(), tpo + expected.length())),
+ expected,
+ "results=\n" + results);
+
+ // .dds dds isn't affected by userDapQuery
+ tName =
+ eddGrid.makeNewFileForDapQuery(language, null, null, "", tDir, eddGrid.className(), ".dds");
+ results = File2.directReadFrom88591File(tDir + tName);
+ expected = // difference from testUInt16Dap: lat lon are float here, not double
+ "Dataset {\n"
+ + " Float64 time[time = 1];\n"
+ + " Float32 latitude[latitude = 2160];\n"
+ + " Float32 longitude[longitude = 25];\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 sst[time = 1][latitude = 2160][longitude = 25];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 1];\n"
+ + " Float32 latitude[latitude = 2160];\n"
+ + " Float32 longitude[longitude = 25];\n"
+ + " } sst;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 sst_quality[time = 1][latitude = 2160][longitude = 25];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 1];\n"
+ + " Float32 latitude[latitude = 2160];\n"
+ + " Float32 longitude[longitude = 25];\n"
+ + " } sst_quality;\n"
+ + "} testUInt16FileUnpacked;\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ // .csv data values
+ userDapQuery =
+ "sst[0][0:100:2159][(-134.95833513)],sst_quality[0][0:100:2159][(-134.95833513)]";
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, tDir, eddGrid.className(), ".csv");
+ results = File2.directReadFrom88591File(tDir + tName);
+ String2.log(results);
+ expected = // difference from testUInt16Dap: lat lon are float here, not double
+ "time,latitude,longitude,sst,sst_quality\n"
+ + "UTC,degrees_north,degrees_east,deg_C,\n"
+ + // quality values should be 0, 1, 2, 3, 4, 5
+ "2002-07-04T00:00:00Z,89.958336,-134.95833,-0.84102905,-1.9992828\n"
+ + // quality values are
+ // wrong because file
+ // incorrectly has
+ // scale_factor,
+ // add_offset for them
+ "2002-07-04T00:00:00Z,81.62501,-134.95833,-1.6371044,-2.0\n"
+ + "2002-07-04T00:00:00Z,73.291664,-134.95833,-0.11021753,-2.0\n"
+ + "2002-07-04T00:00:00Z,64.958336,-134.95833,NaN,-1.8171178\n"
+ + // NaN shows _FillValue=-1S was
+ // correctly caught
+ "2002-07-04T00:00:00Z,56.625008,-134.95833,NaN,-1.8171178\n"
+ + "2002-07-04T00:00:00Z,48.291664,-134.95833,12.6406145,-2.0\n"
+ + "2002-07-04T00:00:00Z,39.958336,-134.95833,17.95137,-2.0\n"
+ + "2002-07-04T00:00:00Z,31.625,-134.95833,20.432829,-2.0\n"
+ + "2002-07-04T00:00:00Z,23.291664,-134.95833,19.664007,-1.9985657\n"
+ + "2002-07-04T00:00:00Z,14.958336,-134.95833,24.482773,-2.0\n"
+ + // sst>23.5 shows unsigned values
+ // correctly caught
+ "2002-07-04T00:00:00Z,6.625,-134.95833,29.068455,-2.0\n"
+ + "2002-07-04T00:00:00Z,-1.7083359,-134.95833,27.240349,-2.0\n"
+ + "2002-07-04T00:00:00Z,-10.041664,-134.95833,27.210228,-2.0\n"
+ + "2002-07-04T00:00:00Z,-18.375,-134.95833,26.713936,-2.0\n"
+ + "2002-07-04T00:00:00Z,-26.708336,-134.95833,21.580326,-2.0\n"
+ + "2002-07-04T00:00:00Z,-35.041668,-134.95833,15.789774,-2.0\n"
+ + "2002-07-04T00:00:00Z,-43.375,-134.95833,NaN,-1.8171178\n"
+ + "2002-07-04T00:00:00Z,-51.708336,-134.95833,6.1673026,-1.9985657\n"
+ + "2002-07-04T00:00:00Z,-60.041668,-134.95833,0.40400413,-2.0\n"
+ + "2002-07-04T00:00:00Z,-68.375,-134.95833,NaN,-1.8171178\n"
+ + "2002-07-04T00:00:00Z,-76.708336,-134.95833,NaN,-1.8171178\n"
+ + "2002-07-04T00:00:00Z,-85.04167,-134.95833,NaN,-1.8171178\n";
+ Test.ensureEqual(
+ results, expected, "\nresults=\n" + results + "\nFIX THIS problem with unsigned values.");
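+    // With scale/offset (incorrectly) applied to the quality flags, flag f unpacks to
+    // f*7.17185E-4 - 2.0: 0 -> -2.0, 1 -> -1.9992828, 2 -> -1.9985657; the ubyte fill
+    // value 255 unpacks to -1.8171178, matching the rows where sst is NaN.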
+
+ // display the image
+ String2.log("\n\n* PNG ");
+ String baseName = eddGrid.className() + "_UInt16_Map";
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "sst[0][][]&.land=under",
+ Image2Tests.urlToAbsolutePath(Image2Tests.OBS_DIR),
+ baseName,
+ ".png");
+ // Test.displayInBrowser("file://" + tDir + tName);
+ Image2Tests.testImagesIdentical(tName, baseName + ".png", baseName + "_diff.png");
+
+ NcHelper.debugMode = oNcDebugMode;
+ Attributes.debugMode = oAttDebugMode;
+ }
+
+ /**
+   * Test file from Yibo Jiang (PODAAC), stored in /erddapTest/nc/ .
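+   * ("Super precise" = time units whose base time carries sub-second digits, e.g. "hours
+   * since 2020-01-01 00:30:00.000000000", as in this file's time variable.)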
+ *
+ * @throws Throwable if trouble
+ */
+ @org.junit.jupiter.api.Test
+ void testSuperPreciseTimeUnits() throws Throwable {
+ // String2.log("\n*** EDDGridFromNcFilesUnpacked.testSuperPreciseTimeUnits");
+ // testVerboseOn();
+ int language = 0;
+ String name, tName, results, tResults, expected, userDapQuery;
+ String fileDir =
+ Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/largeFiles/nc/").toURI())
.toString()
- + "/";
- String tDir = EDStatic.fullTestCacheDirectory;
- boolean oDebugMode = NcHelper.debugMode;
- NcHelper.debugMode = true;
-
- // ncdump of file showing time units and values
- results = NcHelper.ncdump(fileDir + "superPreciseTimeUnits.nc", "-v time");
- expected = "netcdf superPreciseTimeUnits.nc {\n" +
- " dimensions:\n" +
- " time = 24;\n" +
- " lat = 400;\n" +
- " lon = 1800;\n" +
- " variables:\n" +
- " byte time(time=24);\n" +
- " :long_name = \"Reference time of file\";\n" +
- " :standard_name = \"time\";\n" +
- " :calendar = \"gregorian\";\n" +
- " :comment = \"Timestamp coordinate at the center of the 1 hr bin, at 1 hour resolution. Range is one UTC day.\";\n"
- +
- " :units = \"hours since 2020-01-01 00:30:00.000000000\";\n" +
- " :_ChunkSizes = 24U; // uint\n" +
- "\n" +
- " int epoch_time(time=24);\n" +
- " :long_name = \"Time Centering of Data Based on Epoch Reference\";\n" +
- " :standard_name = \"time\";\n" +
- " :calendar = \"gregorian\";\n" +
- " :comment = \"Timestamp coordinate is at the center of the 1 hr bin, at 1 hour resolution referenced by the historical Epoch reference date/time. The Epoch reference date/time corresponds to the first observation time window in the CYGNSS historical data record. Total number of timestamps in a file corresponds to one UTC day. This value is rounded to the nearest hour since leap seconds may have occured making the number of hours since the start of the mission not exact.\";\n"
- +
- " :units = \"hours since 2017-03-18 00:30:00.000000000\";\n" +
- " :_ChunkSizes = 24U; // uint\n" +
- "\n" +
- " float lat(lat=400);\n" +
- " :long_name = \"Latitude\";\n" +
- " :standard_name = \"latitude\";\n" +
- " :units = \"degrees_north\";\n" +
- " :comment = \"Latitude coordinate at the center of the 0.2 degree bin, degrees_north, at 0.2 degree resolution. Range is -39.9 .. 39.9.\";\n"
- +
- " :_ChunkSizes = 400U; // uint\n" +
- "\n" +
- " float lon(lon=1800);\n" +
- " :long_name = \"Longitude\";\n" +
- " :standard_name = \"longitude\";\n" +
- " :units = \"degrees_east\";\n" +
- " :comment = \"Longitude coordinate at the center of the 0.2 degree bin, degrees_east, at 0.2 degree resolution. Range is 0.1 .. 359.9.\";\n"
- +
- " :_ChunkSizes = 1800U; // uint\n" +
- "\n" +
- " float wind_speed(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Wind speed\";\n" +
- " :standard_name = \"wind_speed\";\n" +
- " :units = \"m s-1\";\n" +
- " :_FillValue = -9999.0f; // float\n" +
- " :valid_range = -5.0, 100.0; // double\n" +
- " :comment = \"Minimum variance estimate of the mean wind speed in the bin over the spatial and temporal intervals specified by the bin's boundaries. This is done using an inverse-variance weighted average of all L2 samples of the wind speed that were made within the bin.\";\n"
- +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " float wind_speed_uncertainty(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Wind speed uncertainty\";\n" +
- " :units = \"m s-1\";\n" +
- " :_FillValue = -9999.0f; // float\n" +
- " :valid_range = 0.0, 10.0; // double\n" +
- " :comment = \"Standard deviation of the error in the mean of all L2 samples of the wind speed within the bin.\";\n"
- +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " int num_wind_speed_samples(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Number of wind speed samples\";\n" +
- " :units = \"1\";\n" +
- " :_FillValue = -99; // int\n" +
- " :valid_range = 1, 100000; // int\n" +
- " :comment = \"The number of L2 wind speed samples used to calculate wind_speed.\";\n" +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " float yslf_wind_speed(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Young sea limited fetch wind speed\";\n" +
- " :standard_name = \"yslf_wind_speed\";\n" +
- " :units = \"m s-1\";\n" +
- " :_FillValue = -9999.0f; // float\n" +
- " :valid_range = -5.0, 100.0; // double\n" +
- " :comment = \"Minimum variance estimate of the young sea limited fetch mean wind speed in the bin over the spatial and temporal intervals specified by the bin's boundaries. This is done using an inverse-variance weighted average of all L2 samples of the wind speed that were made within the bin.\";\n"
- +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " float yslf_wind_speed_uncertainty(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Young sea limited fetch wind speed uncertainty\";\n" +
- " :_FillValue = -9999.0f; // float\n" +
- " :valid_range = 0.0, 10.0; // double\n" +
- " :comment = \"Standard deviation of the error in the mean of all L2 samples of the young sea limited fetch wind speed within the bin.\";\n"
- +
- " :units = \"m s-1\";\n" +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " int num_yslf_wind_speed_samples(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Number of young sea limited fetch wind speed samples\";\n" +
- " :units = \"1\";\n" +
- " :_FillValue = -99; // int\n" +
- " :valid_range = 1, 100000; // int\n" +
- " :comment = \"The number of L2 young sea limited fetch wind speed samples used to calculate yslf_wind_speed.\";\n"
- +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " float mean_square_slope(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Mean square slope\";\n" +
- " :units = \"1\";\n" +
- " :_FillValue = -9999.0f; // float\n" +
- " :valid_range = 0.0, 0.04; // double\n" +
- " :comment = \"Mean MSS in the bin over the spatial and temporal intervals specified by the bin's boundaries.\";\n"
- +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " float mean_square_slope_uncertainty(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Mean square slope uncertainty\";\n" +
- " :units = \"1\";\n" +
- " :_FillValue = -9999.0f; // float\n" +
- " :valid_range = 0.0, 0.08; // double\n" +
- " :comment = \"Standard deviation of the error in the mean of all L2 samples of the MSS within the bin.\";\n"
- +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " int num_mss_samples(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"Number of mean square slope samples\";\n" +
- " :units = \"1\";\n" +
- " :_FillValue = -99; // int\n" +
- " :valid_range = 1, 100000; // int\n" +
- " :comment = \"The number of L2 MSS samples used to calculate mean_square_slope.\";\n" +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " float merra2_wind_speed(time=24, lat=400, lon=1800);\n" +
- " :long_name = \"MERRA-2 reference wind speed\";\n" +
- " :units = \"m s-1\";\n" +
- " :_FillValue = -9999.0f; // float\n" +
- " :valid_range = 0.0, 100.0; // double\n" +
- " :comment = \"Mean MERRA-2 wind speed in the bin over the spatial and temporal intervals specified by the bin's boundaries. See https://disc.gsfc.nasa.gov/datasets/M2I1NXASM_5.12.4/summary?keywords=%22MERRA-2%22\";\n"
- +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " int num_merra2_wind_speed_samples(time=24, lat=400, lon=1800);\n" +
- " :units = \"1\";\n" +
- " :_FillValue = -99; // int\n" +
- " :valid_range = 1, 100000; // int\n" +
- " :comment = \"The number of L2 MERRA-2 wind speed samples used to calculate merra2_wind_speed.\";\n"
- +
- " :long_name = \"Number of MERRA-2 wind speed samples\";\n" +
- " :_ChunkSizes = 8U, 134U, 600U; // uint\n" +
- "\n" +
- " // global attributes:\n" +
- " :Conventions = \"CF-1.6, ACDD-1.3, ISO-8601\";\n" +
- " :standard_name_vocabulary = \"CF Standard Name Table v30\";\n" +
- " :project = \"CYGNSS\";\n" +
- " :summary = \"CYGNSS is a NASA Earth Venture mission, managed by the Earth System Science Pathfinder Program. The mission consists of a constellation of eight small satellites. The eight observatories comprise a constellation that measures the ocean surface wind field with very high temporal resolution and spatial coverage, under all precipitating conditions, and over the full dynamic range of wind speeds experienced in a tropical cyclone. The CYGNSS observatories fly in 510 km circular orbits at a common inclination of 35°. Each observatory includes a Delay Doppler Mapping Instrument (DDMI) consisting of a modified GPS receiver capable of measuring surface scattering, a low gain zenith antenna for measurement of the direct GPS signal, and two high gain nadir antennas for measurement of the weaker scattered signal. Each DDMI is capable of measuring 4 simultaneous bi-static reflections, resulting in a total of 32 wind measurements per second by the full constellation.\";\n"
- +
- " :program = \"CYGNSS\";\n" +
- " :references = \"Ruf, C., P. Chang, M.P. Clarizia, S. Gleason, Z. Jelenak, J. Murray, M. Morris, S. Musko, D. Posselt, D. Provost, D. Starkenburg, V. Zavorotny, CYGNSS Handbook, Ann Arbor, MI, Michigan Pub., ISBN 978-1-60785-380-0, 154 pp, 1 Apr 2016. http://clasp-research.engin.umich.edu/missions/cygnss/reference/cygnss-mission/CYGNSS_Handbook_April2016.pdf\n"
- +
- "Global Modeling and Assimilation Office (GMAO) (2015), MERRA-2 inst1_2d_asm_Nx: 2d,1-Hourly,Instantaneous,Single-Level,Assimilation,Single-Level Diagnostics V5.12.4, Greenbelt, MD, USA, Goddard Earth Sciences Data and Information Services Center (GES DISC), Accessed: {dates differ for each L1 file. See 'source' L1 files for exact timestamps}, https://doi.org/10.5067/3Z173KIE2TPD\";\n"
- +
- " :processing_level = \"3\";\n" +
- " :comment = \"This Level 3 gridded product combines all 8 x 4 = 32 wind speed and mean square slope (MSS) measurements made by the CYGNSS constellation each second, uniformly sampled in latitude, longitude and time.\";\n"
- +
- " :creator_type = \"institution\";\n" +
- " :institution = \"University of Michigan Space Physics Research Lab (SPRL)\";\n" +
- " :creator_name = \"CYGNSS Science Operations Center\";\n" +
- " :publisher_name = \"PO.DAAC\";\n" +
- " :publisher_email = \"podaac@podaac.jpl.nasa.gov\";\n" +
- " :publisher_url = \"\u200Bhttp://podaac.jpl.nasa.gov\";\n" + // ! zero width space #8203
- " :sensor = \"Delay Doppler Mapping Instrument (DDMI)\";\n" +
- " :geospatial_lat_min = \"-39.9N\";\n" +
- " :geospatial_lat_max = \"39.9N\";\n" +
- " :geospatial_lon_min = \"0.1E\";\n" +
- " :geospatial_lon_max = \"359.9E\";\n" +
- " :version_id = \"1.0\";\n" +
- " :title = \"CYGNSS Level 3 Climate Data Record Version 1.0\";\n" +
- " :ShortName = \"CYGNSS_L3_CDR_V1.0\";\n" +
- " :id = \"PODAAC-CYGNS-L3C10\";\n" +
- " :netcdf_version_id = \"4.3.3.1 of Dec 10 2015 16:44:18 $\";\n" +
- " :l3_algorithm_version = \"cdr-v1.0\";\n" +
- " :date_created = \"2020-04-28T17:49:27Z\";\n" +
- " :date_issued = \"2020-04-28T17:49:27Z\";\n" +
- " :source = \"cyg.ddmi.s20200101-000000-e20200101-235959.l2.wind-mss-cdr.a10.d10.nc\";\n" +
- " :history = \"Tue Apr 28 17:49:28 2020: ncks -O -L1 -a /tmp/qt_temp.J61461 /tmp/qt_temp.T61461\n" +
- "/data/ops/op_cdr_1_0/apps/src/produce-L3-files/produce-L3-files --dstore production_1@cygnss-data-1.engin.umich.edu --day 2020-01-01\";\n"
- +
- " :time_coverage_start = \"2020-01-01T00:30:00Z\";\n" +
- " :time_coverage_end = \"2020-01-01T23:30:00Z\";\n" +
- " :time_coverage_duration = \"P1DT00H00M00S\";\n" +
- " :time_coverage_resolution = \"P0DT1H0M0S\";\n" +
- " :platform = \"Observatory References: cyg1, cyg2, cyg3, cyg4, cyg5, cyg6, cyg7, cyg8\";\n" +
- " :NCO = \"4.4.4\";\n" +
- "\n" +
- " data:\n" +
- " time = \n" +
- " {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23}\n" +
- "}\n";
- Test.ensureEqual(results.substring(0, expected.length()), expected, "\nresults=\n" + results);
-
- // ensure files are reread
- File2.deleteAllFiles(EDDGridFromNcFilesUnpacked.datasetDir("testSuperPreciseTimeUnits"));
- EDDGrid eddGrid = (EDDGrid) EDDTestDataset.gettestSuperPreciseTimeUnits();
-
- // .dds dds isn't affected by userDapQuery
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "",
- tDir, eddGrid.className(), ".dds");
- results = File2.directReadFrom88591File(
- tDir + tName);
- expected = // difference from testUInt16Dap: lat lon are float here, not double
- "Dataset {\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 wind_speed[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } wind_speed;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 wind_speed_uncertainty[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } wind_speed_uncertainty;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Int32 num_wind_speed_samples[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } num_wind_speed_samples;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 yslf_wind_speed[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } yslf_wind_speed;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 yslf_wind_speed_uncertainty[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } yslf_wind_speed_uncertainty;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Int32 num_yslf_wind_speed_samples[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } num_yslf_wind_speed_samples;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 mean_square_slope[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } mean_square_slope;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 mean_square_slope_uncertainty[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } mean_square_slope_uncertainty;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Int32 num_mss_samples[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } num_mss_samples;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 merra2_wind_speed[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } merra2_wind_speed;\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Int32 num_merra2_wind_speed_samples[time = 24][latitude = 400][longitude = 1800];\n" +
- " MAPS:\n" +
- " Float64 time[time = 24];\n" +
- " Float32 latitude[latitude = 400];\n" +
- " Float32 longitude[longitude = 1800];\n" +
- " } num_merra2_wind_speed_samples;\n" +
- "} testSuperPreciseTimeUnits;\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // .csv data values
- userDapQuery = "time";
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "time",
- tDir, eddGrid.className(), ".csv");
- results = File2.directReadFrom88591File(
- tDir + tName);
- String2.log(results);
- expected = // difference from testUInt16Dap: lat lon are float here, not double
- "time\n" +
- "UTC\n" +
- "2020-01-01T00:30:00Z\n" +
- "2020-01-01T01:30:00Z\n" +
- "2020-01-01T02:30:00Z\n" +
- "2020-01-01T03:30:00Z\n" +
- "2020-01-01T04:30:00Z\n" +
- "2020-01-01T05:30:00Z\n" +
- "2020-01-01T06:30:00Z\n" +
- "2020-01-01T07:30:00Z\n" +
- "2020-01-01T08:30:00Z\n" +
- "2020-01-01T09:30:00Z\n" +
- "2020-01-01T10:30:00Z\n" +
- "2020-01-01T11:30:00Z\n" +
- "2020-01-01T12:30:00Z\n" +
- "2020-01-01T13:30:00Z\n" +
- "2020-01-01T14:30:00Z\n" +
- "2020-01-01T15:30:00Z\n" +
- "2020-01-01T16:30:00Z\n" +
- "2020-01-01T17:30:00Z\n" +
- "2020-01-01T18:30:00Z\n" +
- "2020-01-01T19:30:00Z\n" +
- "2020-01-01T20:30:00Z\n" +
- "2020-01-01T21:30:00Z\n" +
- "2020-01-01T22:30:00Z\n" +
- "2020-01-01T23:30:00Z\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- NcHelper.debugMode = oDebugMode;
+ + "/";
+ String tDir = EDStatic.fullTestCacheDirectory;
+ boolean oDebugMode = NcHelper.debugMode;
+ NcHelper.debugMode = true;
+
+ // ncdump of file showing time units and values
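+      // The time units below, "hours since 2020-01-01 00:30:00.000000000", have a
+      // nanosecond-precision base time; that extra precision is what this test exercises.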
+ results = NcHelper.ncdump(fileDir + "superPreciseTimeUnits.nc", "-v time");
+ expected =
+ "netcdf superPreciseTimeUnits.nc {\n"
+ + " dimensions:\n"
+ + " time = 24;\n"
+ + " lat = 400;\n"
+ + " lon = 1800;\n"
+ + " variables:\n"
+ + " byte time(time=24);\n"
+ + " :long_name = \"Reference time of file\";\n"
+ + " :standard_name = \"time\";\n"
+ + " :calendar = \"gregorian\";\n"
+ + " :comment = \"Timestamp coordinate at the center of the 1 hr bin, at 1 hour resolution. Range is one UTC day.\";\n"
+ + " :units = \"hours since 2020-01-01 00:30:00.000000000\";\n"
+ + " :_ChunkSizes = 24U; // uint\n"
+ + "\n"
+ + " int epoch_time(time=24);\n"
+ + " :long_name = \"Time Centering of Data Based on Epoch Reference\";\n"
+ + " :standard_name = \"time\";\n"
+ + " :calendar = \"gregorian\";\n"
+ + " :comment = \"Timestamp coordinate is at the center of the 1 hr bin, at 1 hour resolution referenced by the historical Epoch reference date/time. The Epoch reference date/time corresponds to the first observation time window in the CYGNSS historical data record. Total number of timestamps in a file corresponds to one UTC day. This value is rounded to the nearest hour since leap seconds may have occured making the number of hours since the start of the mission not exact.\";\n"
+ + " :units = \"hours since 2017-03-18 00:30:00.000000000\";\n"
+ + " :_ChunkSizes = 24U; // uint\n"
+ + "\n"
+ + " float lat(lat=400);\n"
+ + " :long_name = \"Latitude\";\n"
+ + " :standard_name = \"latitude\";\n"
+ + " :units = \"degrees_north\";\n"
+ + " :comment = \"Latitude coordinate at the center of the 0.2 degree bin, degrees_north, at 0.2 degree resolution. Range is -39.9 .. 39.9.\";\n"
+ + " :_ChunkSizes = 400U; // uint\n"
+ + "\n"
+ + " float lon(lon=1800);\n"
+ + " :long_name = \"Longitude\";\n"
+ + " :standard_name = \"longitude\";\n"
+ + " :units = \"degrees_east\";\n"
+ + " :comment = \"Longitude coordinate at the center of the 0.2 degree bin, degrees_east, at 0.2 degree resolution. Range is 0.1 .. 359.9.\";\n"
+ + " :_ChunkSizes = 1800U; // uint\n"
+ + "\n"
+ + " float wind_speed(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Wind speed\";\n"
+ + " :standard_name = \"wind_speed\";\n"
+ + " :units = \"m s-1\";\n"
+ + " :_FillValue = -9999.0f; // float\n"
+ + " :valid_range = -5.0, 100.0; // double\n"
+ + " :comment = \"Minimum variance estimate of the mean wind speed in the bin over the spatial and temporal intervals specified by the bin's boundaries. This is done using an inverse-variance weighted average of all L2 samples of the wind speed that were made within the bin.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " float wind_speed_uncertainty(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Wind speed uncertainty\";\n"
+ + " :units = \"m s-1\";\n"
+ + " :_FillValue = -9999.0f; // float\n"
+ + " :valid_range = 0.0, 10.0; // double\n"
+ + " :comment = \"Standard deviation of the error in the mean of all L2 samples of the wind speed within the bin.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " int num_wind_speed_samples(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Number of wind speed samples\";\n"
+ + " :units = \"1\";\n"
+ + " :_FillValue = -99; // int\n"
+ + " :valid_range = 1, 100000; // int\n"
+ + " :comment = \"The number of L2 wind speed samples used to calculate wind_speed.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " float yslf_wind_speed(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Young sea limited fetch wind speed\";\n"
+ + " :standard_name = \"yslf_wind_speed\";\n"
+ + " :units = \"m s-1\";\n"
+ + " :_FillValue = -9999.0f; // float\n"
+ + " :valid_range = -5.0, 100.0; // double\n"
+ + " :comment = \"Minimum variance estimate of the young sea limited fetch mean wind speed in the bin over the spatial and temporal intervals specified by the bin's boundaries. This is done using an inverse-variance weighted average of all L2 samples of the wind speed that were made within the bin.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " float yslf_wind_speed_uncertainty(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Young sea limited fetch wind speed uncertainty\";\n"
+ + " :_FillValue = -9999.0f; // float\n"
+ + " :valid_range = 0.0, 10.0; // double\n"
+ + " :comment = \"Standard deviation of the error in the mean of all L2 samples of the young sea limited fetch wind speed within the bin.\";\n"
+ + " :units = \"m s-1\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " int num_yslf_wind_speed_samples(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Number of young sea limited fetch wind speed samples\";\n"
+ + " :units = \"1\";\n"
+ + " :_FillValue = -99; // int\n"
+ + " :valid_range = 1, 100000; // int\n"
+ + " :comment = \"The number of L2 young sea limited fetch wind speed samples used to calculate yslf_wind_speed.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " float mean_square_slope(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Mean square slope\";\n"
+ + " :units = \"1\";\n"
+ + " :_FillValue = -9999.0f; // float\n"
+ + " :valid_range = 0.0, 0.04; // double\n"
+ + " :comment = \"Mean MSS in the bin over the spatial and temporal intervals specified by the bin's boundaries.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " float mean_square_slope_uncertainty(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Mean square slope uncertainty\";\n"
+ + " :units = \"1\";\n"
+ + " :_FillValue = -9999.0f; // float\n"
+ + " :valid_range = 0.0, 0.08; // double\n"
+ + " :comment = \"Standard deviation of the error in the mean of all L2 samples of the MSS within the bin.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " int num_mss_samples(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"Number of mean square slope samples\";\n"
+ + " :units = \"1\";\n"
+ + " :_FillValue = -99; // int\n"
+ + " :valid_range = 1, 100000; // int\n"
+ + " :comment = \"The number of L2 MSS samples used to calculate mean_square_slope.\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " float merra2_wind_speed(time=24, lat=400, lon=1800);\n"
+ + " :long_name = \"MERRA-2 reference wind speed\";\n"
+ + " :units = \"m s-1\";\n"
+ + " :_FillValue = -9999.0f; // float\n"
+ + " :valid_range = 0.0, 100.0; // double\n"
+ + " :comment = \"Mean MERRA-2 wind speed in the bin over the spatial and temporal intervals specified by the bin's boundaries. See https://disc.gsfc.nasa.gov/datasets/M2I1NXASM_5.12.4/summary?keywords=%22MERRA-2%22\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " int num_merra2_wind_speed_samples(time=24, lat=400, lon=1800);\n"
+ + " :units = \"1\";\n"
+ + " :_FillValue = -99; // int\n"
+ + " :valid_range = 1, 100000; // int\n"
+ + " :comment = \"The number of L2 MERRA-2 wind speed samples used to calculate merra2_wind_speed.\";\n"
+ + " :long_name = \"Number of MERRA-2 wind speed samples\";\n"
+ + " :_ChunkSizes = 8U, 134U, 600U; // uint\n"
+ + "\n"
+ + " // global attributes:\n"
+ + " :Conventions = \"CF-1.6, ACDD-1.3, ISO-8601\";\n"
+ + " :standard_name_vocabulary = \"CF Standard Name Table v30\";\n"
+ + " :project = \"CYGNSS\";\n"
+ + " :summary = \"CYGNSS is a NASA Earth Venture mission, managed by the Earth System Science Pathfinder Program. The mission consists of a constellation of eight small satellites. The eight observatories comprise a constellation that measures the ocean surface wind field with very high temporal resolution and spatial coverage, under all precipitating conditions, and over the full dynamic range of wind speeds experienced in a tropical cyclone. The CYGNSS observatories fly in 510 km circular orbits at a common inclination of 35°. Each observatory includes a Delay Doppler Mapping Instrument (DDMI) consisting of a modified GPS receiver capable of measuring surface scattering, a low gain zenith antenna for measurement of the direct GPS signal, and two high gain nadir antennas for measurement of the weaker scattered signal. Each DDMI is capable of measuring 4 simultaneous bi-static reflections, resulting in a total of 32 wind measurements per second by the full constellation.\";\n"
+ + " :program = \"CYGNSS\";\n"
+ + " :references = \"Ruf, C., P. Chang, M.P. Clarizia, S. Gleason, Z. Jelenak, J. Murray, M. Morris, S. Musko, D. Posselt, D. Provost, D. Starkenburg, V. Zavorotny, CYGNSS Handbook, Ann Arbor, MI, Michigan Pub., ISBN 978-1-60785-380-0, 154 pp, 1 Apr 2016. http://clasp-research.engin.umich.edu/missions/cygnss/reference/cygnss-mission/CYGNSS_Handbook_April2016.pdf\n"
+ + "Global Modeling and Assimilation Office (GMAO) (2015), MERRA-2 inst1_2d_asm_Nx: 2d,1-Hourly,Instantaneous,Single-Level,Assimilation,Single-Level Diagnostics V5.12.4, Greenbelt, MD, USA, Goddard Earth Sciences Data and Information Services Center (GES DISC), Accessed: {dates differ for each L1 file. See 'source' L1 files for exact timestamps}, https://doi.org/10.5067/3Z173KIE2TPD\";\n"
+ + " :processing_level = \"3\";\n"
+ + " :comment = \"This Level 3 gridded product combines all 8 x 4 = 32 wind speed and mean square slope (MSS) measurements made by the CYGNSS constellation each second, uniformly sampled in latitude, longitude and time.\";\n"
+ + " :creator_type = \"institution\";\n"
+ + " :institution = \"University of Michigan Space Physics Research Lab (SPRL)\";\n"
+ + " :creator_name = \"CYGNSS Science Operations Center\";\n"
+ + " :publisher_name = \"PO.DAAC\";\n"
+ + " :publisher_email = \"podaac@podaac.jpl.nasa.gov\";\n"
+ + " :publisher_url = \"\u200Bhttp://podaac.jpl.nasa.gov\";\n"
+ + // ! zero width space #8203
+ " :sensor = \"Delay Doppler Mapping Instrument (DDMI)\";\n"
+ + " :geospatial_lat_min = \"-39.9N\";\n"
+ + " :geospatial_lat_max = \"39.9N\";\n"
+ + " :geospatial_lon_min = \"0.1E\";\n"
+ + " :geospatial_lon_max = \"359.9E\";\n"
+ + " :version_id = \"1.0\";\n"
+ + " :title = \"CYGNSS Level 3 Climate Data Record Version 1.0\";\n"
+ + " :ShortName = \"CYGNSS_L3_CDR_V1.0\";\n"
+ + " :id = \"PODAAC-CYGNS-L3C10\";\n"
+ + " :netcdf_version_id = \"4.3.3.1 of Dec 10 2015 16:44:18 $\";\n"
+ + " :l3_algorithm_version = \"cdr-v1.0\";\n"
+ + " :date_created = \"2020-04-28T17:49:27Z\";\n"
+ + " :date_issued = \"2020-04-28T17:49:27Z\";\n"
+ + " :source = \"cyg.ddmi.s20200101-000000-e20200101-235959.l2.wind-mss-cdr.a10.d10.nc\";\n"
+ + " :history = \"Tue Apr 28 17:49:28 2020: ncks -O -L1 -a /tmp/qt_temp.J61461 /tmp/qt_temp.T61461\n"
+ + "/data/ops/op_cdr_1_0/apps/src/produce-L3-files/produce-L3-files --dstore production_1@cygnss-data-1.engin.umich.edu --day 2020-01-01\";\n"
+ + " :time_coverage_start = \"2020-01-01T00:30:00Z\";\n"
+ + " :time_coverage_end = \"2020-01-01T23:30:00Z\";\n"
+ + " :time_coverage_duration = \"P1DT00H00M00S\";\n"
+ + " :time_coverage_resolution = \"P0DT1H0M0S\";\n"
+ + " :platform = \"Observatory References: cyg1, cyg2, cyg3, cyg4, cyg5, cyg6, cyg7, cyg8\";\n"
+ + " :NCO = \"4.4.4\";\n"
+ + "\n"
+ + " data:\n"
+ + " time = \n"
+ + " {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23}\n"
+ + "}\n";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "\nresults=\n" + results);
+
+ // ensure files are reread
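+    // (deleting the cached dataset info forces the dataset to be rebuilt from the source file)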
+ File2.deleteAllFiles(EDDGridFromNcFilesUnpacked.datasetDir("testSuperPreciseTimeUnits"));
+ EDDGrid eddGrid = (EDDGrid) EDDTestDataset.gettestSuperPreciseTimeUnits();
+
+    // .dds (the dds isn't affected by the userDapQuery)
+ tName =
+ eddGrid.makeNewFileForDapQuery(language, null, null, "", tDir, eddGrid.className(), ".dds");
+ results = File2.directReadFrom88591File(tDir + tName);
+ expected = // difference from testUInt16Dap: lat lon are float here, not double
+ "Dataset {\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 wind_speed[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } wind_speed;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 wind_speed_uncertainty[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } wind_speed_uncertainty;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Int32 num_wind_speed_samples[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } num_wind_speed_samples;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 yslf_wind_speed[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } yslf_wind_speed;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 yslf_wind_speed_uncertainty[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } yslf_wind_speed_uncertainty;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Int32 num_yslf_wind_speed_samples[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } num_yslf_wind_speed_samples;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 mean_square_slope[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } mean_square_slope;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 mean_square_slope_uncertainty[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } mean_square_slope_uncertainty;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Int32 num_mss_samples[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } num_mss_samples;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 merra2_wind_speed[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } merra2_wind_speed;\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Int32 num_merra2_wind_speed_samples[time = 24][latitude = 400][longitude = 1800];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 24];\n"
+ + " Float32 latitude[latitude = 400];\n"
+ + " Float32 longitude[longitude = 1800];\n"
+ + " } num_merra2_wind_speed_samples;\n"
+ + "} testSuperPreciseTimeUnits;\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ // .csv data values
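+    // Expect bin-center times: the base time 2020-01-01T00:30Z plus 0..23 hours,
+    // rendered as ISO 8601.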
+ userDapQuery = "time";
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language, null, null, "time", tDir, eddGrid.className(), ".csv");
+ results = File2.directReadFrom88591File(tDir + tName);
+ String2.log(results);
+ expected = // difference from testUInt16Dap: lat lon are float here, not double
+ "time\n"
+ + "UTC\n"
+ + "2020-01-01T00:30:00Z\n"
+ + "2020-01-01T01:30:00Z\n"
+ + "2020-01-01T02:30:00Z\n"
+ + "2020-01-01T03:30:00Z\n"
+ + "2020-01-01T04:30:00Z\n"
+ + "2020-01-01T05:30:00Z\n"
+ + "2020-01-01T06:30:00Z\n"
+ + "2020-01-01T07:30:00Z\n"
+ + "2020-01-01T08:30:00Z\n"
+ + "2020-01-01T09:30:00Z\n"
+ + "2020-01-01T10:30:00Z\n"
+ + "2020-01-01T11:30:00Z\n"
+ + "2020-01-01T12:30:00Z\n"
+ + "2020-01-01T13:30:00Z\n"
+ + "2020-01-01T14:30:00Z\n"
+ + "2020-01-01T15:30:00Z\n"
+ + "2020-01-01T16:30:00Z\n"
+ + "2020-01-01T17:30:00Z\n"
+ + "2020-01-01T18:30:00Z\n"
+ + "2020-01-01T19:30:00Z\n"
+ + "2020-01-01T20:30:00Z\n"
+ + "2020-01-01T21:30:00Z\n"
+ + "2020-01-01T22:30:00Z\n"
+ + "2020-01-01T23:30:00Z\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ NcHelper.debugMode = oDebugMode;
+ }
+
+ /**
+   * Test files from https://oceandata.sci.gsfc.nasa.gov/MODIS-Aqua/L3SMI, stored in
+   * /erddapTest/unsigned/.
+ *
+ * @throws Throwable if trouble
+ */
+ @org.junit.jupiter.api.Test
+ @TagImageComparison
+ void testMissingValue() throws Throwable {
+ // String2.log("\n*** EDDGridFromNcFilesUnpacked.testMissingValue");
+ // testVerboseOn();
+ int language = 0;
+ String name, tName, results, tResults, expected, userDapQuery;
+ String today = Calendar2.getCurrentISODateTimeStringZulu() + "Z";
+ String tDir = EDStatic.fullTestCacheDirectory;
+ String fileDir =
+ Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/data/unpacked/").toURI())
+ .toString()
+ + "/";
+ String fileName1 = "A2003001.L3m_DAY_POC_poc_4km.nc";
+ String fileName2 = "A2016241.L3m_DAY_POC_poc_4km.nc";
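+    // fileName1 stores poc as float (not packed); fileName2 stores poc as short,
+    // packed with scale_factor/add_offset. Both cases are tested below.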
+ Variable var;
+ Attributes atts;
+ Array array;
+ PrimitiveArray pa;
+ boolean oDebugMode = NcHelper.debugMode;
+ NcHelper.debugMode = true;
+
+ // **** fileName1 -- not packed data: poc is float
+ // DumpString
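+    // ("-h" prints just the header, no data values)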
+ results = NcHelper.ncdump(fileDir + fileName1, "-h");
+ expected =
+ "netcdf A2003001.L3m_DAY_POC_poc_4km.nc {\n"
+ + " dimensions:\n"
+ + " lon = 8640;\n"
+ + " eightbitcolor = 256;\n"
+ + " rgb = 3;\n"
+ + " lat = 4320;\n"
+ + " variables:\n"
+ + " float poc(lat=4320, lon=8640);\n"
+ + " :long_name = \"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\";\n"
+ + " :units = \"mg m^-3\";\n"
+ + " :standard_name = \"mole_concentration_of_particulate_organic_carbon_in_sea_water\";\n"
+ + " :_FillValue = -32767.0f; // float\n"
+ + " :valid_min = 0.0f; // float\n"
+ + " :valid_max = 1000.0f; // float\n"
+ + " :display_scale = \"log\";\n"
+ + " :display_min = 10.0; // double\n"
+ + " :display_max = 1000.0; // double\n"
+ + " :scale_factor = 1.0f; // float\n"
+ + " :add_offset = 0.0f; // float\n"
+ + " :reference = \"Stramski, D., et al. \\\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\\\" Biogeosciences 5.1 (2008): 171-201.\";\n"
+ + " :_ChunkSizes = 64U, 64U; // uint\n"
+ + "\n"
+ + " float lon(lon=8640);\n"
+ + " :long_name = \"Longitude\";\n"
+ + " :units = \"degree_east\";\n"
+ + " :_FillValue = -32767.0f; // float\n"
+ + " :valid_min = -180.0f; // float\n"
+ + " :valid_max = 180.0f; // float\n"
+ + "\n"
+ + " ubyte palette(rgb=3, eightbitcolor=256);\n"
+ +
+ // " :_FillValue = -1UB; // byte\n" + //gone with
+ // " :_Unsigned = \"true\";\n" +
+ "\n"
+ + " float lat(lat=4320);\n"
+ + " :long_name = \"Latitude\";\n"
+ + " :units = \"degree_north\";\n"
+ + " :_FillValue = -32767.0f; // float\n"
+ + " :valid_min = -90.0f; // float\n"
+ + " :valid_max = 90.0f; // float\n";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
+
+ NetcdfFile ncFile = NcHelper.openFile(fileDir + fileName1);
+ try {
+
+ // lon
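+      // First check the attributes as stored in the file, then again after
+      // Units2.unpackVariableAttributes standardizes _FillValue and units.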
+ var = ncFile.findVariable("lon");
+ atts = new Attributes();
+ NcHelper.getVariableAttributes(var, atts);
+ results = atts.toString();
+ expected =
+ " _FillValue=-32767.0f\n"
+ + " long_name=Longitude\n"
+ + " units=degree_east\n"
+ + // what's in the file
+ " valid_max=180.0f\n"
+ + " valid_min=-180.0f\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
+ results = atts.toString();
+ expected =
+ " _FillValue=NaNf\n"
+ + // converted to PA standard mv
+ " long_name=Longitude\n"
+ + " units=degrees_east\n"
+ + // standardized to degrees_east
+ " valid_max=180.0f\n"
+ + " valid_min=-180.0f\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // palette
+ var = ncFile.findVariable("palette");
+ atts = new Attributes();
+ NcHelper.getVariableAttributes(var, atts);
+ results = atts.toString();
+ expected =
+ // " _FillValue=-1b\n" +
+ // " _Unsigned=true\n"; //disappeared w netcdf-java 5.2, so I added back in with
+ // code in getVariableAttributes. 2020 now gone again because it is just a part
+ // of the dataType
+ "";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
+ results = atts.toString();
+ expected = "";
+ // " _FillValue=32767s\n"; //byte -> short //converted to PA standard mv
+ // " _Unsigned=true\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // palette as unsigned byte
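+      // Values >127 (e.g., 147) would print as negatives if misread as signed bytes
+      // (147 -> -109), so this also verifies the unsigned interpretation.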
+ pa =
+ NcHelper.getPrimitiveArray(
+ var.read(new int[] {0, 0}, new int[] {1, 10}), true, true); // origin, shape,
+ // buildStringFromChar
+ Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
+ results = pa.toString();
+ expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // palette ubyte
+ pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
+ Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
+ results = pa.toString();
+ expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138"; // unsigned
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // poc
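+      // scale_factor=1 and add_offset=0 in this file, so unpacking just removes them
+      // and standardizes _FillValue (-> NaN) and units (mg m^-3 -> mg m-3).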
+ var = ncFile.findVariable("poc");
+ atts = new Attributes();
+ NcHelper.getVariableAttributes(var, atts);
+ results = atts.toString();
+ expected =
+ " _ChunkSizes=64ui,64ui\n"
+ + " _FillValue=-32767.0f\n"
+ + " add_offset=0.0f\n"
+ + " display_max=1000.0d\n"
+ + " display_min=10.0d\n"
+ + " display_scale=log\n"
+ + " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n"
+ + " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
+ + " scale_factor=1.0f\n"
+ + " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n"
+ + " units=mg m^-3\n"
+ + " valid_max=1000.0f\n"
+ + " valid_min=0.0f\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
+ results = atts.toString();
+ expected =
+ " _ChunkSizes=64ui,64ui\n"
+ + " _FillValue=NaNf\n"
+ + // standardized
+ // " add_offset=0.0f\n" + //removed
+ " display_max=1000.0d\n"
+ + " display_min=10.0d\n"
+ + " display_scale=log\n"
+ + " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n"
+ + " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
+ +
+ // " scale_factor=1.0f\n" + //removed
+ " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n"
+ + " units=mg m-3\n"
+ + " valid_max=1000.0f\n"
+ + " valid_min=0.0f\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+      // poc as raw file values (float; this file is not packed)
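+      // The strided Section (0:4100:1000, 0:8100:1000) samples a 5 x 9 = 45 value grid.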
+ pa =
+ NcHelper.getPrimitiveArray(
+ var.read(new Section("(0:4100:1000,0:8100:1000)")), true, false); // start:end:stride,
+ // buildStringFromChar, isUnsigned
+ Test.ensureEqual(pa.elementTypeString(), "float", "");
+ results = pa.toString();
+ expected =
+ "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, "
+ + "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, "
+ + "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, "
+ + "-32767.0, -32767.0, 29.476826, -32767.0, -32767.0, -32767.0, 431.7499, -32767.0, "
+ + "36.19993, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, "
+ + "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // poc as unpacked values (float)
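+      // unpackPA converts the file's _FillValue (-32767.0f) to the standard NaN.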
+ pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
+ Test.ensureEqual(pa.elementTypeString(), "float", "");
+ results = pa.toString();
+ expected = // standardized mv
+ "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, "
+ + "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, 29.476826, NaN, NaN, NaN, "
+ + "431.7499, NaN, 36.19993, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+ } finally {
+ try {
+ if (ncFile != null) ncFile.close();
+ } catch (Exception e9) {
+ }
}
- /**
- * Test files from https://oceandata.sci.gsfc.nasa.gov/MODIS-Aqua/L3SMI
- * and stored in /erddapTest/unsigned/
- *
- * @throws Throwable if trouble
- */
- @org.junit.jupiter.api.Test
- @TagImageComparison
- void testMissingValue() throws Throwable {
- // String2.log("\n*** EDDGridFromNcFilesUnpacked.testMissingValue");
- // testVerboseOn();
- int language = 0;
- String name, tName, results, tResults, expected, userDapQuery;
- String today = Calendar2.getCurrentISODateTimeStringZulu() + "Z";
- String tDir = EDStatic.fullTestCacheDirectory;
- String fileDir = Path.of(EDDGridFromNcFilesUnpackedTests.class.getResource("/data/unpacked/").toURI())
- .toString()
- + "/";
- String fileName1 = "A2003001.L3m_DAY_POC_poc_4km.nc";
- String fileName2 = "A2016241.L3m_DAY_POC_poc_4km.nc";
- Variable var;
- Attributes atts;
- Array array;
- PrimitiveArray pa;
- boolean oDebugMode = NcHelper.debugMode;
- NcHelper.debugMode = true;
-
- // **** fileName1 -- not packed data: poc is float
- // DumpString
- results = NcHelper.ncdump(fileDir + fileName1, "-h");
- expected = "netcdf A2003001.L3m_DAY_POC_poc_4km.nc {\n" +
- " dimensions:\n" +
- " lon = 8640;\n" +
- " eightbitcolor = 256;\n" +
- " rgb = 3;\n" +
- " lat = 4320;\n" +
- " variables:\n" +
- " float poc(lat=4320, lon=8640);\n" +
- " :long_name = \"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\";\n" +
- " :units = \"mg m^-3\";\n" +
- " :standard_name = \"mole_concentration_of_particulate_organic_carbon_in_sea_water\";\n" +
- " :_FillValue = -32767.0f; // float\n" +
- " :valid_min = 0.0f; // float\n" +
- " :valid_max = 1000.0f; // float\n" +
- " :display_scale = \"log\";\n" +
- " :display_min = 10.0; // double\n" +
- " :display_max = 1000.0; // double\n" +
- " :scale_factor = 1.0f; // float\n" +
- " :add_offset = 0.0f; // float\n" +
- " :reference = \"Stramski, D., et al. \\\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\\\" Biogeosciences 5.1 (2008): 171-201.\";\n"
- +
- " :_ChunkSizes = 64U, 64U; // uint\n" +
- "\n" +
- " float lon(lon=8640);\n" +
- " :long_name = \"Longitude\";\n" +
- " :units = \"degree_east\";\n" +
- " :_FillValue = -32767.0f; // float\n" +
- " :valid_min = -180.0f; // float\n" +
- " :valid_max = 180.0f; // float\n" +
- "\n" +
- " ubyte palette(rgb=3, eightbitcolor=256);\n" +
- // " :_FillValue = -1UB; // byte\n" + //gone with
- // " :_Unsigned = \"true\";\n" +
- "\n" +
- " float lat(lat=4320);\n" +
- " :long_name = \"Latitude\";\n" +
- " :units = \"degree_north\";\n" +
- " :_FillValue = -32767.0f; // float\n" +
- " :valid_min = -90.0f; // float\n" +
- " :valid_max = 90.0f; // float\n";
- Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
-
- NetcdfFile ncFile = NcHelper.openFile(fileDir + fileName1);
- try {
-
- // lon
- var = ncFile.findVariable("lon");
- atts = new Attributes();
- NcHelper.getVariableAttributes(var, atts);
- results = atts.toString();
- expected = " _FillValue=-32767.0f\n" +
- " long_name=Longitude\n" +
- " units=degree_east\n" + // what's in the file
- " valid_max=180.0f\n" +
- " valid_min=-180.0f\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
- results = atts.toString();
- expected = " _FillValue=NaNf\n" + // converted to PA standard mv
- " long_name=Longitude\n" +
- " units=degrees_east\n" + // standardized to degrees_east
- " valid_max=180.0f\n" +
- " valid_min=-180.0f\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // palette
- var = ncFile.findVariable("palette");
- atts = new Attributes();
- NcHelper.getVariableAttributes(var, atts);
- results = atts.toString();
- expected =
- // " _FillValue=-1b\n" +
- // " _Unsigned=true\n"; //disappeared w netcdf-java 5.2, so I added back in with
- // code in getVariableAttributes. 2020 now gone again because it is just a part
- // of the dataType
- "";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
- results = atts.toString();
- expected = "";
- // " _FillValue=32767s\n"; //byte -> short //converted to PA standard mv
- // " _Unsigned=true\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // palette as unsigned byte
- pa = NcHelper.getPrimitiveArray(
- var.read(new int[] { 0, 0 }, new int[] { 1, 10 }), true, true); // origin, shape,
- // buildStringFromChar
- Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
- results = pa.toString();
- expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // palette ubyte
- pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
- Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
- results = pa.toString();
- expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138"; // unsigned
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // poc
- var = ncFile.findVariable("poc");
- atts = new Attributes();
- NcHelper.getVariableAttributes(var, atts);
- results = atts.toString();
- expected = " _ChunkSizes=64ui,64ui\n" +
- " _FillValue=-32767.0f\n" +
- " add_offset=0.0f\n" +
- " display_max=1000.0d\n" +
- " display_min=10.0d\n" +
- " display_scale=log\n" +
- " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n" +
- " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
- +
- " scale_factor=1.0f\n" +
- " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n" +
- " units=mg m^-3\n" +
- " valid_max=1000.0f\n" +
- " valid_min=0.0f\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
- results = atts.toString();
- expected = " _ChunkSizes=64ui,64ui\n" +
- " _FillValue=NaNf\n" + // standardized
- // " add_offset=0.0f\n" + //removed
- " display_max=1000.0d\n" +
- " display_min=10.0d\n" +
- " display_scale=log\n" +
- " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n" +
- " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
- +
- // " scale_factor=1.0f\n" + //removed
- " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n" +
- " units=mg m-3\n" +
- " valid_max=1000.0f\n" +
- " valid_min=0.0f\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
-    // poc as raw file values (float; this file is not packed)
- pa = NcHelper.getPrimitiveArray(
- var.read(new Section("(0:4100:1000,0:8100:1000)")), true, false); // start:end:stride,
- // buildStringFromChar, isUnsigned
- Test.ensureEqual(pa.elementTypeString(), "float", "");
- results = pa.toString();
- expected = "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, " +
- "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, " +
- "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, " +
- "-32767.0, -32767.0, 29.476826, -32767.0, -32767.0, -32767.0, 431.7499, -32767.0, " +
- "36.19993, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, -32767.0, " +
- "-32767.0, -32767.0, -32767.0, -32767.0, -32767.0";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // poc as unpacked values (float)
- pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
- Test.ensureEqual(pa.elementTypeString(), "float", "");
- results = pa.toString();
- expected = // standardized mv
- "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, " +
- "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, 29.476826, NaN, NaN, NaN, " +
- "431.7499, NaN, 36.19993, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN";
- Test.ensureEqual(results, expected, "results=\n" + results);
- } finally {
- try {
- if (ncFile != null)
- ncFile.close();
- } catch (Exception e9) {
- }
- }
-
- // **** fileName2 -- packed data: poc is short
- // DumpString
- results = NcHelper.ncdump(fileDir + fileName2, "-h");
- expected = "netcdf A2016241.L3m_DAY_POC_poc_4km.nc {\n" +
- " dimensions:\n" +
- " eightbitcolor = 256;\n" +
- " rgb = 3;\n" +
- " lat = 4320;\n" +
- " lon = 8640;\n" +
- " variables:\n" +
- " short poc(lat=4320, lon=8640);\n" +
- " :long_name = \"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\";\n" +
- " :scale_factor = 0.2f; // float\n" +
- " :add_offset = 6400.0f; // float\n" +
- " :units = \"mg m^-3\";\n" +
- " :standard_name = \"mole_concentration_of_particulate_organic_carbon_in_sea_water\";\n" +
- " :_FillValue = -32767S; // short\n" +
- " :valid_min = -32000S; // short\n" +
- " :valid_max = -27000S; // short\n" +
- " :reference = \"Stramski, D., et al. \\\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\\\" Biogeosciences 5.1 (2008): 171-201.\";\n"
- +
- " :display_scale = \"log\";\n" +
- " :display_min = 10.0f; // float\n" +
- " :display_max = 1000.0f; // float\n" +
- " :_ChunkSizes = 40U, 1729U; // uint\n" +
- "\n" +
- " ubyte palette(rgb=3, eightbitcolor=256);\n" + // 'u' added in netcdf-java 5.2
- // " :_FillValue = -1UB; // byte\n" + //gone in netcdf-java 5.2
- // " :_Unsigned = \"true\";\n" +
- "\n" +
- " float lat(lat=4320);\n" +
- " :long_name = \"Latitude\";\n" +
- " :units = \"degree_north\";\n" +
- " :_FillValue = -999.0f; // float\n" +
- " :valid_min = -90.0f; // float\n" +
- " :valid_max = 90.0f; // float\n" +
- "\n" +
- " float lon(lon=8640);\n" +
- " :long_name = \"Longitude\";\n" +
- " :units = \"degree_east\";\n" +
- " :_FillValue = -999.0f; // float\n" +
- " :valid_min = -180.0f; // float\n" +
- " :valid_max = 180.0f; // float\n";
- Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
-
- ncFile = NcHelper.openFile(fileDir + fileName2);
- try {
-
- // lon
- var = ncFile.findVariable("lon");
- atts = new Attributes();
- NcHelper.getVariableAttributes(var, atts);
- results = atts.toString();
- expected = " _FillValue=-999.0f\n" +
- " long_name=Longitude\n" +
- " units=degree_east\n" + // in file
- " valid_max=180.0f\n" +
- " valid_min=-180.0f\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
- results = atts.toString();
- expected = " _FillValue=NaNf\n" + // converted to PA standard mv
- " long_name=Longitude\n" +
- " units=degrees_east\n" + // stardardized to 's'
- " valid_max=180.0f\n" +
- " valid_min=-180.0f\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // palette
- var = ncFile.findVariable("palette");
- atts = new Attributes();
- NcHelper.getVariableAttributes(var, atts);
- results = atts.toString();
- expected = "";
- // " _FillValue=-1b\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
- results = atts.toString();
- expected = "";
- // " _FillValue=32767s\n"; //byte -> short //converted to PA standard mv. gone
- // in netcdf-java 5.2
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // palette as unsigned byte
- pa = NcHelper.getPrimitiveArray(
- var.read(new int[] { 0, 0 }, new int[] { 1, 10 }), true, true); // origin, shape,
- // buildStringFromChar, isUnsigned
- Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
- results = pa.toString();
- expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // palette ubyte
- pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
- Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
- results = pa.toString();
- expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // poc
- var = ncFile.findVariable("poc");
- atts = new Attributes();
- NcHelper.getVariableAttributes(var, atts);
- results = atts.toString();
- expected = " _ChunkSizes=40ui,1729ui\n" +
- " _FillValue=-32767s\n" +
- " add_offset=6400.0f\n" +
- " display_max=1000.0f\n" +
- " display_min=10.0f\n" +
- " display_scale=log\n" +
- " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n" +
- " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
- +
- " scale_factor=0.2f\n" +
- " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n" +
- " units=mg m^-3\n" + // in file
- " valid_max=-27000s\n" +
- " valid_min=-32000s\n";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
- results = atts.toString();
- expected = " _ChunkSizes=40ui,1729ui\n" +
- " _FillValue=NaNf\n" + // standardized
- // " add_offset=6400.0\n" + //removed
- " display_max=1000.0f\n" +
- " display_min=10.0f\n" +
- " display_scale=log\n" +
- " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n" +
- " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
- +
- // " scale_factor=0.2f\n" + removed
- " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n" +
- " units=mg m-3\n" + // standardized
- " valid_max=1000.0f\n" + // unpacked
- " valid_min=0.0f\n"; // unpacked
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // poc as packed values (shorts)
- pa = NcHelper.getPrimitiveArray( // odd start to catch some data, not just mv
- var.read(new Section("(70:4100:1000,70:8100:1000)")), true, NcHelper.isUnsigned(var)); // start:end:stride,
- // buildStringFromChar
- Test.ensureEqual(pa.elementTypeString(), "short", "");
- results = pa.toString();
- expected = "-32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, " +
- "-32767, -32767, -31518, -32767, -31186, -32767, -32767, -31609, -32767, -32767, " +
- "-32767, -32767, -32767, -32767, -32767, -32767, -31867, -32767, -32767, -32767, " +
- "-32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, " +
- "-32767, -32767, -32767, -32767, -32767";
- Test.ensureEqual(results, expected, "results=\n" + results);
-
- // poc as unpacked values (float)
- pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
- Test.ensureEqual(pa.elementTypeString(), "float", "");
- results = pa.toString();
- expected = "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, 96.4, NaN, 162.8, " +
- "NaN, NaN, 78.2, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, 26.6, NaN, NaN, NaN, " +
- "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN";
- Test.ensureEqual(results, expected, "results=\n" + results);
- } finally {
- try {
- if (ncFile != null)
- ncFile.close();
- } catch (Exception e9) {
- }
- }
-
- // ***** test erdMPOC1day dataset
- EDDGrid eddGrid = (EDDGrid) EDDTestDataset.geterdMPOC1day();
-
- // .das
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "",
- tDir, eddGrid.className(), ".das");
- results = File2.readFromFile88591(tDir + tName)[1];
- expected = "Attributes {\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.0414224e+9, 1.472472e+9;\n" +
- " String axis \"T\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Centered Time\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range -89.97918, 89.97916;\n" + // a test of descending lat axis
- " String axis \"Y\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude\";\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " Float32 valid_max 90.0;\n" +
- " Float32 valid_min -90.0;\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range -179.9792, 179.9792;\n" +
- " String axis \"X\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude\";\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " Float32 valid_max 180.0;\n" +
- " Float32 valid_min -180.0;\n" +
- " }\n" +
- " poc {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float64 colorBarMaximum 1000.0;\n" +
- " Float64 colorBarMinimum 10.0;\n" +
- " String colorBarScale \"Log\";\n" +
- " String ioos_category \"Ocean Color\";\n" +
- " String long_name \"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\";\n" +
- " String references \"Stramski, D., et al. \\\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\\\" Biogeosciences 5.1 (2008): 171-201.\";\n"
- +
- " String standard_name \"mole_concentration_of_particulate_organic_carbon_in_sea_water\";\n" +
- " String units \"mg m-3\";\n" +
- " Float32 valid_max 1000.0;\n" +
- " Float32 valid_min 0.0;\n" +
- " }\n" +
- " NC_GLOBAL {\n" +
- " String _lastModified \"YYYY-MM-DDThh:mm:ss.000Z\";\n" +
- " String cdm_data_type \"Grid\";\n" +
- " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n" +
- " String creator_email \"data@oceancolor.gsfc.nasa.gov\";\n" +
- " String creator_name \"NASA/GSFC/OBPG\";\n" +
- " String creator_type \"group\";\n" +
- " String creator_url \"https://oceandata.sci.gsfc.nasa.gov\";\n" +
- " String date_created \"YYYY-MM-DDThh:mm:ss.000Z\";\n" +
- " Float64 Easternmost_Easting 179.9792;\n" +
- " Float64 geospatial_lat_max 89.97916;\n" +
- " Float64 geospatial_lat_min -89.97918;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max 179.9792;\n" +
- " Float64 geospatial_lon_min -179.9792;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " String grid_mapping_name \"latitude_longitude\";\n" +
- " String history \"Datafiles are downloaded ASAP from https://oceandata.sci.gsfc.nasa.gov/MODIS-Aqua/L3SMI to NOAA NMFS SWFSC ERD.\n"
- +
- "NOAA NMFS SWFSC ERD (erd.data@noaa.gov) uses ERDDAP to add the time variable and slightly modify the metadata.\n"
- +
- "Direct read of HDF4 file through CDM library.\n";
- results = results.replaceAll("String _lastModified \"....-..-..T..:..:...000Z", "String _lastModified \"YYYY-MM-DDThh:mm:ss.000Z");
- results = results.replaceAll("String date_created \"....-..-..T..:..:...000Z",
- "String date_created \"YYYY-MM-DDThh:mm:ss.000Z");
- tResults = results.substring(0, Math.min(results.length(), expected.length()));
- Test.ensureEqual(tResults, expected, "\nresults=\n" + results);
-
- expected =
- // "2015-10-30T18:17:10Z (local files)
- // 2015-10-30T18:17:10Z
- // http://localhost:8080/cwexperimental/griddap/testUInt16File.das";
- " String identifier_product_doi \"10.5067/AQUA/MODIS_OC.2014.0\";\n" +
- " String identifier_product_doi_authority \"https://dx.doi.org\";\n" +
- " String infoUrl \"https://coastwatch.pfeg.noaa.gov/infog/MPOC_las.html\";\n" +
- " String institution \"NASA/GSFC OBPG\";\n" +
- " String instrument \"MODIS\";\n" +
- " String keywords \"443/555, biology, carbon, center, chemistry, chlorophyll, color, concentration, data, Earth Science > Oceans > Ocean Chemistry > Chlorophyll, Earth Science > Oceans > Ocean Optics > Ocean Color, ecology, flight, goddard, group, gsfc, image, imaging, L3, laboratory, level, level-3, mapped, moderate, modis, mole, mole_concentration_of_particulate_organic_carbon_in_sea_water, nasa, ocean, ocean color, oceans, optics, organic, particulate, poc, processing, resolution, sea, seawater, smi, space, spectroradiometer, standard, stramski, time, version, water\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String l2_flag_names \"ATMFAIL,LAND,HILT,HISATZEN,STRAYLIGHT,CLDICE,COCCOLITH,LOWLW,CHLWARN,CHLFAIL,NAVWARN,MAXAERITER,ATMWARN,HISOLZEN,NAVFAIL,FILTER,HIGLINT\";\n"
- +
- " String license \"https://science.nasa.gov/earth-science/earth-science-data/data-information-policy/\n"
- +
- "The data may be used and redistributed for free but is not intended\n" +
- "for legal use, since it may contain inaccuracies. Neither the data\n" +
- "Contributor, ERD, NOAA, nor the United States Government, nor any\n" +
- "of their employees or contractors, makes any warranty, express or\n" +
- "implied, including warranties of merchantability and fitness for a\n" +
- "particular purpose, or assumes any legal liability for the accuracy,\n" +
- "completeness, or usefulness, of this information.\";\n" +
- " String map_projection \"Equidistant Cylindrical\";\n" +
- " String measure \"Mean\";\n" +
- " String naming_authority \"gov.noaa.pfeg.coastwatch\";\n" +
- " Float64 Northernmost_Northing 89.97916;\n" +
- " String platform \"Aqua\";\n" +
- " String processing_level \"L3 Mapped\";\n" +
- " String processing_version \"VERSION\";\n" +
- " String project \"Ocean Biology Processing Group (NASA/GSFC/OBPG)\";\n" +
- " String publisher_email \"erd.data@noaa.gov\";\n" +
- " String publisher_name \"NOAA NMFS SWFSC ERD\";\n" +
- " String publisher_type \"institution\";\n" +
- " String publisher_url \"https://www.pfeg.noaa.gov\";\n" +
- " String sourceUrl \"(local files)\";\n" +
- " Float64 Southernmost_Northing -89.97918;\n" +
- " String spatialResolution \"4.64 km\";\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v70\";\n" +
- " String summary \"MODIS Aqua, Level-3 Standard Mapped Image (SMI), Global, 4km, Particulate Organic Carbon (POC) (1 Day Composite)\";\n"
- +
- " String temporal_range \"day\";\n" +
- " String testOutOfDate \"now-4days\";\n" +
- " String time_coverage_end \"2016-08-29T12:00:00Z\";\n" +
- " String time_coverage_start \"2003-01-01T12:00:00Z\";\n" +
- " String title \"MODIS Aqua, Level-3 SMI, Global, 4km, Particulate Organic Carbon, 2003-present (1 Day Composite)\";\n"
- +
- " Float64 Westernmost_Easting -179.9792;\n" +
- " }\n" +
- "}\n";
- results = results.replaceAll("String processing_version \\\"[\\w\\d]+(\\.[\\w\\d]+)*\\\"", "String processing_version \\\"VERSION\\\"");
- int tpo = results.indexOf(expected.substring(0, 50));
- Test.ensureTrue(tpo >= 0, "tpo=-1 results=\n" + results);
- Test.ensureEqual(
- results.substring(tpo, Math.min(results.length(), tpo + expected.length())),
- expected, "results=\n" + results);
-
- // .dds dds isn't affected by userDapQuery
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, "",
- tDir, eddGrid.className(), ".dds");
- results = File2.directReadFrom88591File(
- tDir + tName);
- expected = "Dataset {\n" +
- " Float64 time[time = 4];\n" + // 2 unpacked files + 2 packed files
- " Float32 latitude[latitude = 4320];\n" +
- " Float32 longitude[longitude = 8640];\n" +
- " GRID {\n" +
- " ARRAY:\n" +
- " Float32 poc[time = 4][latitude = 4320][longitude = 8640];\n" +
- " MAPS:\n" +
- " Float64 time[time = 4];\n" +
- " Float32 latitude[latitude = 4320];\n" +
- " Float32 longitude[longitude = 8640];\n" +
- " } poc;\n" +
- "} erdMPOC1day;\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // .csv time values
- userDapQuery = "time";
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, userDapQuery,
- tDir, eddGrid.className() + "time", ".csv");
- results = File2.directReadFrom88591File(
- tDir + tName);
- String2.log(results);
- expected = "time\n" +
- "UTC\n" +
- "2003-01-01T12:00:00Z\n" +
- "2003-01-02T12:00:00Z\n" +
- "2016-08-28T12:00:00Z\n" +
- "2016-08-29T12:00:00Z\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // .csv poc values
- userDapQuery = "poc[(2003-01-01T12:00:00Z)][0:1000:4000][0:1000:8000]"; // match direct read above
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, userDapQuery,
- tDir, eddGrid.className() + "poc1", ".csv");
- results = File2.directReadFrom88591File(
- tDir + tName);
- String2.log(results);
- expected = "time,latitude,longitude,poc\n" +
- "UTC,degrees_north,degrees_east,mg m-3\n" +
- "2003-01-01T12:00:00Z,89.979164,-179.97917,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,-138.3125,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,-96.64583,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,-54.979168,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,-13.312495,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,28.354177,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,70.020836,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,111.68752,NaN\n" +
- "2003-01-01T12:00:00Z,89.979164,153.35417,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,-179.97917,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,-138.3125,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,-96.64583,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,-54.979168,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,-13.312495,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,28.354177,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,70.020836,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,111.68752,NaN\n" +
- "2003-01-01T12:00:00Z,48.3125,153.35417,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,-179.97917,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,-138.3125,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,-96.64583,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,-54.979168,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,-13.312495,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,28.354177,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,70.020836,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,111.68752,NaN\n" +
- "2003-01-01T12:00:00Z,6.6458306,153.35417,29.476826\n" +
- "2003-01-01T12:00:00Z,-35.020832,-179.97917,NaN\n" +
- "2003-01-01T12:00:00Z,-35.020832,-138.3125,NaN\n" +
- "2003-01-01T12:00:00Z,-35.020832,-96.64583,NaN\n" +
- "2003-01-01T12:00:00Z,-35.020832,-54.979168,431.7499\n" +
- "2003-01-01T12:00:00Z,-35.020832,-13.312495,NaN\n" +
- "2003-01-01T12:00:00Z,-35.020832,28.354177,36.19993\n" +
- "2003-01-01T12:00:00Z,-35.020832,70.020836,NaN\n" +
- "2003-01-01T12:00:00Z,-35.020832,111.68752,NaN\n" +
- "2003-01-01T12:00:00Z,-35.020832,153.35417,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,-179.97917,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,-138.3125,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,-96.64583,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,-54.979168,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,-13.312495,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,28.354177,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,70.020836,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,111.68752,NaN\n" +
- "2003-01-01T12:00:00Z,-76.68751,153.35417,NaN\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // .csv poc values 70:4100:1000,70:8100:1000
- userDapQuery = "poc[(2016-08-28T12:00:00Z)][70:1000:4100][70:1000:8100]"; // match direct read above
- tName = eddGrid.makeNewFileForDapQuery(language, null, null, userDapQuery,
- tDir, eddGrid.className() + "poc2", ".csv");
- results = File2.directReadFrom88591File(
- tDir + tName);
- String2.log(results);
- expected = "time,latitude,longitude,poc\n" +
- "UTC,degrees_north,degrees_east,mg m-3\n" +
- "2016-08-28T12:00:00Z,87.0625,-177.0625,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,-135.39583,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,-93.729164,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,-52.062496,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,-10.3958235,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,31.270834,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,72.93751,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,114.60418,NaN\n" +
- "2016-08-28T12:00:00Z,87.0625,156.27083,NaN\n" +
- "2016-08-28T12:00:00Z,45.395832,-177.0625,NaN\n" +
- "2016-08-28T12:00:00Z,45.395832,-135.39583,NaN\n" +
- "2016-08-28T12:00:00Z,45.395832,-93.729164,NaN\n" +
- "2016-08-28T12:00:00Z,45.395832,-52.062496,96.4\n" +
- "2016-08-28T12:00:00Z,45.395832,-10.3958235,NaN\n" +
- "2016-08-28T12:00:00Z,45.395832,31.270834,162.8\n" +
- "2016-08-28T12:00:00Z,45.395832,72.93751,NaN\n" +
- "2016-08-28T12:00:00Z,45.395832,114.60418,NaN\n" +
- "2016-08-28T12:00:00Z,45.395832,156.27083,78.2\n" +
- "2016-08-28T12:00:00Z,3.7291667,-177.0625,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,-135.39583,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,-93.729164,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,-52.062496,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,-10.3958235,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,31.270834,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,72.93751,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,114.60418,NaN\n" +
- "2016-08-28T12:00:00Z,3.7291667,156.27083,26.6\n" +
- "2016-08-28T12:00:00Z,-37.937504,-177.0625,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,-135.39583,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,-93.729164,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,-52.062496,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,-10.3958235,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,31.270834,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,72.93751,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,114.60418,NaN\n" +
- "2016-08-28T12:00:00Z,-37.937504,156.27083,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,-177.0625,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,-135.39583,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,-93.729164,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,-52.062496,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,-10.3958235,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,31.270834,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,72.93751,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,114.60418,NaN\n" +
- "2016-08-28T12:00:00Z,-79.60418,156.27083,NaN\n";
- Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
- // display an image
- String2.log("\n\n* PNG ");
- String baseName = eddGrid.className() + "_testMissingValue";
- tName = eddGrid.makeNewFileForDapQuery(language, null, null,
- "poc[(2016-08-28T12:00:00Z)][][]",
- Image2Tests.urlToAbsolutePath(Image2Tests.OBS_DIR), baseName, ".png");
- // Test.displayInBrowser("file://" + tDir + tName);
- Image2Tests.testImagesIdentical(
- tName,
- baseName + ".png",
- baseName + "_diff.png");
-
- NcHelper.debugMode = oDebugMode;
+ // **** fileName2 -- packed data: poc is short
+ // DumpString
+ results = NcHelper.ncdump(fileDir + fileName2, "-h");
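+    // "-h" requests just the header (dimensions, variables, attributes), no data values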
+ expected =
+ "netcdf A2016241.L3m_DAY_POC_poc_4km.nc {\n"
+ + " dimensions:\n"
+ + " eightbitcolor = 256;\n"
+ + " rgb = 3;\n"
+ + " lat = 4320;\n"
+ + " lon = 8640;\n"
+ + " variables:\n"
+ + " short poc(lat=4320, lon=8640);\n"
+ + " :long_name = \"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\";\n"
+ + " :scale_factor = 0.2f; // float\n"
+ + " :add_offset = 6400.0f; // float\n"
+ + " :units = \"mg m^-3\";\n"
+ + " :standard_name = \"mole_concentration_of_particulate_organic_carbon_in_sea_water\";\n"
+ + " :_FillValue = -32767S; // short\n"
+ + " :valid_min = -32000S; // short\n"
+ + " :valid_max = -27000S; // short\n"
+ + " :reference = \"Stramski, D., et al. \\\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\\\" Biogeosciences 5.1 (2008): 171-201.\";\n"
+ + " :display_scale = \"log\";\n"
+ + " :display_min = 10.0f; // float\n"
+ + " :display_max = 1000.0f; // float\n"
+ + " :_ChunkSizes = 40U, 1729U; // uint\n"
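+              // poc is packed: unpacked = packed * scale_factor + add_offset, i.e. v*0.2f + 6400.0f,
+              // so valid_min -32000s -> 0.0f and valid_max -27000s -> 1000.0f (verified below)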
+ + "\n"
+ + " ubyte palette(rgb=3, eightbitcolor=256);\n"
+ + // 'u' added in netcdf-java 5.2
+ // " :_FillValue = -1UB; // byte\n" + //gone in netcdf-java 5.2
+ // " :_Unsigned = \"true\";\n" +
+ "\n"
+ + " float lat(lat=4320);\n"
+ + " :long_name = \"Latitude\";\n"
+ + " :units = \"degree_north\";\n"
+ + " :_FillValue = -999.0f; // float\n"
+ + " :valid_min = -90.0f; // float\n"
+ + " :valid_max = 90.0f; // float\n"
+ + "\n"
+ + " float lon(lon=8640);\n"
+ + " :long_name = \"Longitude\";\n"
+ + " :units = \"degree_east\";\n"
+ + " :_FillValue = -999.0f; // float\n"
+ + " :valid_min = -180.0f; // float\n"
+ + " :valid_max = 180.0f; // float\n";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
+
+ ncFile = NcHelper.openFile(fileDir + fileName2);
+ try {
+
+ // lon
+ var = ncFile.findVariable("lon");
+ atts = new Attributes();
+ NcHelper.getVariableAttributes(var, atts);
+ results = atts.toString();
+ expected =
+ " _FillValue=-999.0f\n"
+ + " long_name=Longitude\n"
+ + " units=degree_east\n"
+ + // in file
+ " valid_max=180.0f\n"
+ + " valid_min=-180.0f\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
+ results = atts.toString();
+ expected =
+ " _FillValue=NaNf\n"
+ + // converted to PA standard mv
+ " long_name=Longitude\n"
+ + " units=degrees_east\n"
+          + // standardized: plural 's' added (degree_east -> degrees_east)
+ " valid_max=180.0f\n"
+ + " valid_min=-180.0f\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // palette
+ var = ncFile.findVariable("palette");
+ atts = new Attributes();
+ NcHelper.getVariableAttributes(var, atts);
+ results = atts.toString();
+ expected = "";
+ // " _FillValue=-1b\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
+ results = atts.toString();
+ expected = "";
+ // " _FillValue=32767s\n"; //byte -> short //converted to PA standard mv. gone
+ // in netcdf-java 5.2
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // palette as unsigned byte
+ pa =
+ NcHelper.getPrimitiveArray(
+ var.read(new int[] {0, 0}, new int[] {1, 10}), true, true); // origin, shape,
+ // buildStringFromChar, isUnsigned
+ Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
+ results = pa.toString();
+ expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // palette ubyte
+ pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
+ Test.ensureEqual(pa.elementTypeString(), "ubyte", "");
+ results = pa.toString();
+ expected = "147, 0, 108, 144, 0, 111, 141, 0, 114, 138";
+ Test.ensureEqual(results, expected, "results=\n" + results);
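+      // palette has no scale_factor/add_offset (its atts were empty above),
+      // so unpackPA leaves the ubyte values unchanged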
+
+ // poc
+ var = ncFile.findVariable("poc");
+ atts = new Attributes();
+ NcHelper.getVariableAttributes(var, atts);
+ results = atts.toString();
+ expected =
+ " _ChunkSizes=40ui,1729ui\n"
+ + " _FillValue=-32767s\n"
+ + " add_offset=6400.0f\n"
+ + " display_max=1000.0f\n"
+ + " display_min=10.0f\n"
+ + " display_scale=log\n"
+ + " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n"
+ + " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
+ + " scale_factor=0.2f\n"
+ + " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n"
+ + " units=mg m^-3\n"
+ + // in file
+ " valid_max=-27000s\n"
+ + " valid_min=-32000s\n";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ Units2.unpackVariableAttributes(atts, var.getFullName(), NcHelper.getElementPAType(var));
+ results = atts.toString();
+ expected =
+ " _ChunkSizes=40ui,1729ui\n"
+ + " _FillValue=NaNf\n"
+ + // standardized
+ // " add_offset=6400.0\n" + //removed
+ " display_max=1000.0f\n"
+ + " display_min=10.0f\n"
+ + " display_scale=log\n"
+ + " long_name=\"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\"\n"
+ + " reference=\"Stramski, D., et al. \"\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\"\" Biogeosciences 5.1 (2008): 171-201.\"\n"
+ +
+          // " scale_factor=0.2f\n" + //removed
+ " standard_name=mole_concentration_of_particulate_organic_carbon_in_sea_water\n"
+ + " units=mg m-3\n"
+ + // standardized
+ " valid_max=1000.0f\n"
+ + // unpacked
+ " valid_min=0.0f\n"; // unpacked
+ Test.ensureEqual(results, expected, "results=\n" + results);
+
+ // poc as packed values (shorts)
+ pa =
+ NcHelper.getPrimitiveArray( // odd start to catch some data, not just mv
+ var.read(new Section("(70:4100:1000,70:8100:1000)")),
+ true,
+ NcHelper.isUnsigned(var)); // start:end:stride,
+ // buildStringFromChar
+ Test.ensureEqual(pa.elementTypeString(), "short", "");
+ results = pa.toString();
+ expected =
+ "-32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, "
+ + "-32767, -32767, -31518, -32767, -31186, -32767, -32767, -31609, -32767, -32767, "
+ + "-32767, -32767, -32767, -32767, -32767, -32767, -31867, -32767, -32767, -32767, "
+ + "-32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, -32767, "
+ + "-32767, -32767, -32767, -32767, -32767";
+ Test.ensureEqual(results, expected, "results=\n" + results);
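+      // the non-fill shorts above unpack via v*0.2f + 6400.0f:
+      // -31518 -> 96.4f, -31186 -> 162.8f, -31609 -> 78.2f, -31867 -> 26.6f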
+
+ // poc as unpacked values (float)
+ pa = NcHelper.unpackPA(var, pa, true, true); // lookForStringTimes, lookForUnsigned
+ Test.ensureEqual(pa.elementTypeString(), "float", "");
+ results = pa.toString();
+ expected =
+ "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, 96.4, NaN, 162.8, "
+ + "NaN, NaN, 78.2, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, 26.6, NaN, NaN, NaN, "
+ + "NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN";
+ Test.ensureEqual(results, expected, "results=\n" + results);
+ } finally {
+ try {
+ if (ncFile != null) ncFile.close();
+ } catch (Exception e9) {
+ }
}
+
+ // ***** test erdMPOC1day dataset
+ EDDGrid eddGrid = (EDDGrid) EDDTestDataset.geterdMPOC1day();
+
+ // .das
+ tName =
+ eddGrid.makeNewFileForDapQuery(language, null, null, "", tDir, eddGrid.className(), ".das");
+ results = File2.readFromFile88591(tDir + tName)[1];
+ expected =
+ "Attributes {\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.0414224e+9, 1.472472e+9;\n"
+ + " String axis \"T\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Centered Time\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range -89.97918, 89.97916;\n"
+ + // a test of descending lat axis
+ " String axis \"Y\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude\";\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " Float32 valid_max 90.0;\n"
+ + " Float32 valid_min -90.0;\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range -179.9792, 179.9792;\n"
+ + " String axis \"X\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude\";\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " Float32 valid_max 180.0;\n"
+ + " Float32 valid_min -180.0;\n"
+ + " }\n"
+ + " poc {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float64 colorBarMaximum 1000.0;\n"
+ + " Float64 colorBarMinimum 10.0;\n"
+ + " String colorBarScale \"Log\";\n"
+ + " String ioos_category \"Ocean Color\";\n"
+ + " String long_name \"Particulate Organic Carbon, D. Stramski, 2007 (443/555 version)\";\n"
+ + " String references \"Stramski, D., et al. \\\"Relationships between the surface concentration of particulate organic carbon and optical properties in the eastern South Pacific and eastern Atlantic Oceans.\\\" Biogeosciences 5.1 (2008): 171-201.\";\n"
+ + " String standard_name \"mole_concentration_of_particulate_organic_carbon_in_sea_water\";\n"
+ + " String units \"mg m-3\";\n"
+ + " Float32 valid_max 1000.0;\n"
+ + " Float32 valid_min 0.0;\n"
+ + " }\n"
+ + " NC_GLOBAL {\n"
+ + " String _lastModified \"YYYY-MM-DDThh:mm:ss.000Z\";\n"
+ + " String cdm_data_type \"Grid\";\n"
+ + " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n"
+ + " String creator_email \"data@oceancolor.gsfc.nasa.gov\";\n"
+ + " String creator_name \"NASA/GSFC/OBPG\";\n"
+ + " String creator_type \"group\";\n"
+ + " String creator_url \"https://oceandata.sci.gsfc.nasa.gov\";\n"
+ + " String date_created \"YYYY-MM-DDThh:mm:ss.000Z\";\n"
+ + " Float64 Easternmost_Easting 179.9792;\n"
+ + " Float64 geospatial_lat_max 89.97916;\n"
+ + " Float64 geospatial_lat_min -89.97918;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max 179.9792;\n"
+ + " Float64 geospatial_lon_min -179.9792;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " String grid_mapping_name \"latitude_longitude\";\n"
+ + " String history \"Datafiles are downloaded ASAP from https://oceandata.sci.gsfc.nasa.gov/MODIS-Aqua/L3SMI to NOAA NMFS SWFSC ERD.\n"
+ + "NOAA NMFS SWFSC ERD (erd.data@noaa.gov) uses ERDDAP to add the time variable and slightly modify the metadata.\n"
+ + "Direct read of HDF4 file through CDM library.\n";
+ results =
+ results.replaceAll(
+ "String _lastModified \"....-..-..T..:..:...000Z",
+ "String _lastModified \"YYYY-MM-DDThh:mm:ss.000Z");
+ results =
+ results.replaceAll(
+ "String date_created \"....-..-..T..:..:...000Z",
+ "String date_created \"YYYY-MM-DDThh:mm:ss.000Z");
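+    // _lastModified and date_created change whenever the source files are regenerated,
+    // so both are masked with fixed placeholders before comparison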
+ tResults = results.substring(0, Math.min(results.length(), expected.length()));
+ Test.ensureEqual(tResults, expected, "\nresults=\n" + results);
+
+ expected =
+ // "2015-10-30T18:17:10Z (local files)
+ // 2015-10-30T18:17:10Z
+ // http://localhost:8080/cwexperimental/griddap/testUInt16File.das";
+ " String identifier_product_doi \"10.5067/AQUA/MODIS_OC.2014.0\";\n"
+ + " String identifier_product_doi_authority \"https://dx.doi.org\";\n"
+ + " String infoUrl \"https://coastwatch.pfeg.noaa.gov/infog/MPOC_las.html\";\n"
+ + " String institution \"NASA/GSFC OBPG\";\n"
+ + " String instrument \"MODIS\";\n"
+ + " String keywords \"443/555, biology, carbon, center, chemistry, chlorophyll, color, concentration, data, Earth Science > Oceans > Ocean Chemistry > Chlorophyll, Earth Science > Oceans > Ocean Optics > Ocean Color, ecology, flight, goddard, group, gsfc, image, imaging, L3, laboratory, level, level-3, mapped, moderate, modis, mole, mole_concentration_of_particulate_organic_carbon_in_sea_water, nasa, ocean, ocean color, oceans, optics, organic, particulate, poc, processing, resolution, sea, seawater, smi, space, spectroradiometer, standard, stramski, time, version, water\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String l2_flag_names \"ATMFAIL,LAND,HILT,HISATZEN,STRAYLIGHT,CLDICE,COCCOLITH,LOWLW,CHLWARN,CHLFAIL,NAVWARN,MAXAERITER,ATMWARN,HISOLZEN,NAVFAIL,FILTER,HIGLINT\";\n"
+ + " String license \"https://science.nasa.gov/earth-science/earth-science-data/data-information-policy/\n"
+ + "The data may be used and redistributed for free but is not intended\n"
+ + "for legal use, since it may contain inaccuracies. Neither the data\n"
+ + "Contributor, ERD, NOAA, nor the United States Government, nor any\n"
+ + "of their employees or contractors, makes any warranty, express or\n"
+ + "implied, including warranties of merchantability and fitness for a\n"
+ + "particular purpose, or assumes any legal liability for the accuracy,\n"
+ + "completeness, or usefulness, of this information.\";\n"
+ + " String map_projection \"Equidistant Cylindrical\";\n"
+ + " String measure \"Mean\";\n"
+ + " String naming_authority \"gov.noaa.pfeg.coastwatch\";\n"
+ + " Float64 Northernmost_Northing 89.97916;\n"
+ + " String platform \"Aqua\";\n"
+ + " String processing_level \"L3 Mapped\";\n"
+ + " String processing_version \"VERSION\";\n"
+ + " String project \"Ocean Biology Processing Group (NASA/GSFC/OBPG)\";\n"
+ + " String publisher_email \"erd.data@noaa.gov\";\n"
+ + " String publisher_name \"NOAA NMFS SWFSC ERD\";\n"
+ + " String publisher_type \"institution\";\n"
+ + " String publisher_url \"https://www.pfeg.noaa.gov\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " Float64 Southernmost_Northing -89.97918;\n"
+ + " String spatialResolution \"X.X km\";\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v70\";\n"
+ + " String summary \"MODIS Aqua, Level-3 Standard Mapped Image (SMI), Global, 4km, Particulate Organic Carbon (POC) (1 Day Composite)\";\n"
+ + " String temporal_range \"day\";\n"
+ + " String testOutOfDate \"now-4days\";\n"
+ + " String time_coverage_end \"2016-08-29T12:00:00Z\";\n"
+ + " String time_coverage_start \"2003-01-01T12:00:00Z\";\n"
+ + " String title \"MODIS Aqua, Level-3 SMI, Global, 4km, Particulate Organic Carbon, 2003-present (1 Day Composite)\";\n"
+ + " Float64 Westernmost_Easting -179.9792;\n"
+ + " }\n"
+ + "}\n";
+ results =
+ results.replaceAll(
+ "String processing_version \\\"[\\w\\d]+(\\.[\\w\\d]+)*\\\"",
+ "String processing_version \\\"VERSION\\\"");
+ results =
+ results.replaceAll(
+            "String spatialResolution \\\"[0-9]+\\.[0-9]+ km\\\"",
+ "String spatialResolution \"X.X km\"");
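+    // processing_version and spatialResolution also vary between versions of the
+    // source files, so both are normalized to placeholders as well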
+ int tpo = results.indexOf(expected.substring(0, 50));
+ Test.ensureTrue(tpo >= 0, "tpo=-1 results=\n" + results);
+ Test.ensureEqual(
+ results.substring(tpo, Math.min(results.length(), tpo + expected.length())),
+ expected,
+ "results=\n" + results);
+
+ // .dds dds isn't affected by userDapQuery
+ tName =
+ eddGrid.makeNewFileForDapQuery(language, null, null, "", tDir, eddGrid.className(), ".dds");
+ results = File2.directReadFrom88591File(tDir + tName);
+ expected =
+ "Dataset {\n"
+ + " Float64 time[time = 4];\n"
+ + // 2 unpacked files + 2 packed files
+ " Float32 latitude[latitude = 4320];\n"
+ + " Float32 longitude[longitude = 8640];\n"
+ + " GRID {\n"
+ + " ARRAY:\n"
+ + " Float32 poc[time = 4][latitude = 4320][longitude = 8640];\n"
+ + " MAPS:\n"
+ + " Float64 time[time = 4];\n"
+ + " Float32 latitude[latitude = 4320];\n"
+ + " Float32 longitude[longitude = 8640];\n"
+ + " } poc;\n"
+ + "} erdMPOC1day;\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ // .csv time values
+ userDapQuery = "time";
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, tDir, eddGrid.className() + "time", ".csv");
+ results = File2.directReadFrom88591File(tDir + tName);
+ String2.log(results);
+ expected =
+ "time\n"
+ + "UTC\n"
+ + "2003-01-01T12:00:00Z\n"
+ + "2003-01-02T12:00:00Z\n"
+ + "2016-08-28T12:00:00Z\n"
+ + "2016-08-29T12:00:00Z\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ // .csv poc values
+ userDapQuery =
+ "poc[(2003-01-01T12:00:00Z)][0:1000:4000][0:1000:8000]"; // match direct read above
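+    // griddap [start:stride:stop]: 5 lats (0,1000,2000,3000,4000) x 9 lons -> 45 rows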
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, tDir, eddGrid.className() + "poc1", ".csv");
+ results = File2.directReadFrom88591File(tDir + tName);
+ String2.log(results);
+ expected =
+ "time,latitude,longitude,poc\n"
+ + "UTC,degrees_north,degrees_east,mg m-3\n"
+ + "2003-01-01T12:00:00Z,89.979164,-179.97917,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,-138.3125,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,-96.64583,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,-54.979168,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,-13.312495,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,28.354177,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,70.020836,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,111.68752,NaN\n"
+ + "2003-01-01T12:00:00Z,89.979164,153.35417,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,-179.97917,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,-138.3125,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,-96.64583,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,-54.979168,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,-13.312495,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,28.354177,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,70.020836,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,111.68752,NaN\n"
+ + "2003-01-01T12:00:00Z,48.3125,153.35417,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,-179.97917,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,-138.3125,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,-96.64583,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,-54.979168,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,-13.312495,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,28.354177,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,70.020836,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,111.68752,NaN\n"
+ + "2003-01-01T12:00:00Z,6.6458306,153.35417,29.476826\n"
+ + "2003-01-01T12:00:00Z,-35.020832,-179.97917,NaN\n"
+ + "2003-01-01T12:00:00Z,-35.020832,-138.3125,NaN\n"
+ + "2003-01-01T12:00:00Z,-35.020832,-96.64583,NaN\n"
+ + "2003-01-01T12:00:00Z,-35.020832,-54.979168,431.7499\n"
+ + "2003-01-01T12:00:00Z,-35.020832,-13.312495,NaN\n"
+ + "2003-01-01T12:00:00Z,-35.020832,28.354177,36.19993\n"
+ + "2003-01-01T12:00:00Z,-35.020832,70.020836,NaN\n"
+ + "2003-01-01T12:00:00Z,-35.020832,111.68752,NaN\n"
+ + "2003-01-01T12:00:00Z,-35.020832,153.35417,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,-179.97917,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,-138.3125,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,-96.64583,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,-54.979168,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,-13.312495,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,28.354177,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,70.020836,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,111.68752,NaN\n"
+ + "2003-01-01T12:00:00Z,-76.68751,153.35417,NaN\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ // .csv poc values 70:4100:1000,70:8100:1000
+ userDapQuery =
+ "poc[(2016-08-28T12:00:00Z)][70:1000:4100][70:1000:8100]"; // match direct read above
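+    // same cells as the Section("(70:4100:1000,70:8100:1000)") direct read of fileName2
+    // above (griddap order is [start:stride:stop]; ucar Section order is start:end:stride)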
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, tDir, eddGrid.className() + "poc2", ".csv");
+ results = File2.directReadFrom88591File(tDir + tName);
+ String2.log(results);
+ expected =
+ "time,latitude,longitude,poc\n"
+ + "UTC,degrees_north,degrees_east,mg m-3\n"
+ + "2016-08-28T12:00:00Z,87.0625,-177.0625,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,-135.39583,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,-93.729164,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,-52.062496,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,-10.3958235,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,31.270834,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,72.93751,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,114.60418,NaN\n"
+ + "2016-08-28T12:00:00Z,87.0625,156.27083,NaN\n"
+ + "2016-08-28T12:00:00Z,45.395832,-177.0625,NaN\n"
+ + "2016-08-28T12:00:00Z,45.395832,-135.39583,NaN\n"
+ + "2016-08-28T12:00:00Z,45.395832,-93.729164,NaN\n"
+ + "2016-08-28T12:00:00Z,45.395832,-52.062496,96.4\n"
+ + "2016-08-28T12:00:00Z,45.395832,-10.3958235,NaN\n"
+ + "2016-08-28T12:00:00Z,45.395832,31.270834,162.8\n"
+ + "2016-08-28T12:00:00Z,45.395832,72.93751,NaN\n"
+ + "2016-08-28T12:00:00Z,45.395832,114.60418,NaN\n"
+ + "2016-08-28T12:00:00Z,45.395832,156.27083,78.2\n"
+ + "2016-08-28T12:00:00Z,3.7291667,-177.0625,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,-135.39583,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,-93.729164,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,-52.062496,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,-10.3958235,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,31.270834,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,72.93751,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,114.60418,NaN\n"
+ + "2016-08-28T12:00:00Z,3.7291667,156.27083,26.6\n"
+ + "2016-08-28T12:00:00Z,-37.937504,-177.0625,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,-135.39583,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,-93.729164,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,-52.062496,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,-10.3958235,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,31.270834,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,72.93751,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,114.60418,NaN\n"
+ + "2016-08-28T12:00:00Z,-37.937504,156.27083,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,-177.0625,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,-135.39583,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,-93.729164,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,-52.062496,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,-10.3958235,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,31.270834,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,72.93751,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,114.60418,NaN\n"
+ + "2016-08-28T12:00:00Z,-79.60418,156.27083,NaN\n";
+ Test.ensureEqual(results, expected, "\nresults=\n" + results);
+
+ // display an image
+ String2.log("\n\n* PNG ");
+ String baseName = eddGrid.className() + "_testMissingValue";
+ tName =
+ eddGrid.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "poc[(2016-08-28T12:00:00Z)][][]",
+ Image2Tests.urlToAbsolutePath(Image2Tests.OBS_DIR),
+ baseName,
+ ".png");
+ // Test.displayInBrowser("file://" + tDir + tName);
+ Image2Tests.testImagesIdentical(tName, baseName + ".png", baseName + "_diff.png");
+
+ NcHelper.debugMode = oDebugMode;
+ }
}
diff --git a/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFilesTests.java b/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFilesTests.java
index 9fc7f4ec..f869726d 100644
--- a/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFilesTests.java
+++ b/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromMultidimNcFilesTests.java
@@ -1,21 +1,22 @@
package gov.noaa.pfel.erddap.dataset;
-import java.nio.file.Path;
-
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.ValueSource;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import com.cohort.util.Calendar2;
import com.cohort.util.File2;
import com.cohort.util.Math2;
import com.cohort.util.String2;
import com.cohort.util.Test;
-
import gov.noaa.pfel.coastwatch.griddata.NcHelper;
import gov.noaa.pfel.erddap.GenerateDatasetsXml;
import gov.noaa.pfel.erddap.util.EDStatic;
import gov.noaa.pfel.erddap.variable.EDV;
+import java.io.BufferedOutputStream;
+import java.io.FileOutputStream;
+import java.nio.file.Path;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
import tags.TagFlaky;
import tags.TagIncompleteTest;
import tags.TagMissingDataset;
@@ -29,818 +30,832 @@ static void init() {
}
/**
- * testGenerateDatasetsXml.
- * This doesn't test suggestTestOutOfDate, except that for old data
- * it doesn't suggest anything.
+ * testGenerateDatasetsXml. This doesn't test suggestTestOutOfDate, except that for old data it
+ * doesn't suggest anything.
*/
@org.junit.jupiter.api.Test
void testGenerateDatasetsXml() throws Throwable {
// testVerboseOn();
- String dataDir = File2.addSlash(Path.of(
- EDDTableFromMultidimNcFilesTests.class.getResource("/data/nc/").toURI()).toString());
+ String dataDir =
+ File2.addSlash(
+ Path.of(EDDTableFromMultidimNcFilesTests.class.getResource("/data/nc/").toURI())
+ .toString());
String fileNameRegex = ".*_prof\\.nc";
String useDimensionsCSV = "N_PROF, N_LEVELS";
- String results = EDDTableFromMultidimNcFiles.generateDatasetsXml(
- dataDir,
- fileNameRegex,
- "",
- useDimensionsCSV,
- 1440,
- "^", "_prof.nc$", ".*", "fileNumber", // just for test purposes
- true, // removeMVRows
- "FLOAT_SERIAL_NO JULD", // sort files by
- "", "", "", "",
- -1, // defaultStandardizeWhat
- "", // treatDimensionsAs
- null, // cacheFromUrl
- null) + "\n";
+ String results =
+ EDDTableFromMultidimNcFiles.generateDatasetsXml(
+ dataDir,
+ fileNameRegex,
+ "",
+ useDimensionsCSV,
+ 1440,
+ "^",
+ "_prof.nc$",
+ ".*",
+ "fileNumber", // just for test purposes
+ true, // removeMVRows
+ "FLOAT_SERIAL_NO JULD", // sort files by
+ "",
+ "",
+ "",
+ "",
+ -1, // defaultStandardizeWhat
+ "", // treatDimensionsAs
+ null, // cacheFromUrl
+ null)
+ + "\n";
- String tDatasetID = EDDTableFromMultidimNcFiles.suggestDatasetID(dataDir + fileNameRegex + useDimensionsCSV);
- String expected = "\n"
- +
- " 1440\n" +
- " 10000\n" +
- " " + dataDir + "\n" +
- " " + fileNameRegex + "\n" +
- " true\n" +
- " .*\n" +
- " last\n" +
- " 0\n" +
- " ^\n" +
- " _prof.nc$\n" +
- " .*\n" +
- " fileNumber\n" +
- " true\n" +
- " FLOAT_SERIAL_NO JULD\n" +
- " false\n" +
- " \n" +
- " \n" +
- " \n" +
- " TrajectoryProfile\n" +
- " profile_id, ???\n" +
- " trajectory_id, ???\n" +
- " Argo-3.1, CF-1.10, COARDS, ACDD-1.3\n" +
- " Coriolis GDAC\n" +
- " institution\n" +
- " http://www.argodatamgt.org/Documentation\n" +
- " http://www.argodatamgt.org/Documentation\n" +
- " adjusted, argo, array, assembly, centre, centres, charge, coded, CONFIG_MISSION_NUMBER, contains, coriolis, creation, currents, cycle, CYCLE_NUMBER, data, DATA_CENTRE, DATA_MODE, DATA_STATE_INDICATOR, DATA_TYPE, date, DATE_CREATION, DATE_UPDATE, day, days, DC_REFERENCE, degree, delayed, denoting, density, determined, direction, earth, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Salinity/Density > Salinity, equals, error, file, firmware, FIRMWARE_VERSION, flag, float, FLOAT_SERIAL_NO, format, FORMAT_VERSION, gdac, geostrophic, global, handbook, HANDBOOK_VERSION, identifier, in-situ, instrument, investigator, its, its-90, JULD_LOCATION, JULD_QC, julian, latitude, level, longitude, missions, mode, name, number, ocean, oceanography, oceans, passed, performed, PI_NAME, PLATFORM_NUMBER, PLATFORM_TYPE, position, POSITION_QC, positioning, POSITIONING_SYSTEM, practical, pres, PRES_ADJUSTED, PRES_ADJUSTED_ERROR, PRES_ADJUSTED_QC, PRES_QC, pressure, principal, process, processing, profile, PROFILE_PRES_QC, PROFILE_PSAL_QC, PROFILE_TEMP_QC, profiles, project, PROJECT_NAME, psal, PSAL_ADJUSTED, PSAL_ADJUSTED_ERROR, PSAL_ADJUSTED_QC, PSAL_QC, quality, real, real time, real-time, realtime, reference, REFERENCE_DATE_TIME, relative, salinity, sampling, scale, scheme, science, sea, sea level, sea-level, sea_water_practical_salinity, sea_water_pressure, sea_water_temperature, seawater, serial, situ, station, statistics, system, TEMP, TEMP_ADJUSTED, TEMP_ADJUSTED_ERROR, TEMP_ADJUSTED_QC, TEMP_QC, temperature, through, time, type, unique, update, values, version, vertical, VERTICAL_SAMPLING_SCHEME, water, WMO_INST_TYPE\n"
- +
- " GCMD Science Keywords\n" +
- " [standard]\n" +
- " (local files)\n" +
- " CF Standard Name Table v70\n" +
- " DATA_TYPE, FORMAT_VERSION, HANDBOOK_VERSION, REFERENCE_DATE_TIME, DATE_CREATION, DATE_UPDATE, PLATFORM_NUMBER, PROJECT_NAME, PI_NAME, DIRECTION, DATA_CENTRE, WMO_INST_TYPE, JULD_QC, POSITION_QC, POSITIONING_SYSTEM, CONFIG_MISSION_NUMBER\n"
- +
- " Argo float vertical profile. Coriolis Global Data Assembly Centres (GDAC) data from a local source.\n"
- +
- " \n" +
- " \n" +
- " fileNumber\n" +
- " fileNumber\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " File Number\n" +
- " \n" +
- " \n" +
- " \n" +
- " DATA_TYPE\n" +
- " DATA_TYPE\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " FORMAT_VERSION\n" +
- " FORMAT_VERSION\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " HANDBOOK_VERSION\n" +
- " HANDBOOK_VERSION\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " REFERENCE_DATE_TIME\n" +
- " REFERENCE_DATE_TIME\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " DATE_CREATION\n" +
- " DATE_CREATION\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " DATE_UPDATE\n" +
- " DATE_UPDATE\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " PLATFORM_NUMBER\n" +
- " PLATFORM_NUMBER\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " PROJECT_NAME\n" +
- " PROJECT_NAME\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " PI_NAME\n" +
- " PI_NAME\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " CYCLE_NUMBER\n" +
- " CYCLE_NUMBER\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " DIRECTION\n" +
- " DIRECTION\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 360.0\n" +
- " 0.0\n" +
- " Currents\n" +
- " \n" +
- " \n" +
- " \n" +
- " DATA_CENTRE\n" +
- " DATA_CENTRE\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " DC_REFERENCE\n" +
- " DC_REFERENCE\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " DATA_STATE_INDICATOR\n" +
- " DATA_STATE_INDICATOR\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Location\n" +
- " \n" +
- " \n" +
- " \n" +
- " DATA_MODE\n" +
- " DATA_MODE\n" +
- " char\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " PLATFORM_TYPE\n" +
- " PLATFORM_TYPE\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " FLOAT_SERIAL_NO\n" +
- " FLOAT_SERIAL_NO\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " FIRMWARE_VERSION\n" +
- " FIRMWARE_VERSION\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " WMO_INST_TYPE\n" +
- " WMO_INST_TYPE\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " JULD\n" +
- " time\n" +
- " double\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " null\n" +
- " JULD\n" +
- " days since 1950-01-01T00:00:00Z\n" +
- " \n" +
- " \n" +
- " \n" +
- " JULD_QC\n" +
- " JULD_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " JULD_LOCATION\n" +
- " JULD_LOCATION\n" +
- " double\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " null\n" +
- " time\n" +
- " days since 1950-01-01T00:00:00Z\n" +
- " \n" +
- " \n" +
- " \n" +
- " LATITUDE\n" +
- " latitude\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 90.0\n" +
- " -90.0\n" +
- " Location\n" +
- " degrees_north\n" +
- " \n" +
- " \n" +
- " \n" +
- " LONGITUDE\n" +
- " longitude\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 180.0\n" +
- " -180.0\n" +
- " Location\n" +
- " degrees_east\n" +
- " \n" +
- " \n" +
- " \n" +
- " POSITION_QC\n" +
- " POSITION_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " POSITIONING_SYSTEM\n" +
- " POSITIONING_SYSTEM\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " PROFILE_PRES_QC\n" +
- " PROFILE_PRES_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " PROFILE_TEMP_QC\n" +
- " PROFILE_TEMP_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " PROFILE_PSAL_QC\n" +
- " PROFILE_PSAL_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " VERTICAL_SAMPLING_SCHEME\n" +
- " VERTICAL_SAMPLING_SCHEME\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " CONFIG_MISSION_NUMBER\n" +
- " CONFIG_MISSION_NUMBER\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " PRES\n" +
- " PRES\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 5000.0\n" +
- " 0.0\n" +
- " Sea Level\n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " PRES_QC\n" +
- " PRES_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " PRES_ADJUSTED\n" +
- " PRES_ADJUSTED\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 5000.0\n" +
- " 0.0\n" +
- " Sea Level\n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " PRES_ADJUSTED_QC\n" +
- " PRES_ADJUSTED_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " PRES_ADJUSTED_ERROR\n" +
- " PRES_ADJUSTED_ERROR\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 50.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP\n" +
- " TEMP\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 32.0\n" +
- " 0.0\n" +
- " Temperature\n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP_QC\n" +
- " TEMP_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP_ADJUSTED\n" +
- " TEMP_ADJUSTED\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 32.0\n" +
- " 0.0\n" +
- " Temperature\n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP_ADJUSTED_QC\n" +
- " TEMP_ADJUSTED_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP_ADJUSTED_ERROR\n" +
- " TEMP_ADJUSTED_ERROR\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 5.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " PSAL\n" +
- " PSAL\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 37.0\n" +
- " 32.0\n" +
- " Salinity\n" +
- " null\n" +
- " sea_water_practical_salinity\n" +
- " PSU\n" +
- " \n" +
- " \n" +
- " \n" +
- " PSAL_QC\n" +
- " PSAL_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " PSAL_ADJUSTED\n" +
- " PSAL_ADJUSTED\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 37.0\n" +
- " 32.0\n" +
- " Salinity\n" +
- " null\n" +
- " sea_water_practical_salinity\n" +
- " PSU\n" +
- " \n" +
- " \n" +
- " \n" +
- " PSAL_ADJUSTED_QC\n" +
- " PSAL_ADJUSTED_QC\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " PSAL_ADJUSTED_ERROR\n" +
- " PSAL_ADJUSTED_ERROR\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 1.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " null\n" +
- " PSU\n" +
- " \n" +
- " \n" +
- "\n" +
- "\n\n";
+ String tDatasetID =
+ EDDTableFromMultidimNcFiles.suggestDatasetID(dataDir + fileNameRegex + useDimensionsCSV);
+ String expected =
+ "\n"
+ + " 1440\n"
+ + " 10000\n"
+ + " "
+ + dataDir
+ + "\n"
+ + " "
+ + fileNameRegex
+ + "\n"
+ + " true\n"
+ + " .*\n"
+ + " last\n"
+ + " 0\n"
+ + " ^\n"
+ + " _prof.nc$\n"
+ + " .*\n"
+ + " fileNumber\n"
+ + " true\n"
+ + " FLOAT_SERIAL_NO JULD\n"
+ + " false\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TrajectoryProfile\n"
+ + " profile_id, ???\n"
+ + " trajectory_id, ???\n"
+ + " Argo-3.1, CF-1.10, COARDS, ACDD-1.3\n"
+ + " Coriolis GDAC\n"
+ + " institution\n"
+ + " http://www.argodatamgt.org/Documentation\n"
+ + " http://www.argodatamgt.org/Documentation\n"
+ + " adjusted, argo, array, assembly, centre, centres, charge, coded, CONFIG_MISSION_NUMBER, contains, coriolis, creation, currents, cycle, CYCLE_NUMBER, data, DATA_CENTRE, DATA_MODE, DATA_STATE_INDICATOR, DATA_TYPE, date, DATE_CREATION, DATE_UPDATE, day, days, DC_REFERENCE, degree, delayed, denoting, density, determined, direction, earth, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Salinity/Density > Salinity, equals, error, file, firmware, FIRMWARE_VERSION, flag, float, FLOAT_SERIAL_NO, format, FORMAT_VERSION, gdac, geostrophic, global, handbook, HANDBOOK_VERSION, identifier, in-situ, instrument, investigator, its, its-90, JULD_LOCATION, JULD_QC, julian, latitude, level, longitude, missions, mode, name, number, ocean, oceanography, oceans, passed, performed, PI_NAME, PLATFORM_NUMBER, PLATFORM_TYPE, position, POSITION_QC, positioning, POSITIONING_SYSTEM, practical, pres, PRES_ADJUSTED, PRES_ADJUSTED_ERROR, PRES_ADJUSTED_QC, PRES_QC, pressure, principal, process, processing, profile, PROFILE_PRES_QC, PROFILE_PSAL_QC, PROFILE_TEMP_QC, profiles, project, PROJECT_NAME, psal, PSAL_ADJUSTED, PSAL_ADJUSTED_ERROR, PSAL_ADJUSTED_QC, PSAL_QC, quality, real, real time, real-time, realtime, reference, REFERENCE_DATE_TIME, relative, salinity, sampling, scale, scheme, science, sea, sea level, sea-level, sea_water_practical_salinity, sea_water_pressure, sea_water_temperature, seawater, serial, situ, station, statistics, system, TEMP, TEMP_ADJUSTED, TEMP_ADJUSTED_ERROR, TEMP_ADJUSTED_QC, TEMP_QC, temperature, through, time, type, unique, update, values, version, vertical, VERTICAL_SAMPLING_SCHEME, water, WMO_INST_TYPE\n"
+ + " GCMD Science Keywords\n"
+ + " [standard]\n"
+ + " (local files)\n"
+ + " CF Standard Name Table v70\n"
+ + " DATA_TYPE, FORMAT_VERSION, HANDBOOK_VERSION, REFERENCE_DATE_TIME, DATE_CREATION, DATE_UPDATE, PLATFORM_NUMBER, PROJECT_NAME, PI_NAME, DIRECTION, DATA_CENTRE, WMO_INST_TYPE, JULD_QC, POSITION_QC, POSITIONING_SYSTEM, CONFIG_MISSION_NUMBER\n"
+ + " Argo float vertical profile. Coriolis Global Data Assembly Centres (GDAC) data from a local source.\n"
+ + " \n"
+ + " \n"
+ + " fileNumber\n"
+ + " fileNumber\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " File Number\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DATA_TYPE\n"
+ + " DATA_TYPE\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " FORMAT_VERSION\n"
+ + " FORMAT_VERSION\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " HANDBOOK_VERSION\n"
+ + " HANDBOOK_VERSION\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " REFERENCE_DATE_TIME\n"
+ + " REFERENCE_DATE_TIME\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DATE_CREATION\n"
+ + " DATE_CREATION\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DATE_UPDATE\n"
+ + " DATE_UPDATE\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PLATFORM_NUMBER\n"
+ + " PLATFORM_NUMBER\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PROJECT_NAME\n"
+ + " PROJECT_NAME\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PI_NAME\n"
+ + " PI_NAME\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " CYCLE_NUMBER\n"
+ + " CYCLE_NUMBER\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DIRECTION\n"
+ + " DIRECTION\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 360.0\n"
+ + " 0.0\n"
+ + " Currents\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DATA_CENTRE\n"
+ + " DATA_CENTRE\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DC_REFERENCE\n"
+ + " DC_REFERENCE\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DATA_STATE_INDICATOR\n"
+ + " DATA_STATE_INDICATOR\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Location\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DATA_MODE\n"
+ + " DATA_MODE\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PLATFORM_TYPE\n"
+ + " PLATFORM_TYPE\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " FLOAT_SERIAL_NO\n"
+ + " FLOAT_SERIAL_NO\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " FIRMWARE_VERSION\n"
+ + " FIRMWARE_VERSION\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " WMO_INST_TYPE\n"
+ + " WMO_INST_TYPE\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " JULD\n"
+ + " time\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " null\n"
+ + " JULD\n"
+ + " days since 1950-01-01T00:00:00Z\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " JULD_QC\n"
+ + " JULD_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " JULD_LOCATION\n"
+ + " JULD_LOCATION\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " null\n"
+ + " time\n"
+ + " days since 1950-01-01T00:00:00Z\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " LATITUDE\n"
+ + " latitude\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 90.0\n"
+ + " -90.0\n"
+ + " Location\n"
+ + " degrees_north\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " LONGITUDE\n"
+ + " longitude\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 180.0\n"
+ + " -180.0\n"
+ + " Location\n"
+ + " degrees_east\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " POSITION_QC\n"
+ + " POSITION_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " POSITIONING_SYSTEM\n"
+ + " POSITIONING_SYSTEM\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PROFILE_PRES_QC\n"
+ + " PROFILE_PRES_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PROFILE_TEMP_QC\n"
+ + " PROFILE_TEMP_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PROFILE_PSAL_QC\n"
+ + " PROFILE_PSAL_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " VERTICAL_SAMPLING_SCHEME\n"
+ + " VERTICAL_SAMPLING_SCHEME\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " CONFIG_MISSION_NUMBER\n"
+ + " CONFIG_MISSION_NUMBER\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PRES\n"
+ + " PRES\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 5000.0\n"
+ + " 0.0\n"
+ + " Sea Level\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PRES_QC\n"
+ + " PRES_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PRES_ADJUSTED\n"
+ + " PRES_ADJUSTED\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 5000.0\n"
+ + " 0.0\n"
+ + " Sea Level\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PRES_ADJUSTED_QC\n"
+ + " PRES_ADJUSTED_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PRES_ADJUSTED_ERROR\n"
+ + " PRES_ADJUSTED_ERROR\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 50.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP\n"
+ + " TEMP\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 32.0\n"
+ + " 0.0\n"
+ + " Temperature\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP_QC\n"
+ + " TEMP_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP_ADJUSTED\n"
+ + " TEMP_ADJUSTED\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 32.0\n"
+ + " 0.0\n"
+ + " Temperature\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP_ADJUSTED_QC\n"
+ + " TEMP_ADJUSTED_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP_ADJUSTED_ERROR\n"
+ + " TEMP_ADJUSTED_ERROR\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 5.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PSAL\n"
+ + " PSAL\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 37.0\n"
+ + " 32.0\n"
+ + " Salinity\n"
+ + " null\n"
+ + " sea_water_practical_salinity\n"
+ + " PSU\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PSAL_QC\n"
+ + " PSAL_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PSAL_ADJUSTED\n"
+ + " PSAL_ADJUSTED\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 37.0\n"
+ + " 32.0\n"
+ + " Salinity\n"
+ + " null\n"
+ + " sea_water_practical_salinity\n"
+ + " PSU\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PSAL_ADJUSTED_QC\n"
+ + " PSAL_ADJUSTED_QC\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PSAL_ADJUSTED_ERROR\n"
+ + " PSAL_ADJUSTED_ERROR\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 1.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " null\n"
+ + " PSU\n"
+ + " \n"
+ + " \n"
+ + "\n"
+ + "\n\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// GenerateDatasetsXml
- results = (new GenerateDatasetsXml()).doIt(new String[] { "-verbose",
- "EDDTableFromMultidimNcFiles",
- dataDir, ".*_prof\\.nc", "",
- "N_PROF, N_LEVELS",
- "1440",
- "^", "_prof.nc$", ".*", "fileNumber", // just for test purposes
- "true", // removeMVRows
- "FLOAT_SERIAL_NO JULD", // sort files by
- "", "", "", "",
- "-1", // defaultStandardizeWhat
- "", // treatDimensionsAs
- "" }, // cacheFromUrl
- false); // doIt loop?
+ results =
+ (new GenerateDatasetsXml())
+ .doIt(
+ new String[] {
+ "-verbose",
+ "EDDTableFromMultidimNcFiles",
+ dataDir,
+ ".*_prof\\.nc",
+ "",
+ "N_PROF, N_LEVELS",
+ "1440",
+ "^",
+ "_prof.nc$",
+ ".*",
+ "fileNumber", // just for test purposes
+ "true", // removeMVRows
+ "FLOAT_SERIAL_NO JULD", // sort files by
+ "",
+ "",
+ "",
+ "",
+ "-1", // defaultStandardizeWhat
+ "", // treatDimensionsAs
+ ""
+ }, // cacheFromUrl
+ false); // doIt loop?
Test.ensureEqual(results, expected, "Unexpected results from GenerateDatasetsXml.doIt.");
// Test.ensureEqual(results.substring(0, Math.min(results.length(),
@@ -850,13 +865,13 @@ void testGenerateDatasetsXml() throws Throwable {
// ensure it is ready-to-use by making a dataset from it
// with one small change to addAttributes:
- String tr = " TrajectoryProfile\n" +
- " profile_id, ???\n" +
- " trajectory_id, ???\n";
+ String tr =
+ " TrajectoryProfile\n"
+ + " profile_id, ???\n"
+ + " trajectory_id, ???\n";
int po = results.indexOf(tr);
Test.ensureTrue(po > 0, "pre replaceAll:\n" + results);
- results = String2.replaceAll(results, tr,
- " Point\n");
+ results = String2.replaceAll(results, tr, " Point\n");
String2.log("post replaceAll:\n" + results);
// String tDatasetID = "_prof_8d3d_e39b_5d82";
@@ -864,10 +879,10 @@ void testGenerateDatasetsXml() throws Throwable {
EDD edd = EDDTableFromMultidimNcFiles.oneFromXmlFragment(null, results);
Test.ensureEqual(edd.datasetID(), tDatasetID, "");
Test.ensureEqual(edd.title(), "Argo float vertical profile", "");
- Test.ensureEqual(String2.toCSSVString(edd.dataVariableDestinationNames()),
+ Test.ensureEqual(
+ String2.toCSSVString(edd.dataVariableDestinationNames()),
"fileNumber, DATA_TYPE, FORMAT_VERSION, HANDBOOK_VERSION, REFERENCE_DATE_TIME, DATE_CREATION, DATE_UPDATE, PLATFORM_NUMBER, PROJECT_NAME, PI_NAME, CYCLE_NUMBER, DIRECTION, DATA_CENTRE, DC_REFERENCE, DATA_STATE_INDICATOR, DATA_MODE, PLATFORM_TYPE, FLOAT_SERIAL_NO, FIRMWARE_VERSION, WMO_INST_TYPE, time, JULD_QC, JULD_LOCATION, latitude, longitude, POSITION_QC, POSITIONING_SYSTEM, PROFILE_PRES_QC, PROFILE_TEMP_QC, PROFILE_PSAL_QC, VERTICAL_SAMPLING_SCHEME, CONFIG_MISSION_NUMBER, PRES, PRES_QC, PRES_ADJUSTED, PRES_ADJUSTED_QC, PRES_ADJUSTED_ERROR, TEMP, TEMP_QC, TEMP_ADJUSTED, TEMP_ADJUSTED_QC, TEMP_ADJUSTED_ERROR, PSAL, PSAL_QC, PSAL_ADJUSTED, PSAL_ADJUSTED_QC, PSAL_ADJUSTED_ERROR",
"");
-
}
/**
@@ -876,7 +891,7 @@ void testGenerateDatasetsXml() throws Throwable {
* @throws Throwable if trouble
*/
@ParameterizedTest
- @ValueSource(booleans = { true, false })
+ @ValueSource(booleans = {true, false})
void testBasic(boolean deleteCachedInfo) throws Throwable {
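+    // parameterized: runs once with deleteCachedInfo=true (rebuild cached dataset info)
+    // and once with false (reuse it)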
// String2.log("\n****************** EDDTableFromMultidimNcFiles.testBasic()
// *****************\n");
@@ -886,566 +901,584 @@ void testBasic(boolean deleteCachedInfo) throws Throwable {
String error = "";
EDV edv;
String dir = EDStatic.fullTestCacheDirectory;
- String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 14); // 14 is enough to check hour. Hard
- // to check min:sec.
+ String today =
+ Calendar2.getCurrentISODateTimeStringZulu()
+ .substring(0, 14); // 14 is enough to check hour. Hard
+ // to check min:sec.
String id = "argoFloats";
- if (deleteCachedInfo)
- EDD.deleteCachedDatasetInfo(id);
+ if (deleteCachedInfo) EDD.deleteCachedDatasetInfo(id);
EDDTable eddTable = (EDDTable) EDDTestDataset.getArgoFloats();
// *** test getting das for entire dataset
- String2.log("\n****************** EDDTableFromMultidimNcFiles test das and dds for entire dataset\n");
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_Entire", ".das");
+ String2.log(
+ "\n****************** EDDTableFromMultidimNcFiles test das and dds for entire dataset\n");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_Entire", ".das");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "Attributes {\n" +
- " s {\n" +
- " fileNumber {\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"File Number\";\n" +
- " }\n" +
- " data_type {\n" +
- " String conventions \"Argo reference table 1\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Data type\";\n" +
- " }\n" +
- " format_version {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"File format version\";\n" +
- " }\n" +
- " handbook_version {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Data handbook version\";\n" +
- " }\n" +
- " reference_date_time {\n" +
- " Float64 actual_range -6.31152e+8, -6.31152e+8;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Date of reference for Julian days\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " date_creation {\n" +
- " Float64 actual_range 1.397639727e+9, 1.467319628e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Date of file creation\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " date_update {\n" +
- " Float64 actual_range 1.49100283e+9, 1.491241644e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Date of update of this file\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " platform_number {\n" +
- " String cf_role \"trajectory_id\";\n" +
- " String conventions \"WMO float identifier : A9IIIII\";\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Float unique identifier\";\n" +
- " }\n" +
- " project_name {\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Name of the project\";\n" +
- " }\n" +
- " pi_name {\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Name of the principal investigator\";\n" +
- " }\n" +
- " cycle_number {\n" +
- " Int32 _FillValue 99999;\n" +
- " Int32 actual_range 1, 110;\n" +
- " String cf_role \"profile_id\";\n" +
- " Float64 colorBarMaximum 200.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"0...N, 0 : launch cycle (if exists), 1 : first complete cycle\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Float cycle number\";\n" +
- " }\n" +
- " direction {\n" +
- // " String actual_range \"A\nA\";\n" +
- " Float64 colorBarMaximum 360.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"A: ascending profiles, D: descending profiles\";\n" +
- " String ioos_category \"Currents\";\n" +
- " String long_name \"Direction of the station profiles\";\n" +
- " }\n" +
- " data_center {\n" +
- " String conventions \"Argo reference table 4\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Data centre in charge of float data processing\";\n" +
- " }\n" +
- " dc_reference {\n" +
- " String conventions \"Data centre convention\";\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Station unique identifier in data centre\";\n" +
- " }\n" +
- " data_state_indicator {\n" +
- " String conventions \"Argo reference table 6\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Degree of processing the data have passed through\";\n" +
- " }\n" +
- " data_mode {\n" +
- // " String actual_range \"A\nD\";\n" +
- " String conventions \"R : real time; D : delayed mode; A : real time with adjustment\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Delayed mode or real time data\";\n" +
- " }\n" +
- " platform_type {\n" +
- " String conventions \"Argo reference table 23\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Type of float\";\n" +
- " }\n" +
- " float_serial_no {\n" +
- " Float64 colorBarMaximum 100.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Serial number of the float\";\n" +
- " }\n" +
- " firmware_version {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Instrument firmware version\";\n" +
- " }\n" +
- " wmo_inst_type {\n" +
- " String conventions \"Argo reference table 8\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Coded instrument type\";\n" +
- " }\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.397033909e+9, 1.491210572e+9;\n" +
- " String axis \"T\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Julian day (UTC) of the station relative to REFERENCE_DATE_TIME\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " time_qc {\n" +
- // " String actual_range \"1\n1\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Quality on date and time\";\n" +
- " }\n" +
- " time_location {\n" +
- " Float64 actual_range 1.397033909e+9, 1.491213835e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Julian day (UTC) of the location relative to REFERENCE_DATE_TIME\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float64 _FillValue 99999.0;\n" +
- " Float64 actual_range 3.035, 42.68257;\n" +
- " String axis \"Y\";\n" +
- " Float64 colorBarMaximum 90.0;\n" +
- " Float64 colorBarMinimum -90.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude of the station, best estimate\";\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " Float64 valid_max 90.0;\n" +
- " Float64 valid_min -90.0;\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float64 _FillValue 99999.0;\n" +
- " Float64 actual_range -27.099, 8.141141666666666;\n" +
- " String axis \"X\";\n" +
- " Float64 colorBarMaximum 180.0;\n" +
- " Float64 colorBarMinimum -180.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude of the station, best estimate\";\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " Float64 valid_max 180.0;\n" +
- " Float64 valid_min -180.0;\n" +
- " }\n" +
- " position_qc {\n" +
- // " String actual_range \"1\n1\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Quality on position (latitude and longitude)\";\n" +
- " }\n" +
- " positioning_system {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Positioning system\";\n" +
- " }\n" +
- " profile_pres_qc {\n" +
- // " String actual_range \"A\nF\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2a\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Global quality flag of PRES profile\";\n" +
- " }\n" +
- " profile_temp_qc {\n" +
- // " String actual_range \"A\nF\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2a\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Global quality flag of TEMP profile\";\n" +
- " }\n" +
- " profile_psal_qc {\n" +
- // " String actual_range \"A\nF\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2a\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Global quality flag of PSAL profile\";\n" +
- " }\n" +
- " vertical_sampling_scheme {\n" +
- " String conventions \"Argo reference table 16\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Vertical sampling scheme\";\n" +
- " }\n" +
- " config_mission_number {\n" +
- " Int32 _FillValue 99999;\n" +
- " Int32 actual_range 1, 16;\n" +
- " Float64 colorBarMaximum 100.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"1...N, 1 : first complete mission\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Unique number denoting the missions performed by the float\";\n" +
- " }\n" +
- " pres {\n" +
- " String _CoordinateAxisType \"Height\";\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 2.1, 2000.9;\n" +
- " String axis \"Z\";\n" +
- " String C_format \"%7.1f\";\n" +
- " Float64 colorBarMaximum 5000.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F7.1\";\n" +
- " String ioos_category \"Sea Level\";\n" +
- " String long_name \"Sea water pressure, equals 0 at sea-level\";\n" +
- " String standard_name \"sea_water_pressure\";\n" +
- " String units \"decibar\";\n" +
- " Float32 valid_max 12000.0;\n" +
- " Float32 valid_min 0.0;\n" +
- " }\n" +
- " pres_qc {\n" +
- // " String actual_range \"1\n4\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " pres_adjusted {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 3.6, 2000.8;\n" +
- " String axis \"Z\";\n" +
- " String C_format \"%7.1f\";\n" +
- " Float64 colorBarMaximum 5000.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F7.1\";\n" +
- " String ioos_category \"Sea Level\";\n" +
- " String long_name \"Sea water pressure, equals 0 at sea-level\";\n" +
- " String standard_name \"sea_water_pressure\";\n" +
- " String units \"decibar\";\n" +
- " Float32 valid_max 12000.0;\n" +
- " Float32 valid_min 0.0;\n" +
- " }\n" +
- " pres_adjusted_qc {\n" +
- // " String actual_range \" \n4\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " pres_adjusted_error {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 2.4, 2.4;\n" +
- " String C_format \"%7.1f\";\n" +
- " Float64 colorBarMaximum 50.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F7.1\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
- +
- " String units \"decibar\";\n" +
- " }\n" +
- " temp {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 3.466, 29.233;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 32.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Sea temperature in-situ ITS-90 scale\";\n" +
- " String standard_name \"sea_water_temperature\";\n" +
- " String units \"degree_Celsius\";\n" +
- " Float32 valid_max 40.0;\n" +
- " Float32 valid_min -2.5;\n" +
- " }\n" +
- " temp_qc {\n" +
- // " String actual_range \"1\n4\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " temp_adjusted {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 3.466, 29.233;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 32.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Sea temperature in-situ ITS-90 scale\";\n" +
- " String standard_name \"sea_water_temperature\";\n" +
- " String units \"degree_Celsius\";\n" +
- " Float32 valid_max 40.0;\n" +
- " Float32 valid_min -2.5;\n" +
- " }\n" +
- " temp_adjusted_qc {\n" +
- // " String actual_range \" \n4\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " temp_adjusted_error {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 0.002, 0.002;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 1.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
- +
- " String units \"degree_Celsius\";\n" +
- " }\n" +
- " psal {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 34.161, 38.693;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 37.0;\n" +
- " Float64 colorBarMinimum 32.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Salinity\";\n" +
- " String long_name \"Practical salinity\";\n" +
- " String standard_name \"sea_water_practical_salinity\";\n" +
- " String units \"PSU\";\n" +
- " Float32 valid_max 41.0;\n" +
- " Float32 valid_min 2.0;\n" +
- " }\n" +
- " psal_qc {\n" +
- // " String actual_range \"1\n4\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " psal_adjusted {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 34.1611, 36.4901;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 37.0;\n" +
- " Float64 colorBarMinimum 32.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Salinity\";\n" +
- " String long_name \"Practical salinity\";\n" +
- " String standard_name \"sea_water_practical_salinity\";\n" +
- " String units \"PSU\";\n" +
- " Float32 valid_max 41.0;\n" +
- " Float32 valid_min 2.0;\n" +
- " }\n" +
- " psal_adjusted_qc {\n" +
- // " String actual_range \" \n4\";\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " psal_adjusted_error {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 0.01, 0.01;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 1.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
- +
- " String units \"psu\";\n" +
- " }\n" +
- " }\n" +
- " NC_GLOBAL {\n" +
- " String cdm_altitude_proxy \"pres\";\n" +
- " String cdm_data_type \"TrajectoryProfile\";\n" +
- " String cdm_profile_variables \"cycle_number, data_type, format_version, handbook_version, reference_date_time, date_creation, date_update, direction, data_center, dc_reference, data_state_indicator, data_mode, firmware_version, wmo_inst_type, time, time_qc, time_location, latitude, longitude, position_qc, positioning_system, profile_pres_qc, profile_temp_qc, profile_psal_qc, vertical_sampling_scheme\";\n"
- +
- " String cdm_trajectory_variables \"platform_number, project_name, pi_name, platform_type, float_serial_no\";\n"
- +
- " String Conventions \"Argo-3.1, CF-1.6, COARDS, ACDD-1.3\";\n" +
- " String creator_email \"support@argo.net\";\n" +
- " String creator_name \"Argo\";\n" +
- " String creator_url \"http://www.argo.net/\";\n" +
- " Float64 Easternmost_Easting 8.141141666666666;\n" +
- " String featureType \"TrajectoryProfile\";\n" +
- " Float64 geospatial_lat_max 42.68257;\n" +
- " Float64 geospatial_lat_min 3.035;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max 8.141141666666666;\n" +
- " Float64 geospatial_lon_min -27.099;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " String history \"";
+ expected =
+ "Attributes {\n"
+ + " s {\n"
+ + " fileNumber {\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"File Number\";\n"
+ + " }\n"
+ + " data_type {\n"
+ + " String conventions \"Argo reference table 1\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Data type\";\n"
+ + " }\n"
+ + " format_version {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"File format version\";\n"
+ + " }\n"
+ + " handbook_version {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Data handbook version\";\n"
+ + " }\n"
+ + " reference_date_time {\n"
+ + " Float64 actual_range -6.31152e+8, -6.31152e+8;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Date of reference for Julian days\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " date_creation {\n"
+ + " Float64 actual_range 1.397639727e+9, 1.467319628e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Date of file creation\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " date_update {\n"
+ + " Float64 actual_range 1.49100283e+9, 1.491241644e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Date of update of this file\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " platform_number {\n"
+ + " String cf_role \"trajectory_id\";\n"
+ + " String conventions \"WMO float identifier : A9IIIII\";\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Float unique identifier\";\n"
+ + " }\n"
+ + " project_name {\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Name of the project\";\n"
+ + " }\n"
+ + " pi_name {\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Name of the principal investigator\";\n"
+ + " }\n"
+ + " cycle_number {\n"
+ + " Int32 _FillValue 99999;\n"
+ + " Int32 actual_range 1, 110;\n"
+ + " String cf_role \"profile_id\";\n"
+ + " Float64 colorBarMaximum 200.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"0...N, 0 : launch cycle (if exists), 1 : first complete cycle\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Float cycle number\";\n"
+ + " }\n"
+ + " direction {\n"
+ +
+ // " String actual_range \"A\nA\";\n" +
+ " Float64 colorBarMaximum 360.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"A: ascending profiles, D: descending profiles\";\n"
+ + " String ioos_category \"Currents\";\n"
+ + " String long_name \"Direction of the station profiles\";\n"
+ + " }\n"
+ + " data_center {\n"
+ + " String conventions \"Argo reference table 4\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Data centre in charge of float data processing\";\n"
+ + " }\n"
+ + " dc_reference {\n"
+ + " String conventions \"Data centre convention\";\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Station unique identifier in data centre\";\n"
+ + " }\n"
+ + " data_state_indicator {\n"
+ + " String conventions \"Argo reference table 6\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Degree of processing the data have passed through\";\n"
+ + " }\n"
+ + " data_mode {\n"
+ +
+ // " String actual_range \"A\nD\";\n" +
+ " String conventions \"R : real time; D : delayed mode; A : real time with adjustment\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Delayed mode or real time data\";\n"
+ + " }\n"
+ + " platform_type {\n"
+ + " String conventions \"Argo reference table 23\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Type of float\";\n"
+ + " }\n"
+ + " float_serial_no {\n"
+ + " Float64 colorBarMaximum 100.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Serial number of the float\";\n"
+ + " }\n"
+ + " firmware_version {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Instrument firmware version\";\n"
+ + " }\n"
+ + " wmo_inst_type {\n"
+ + " String conventions \"Argo reference table 8\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Coded instrument type\";\n"
+ + " }\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.397033909e+9, 1.491210572e+9;\n"
+ + " String axis \"T\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Julian day (UTC) of the station relative to REFERENCE_DATE_TIME\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " time_qc {\n"
+ +
+ // " String actual_range \"1\n1\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Quality on date and time\";\n"
+ + " }\n"
+ + " time_location {\n"
+ + " Float64 actual_range 1.397033909e+9, 1.491213835e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Julian day (UTC) of the location relative to REFERENCE_DATE_TIME\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float64 _FillValue 99999.0;\n"
+ + " Float64 actual_range 3.035, 42.68257;\n"
+ + " String axis \"Y\";\n"
+ + " Float64 colorBarMaximum 90.0;\n"
+ + " Float64 colorBarMinimum -90.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude of the station, best estimate\";\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " Float64 valid_max 90.0;\n"
+ + " Float64 valid_min -90.0;\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float64 _FillValue 99999.0;\n"
+ + " Float64 actual_range -27.099, 8.141141666666666;\n"
+ + " String axis \"X\";\n"
+ + " Float64 colorBarMaximum 180.0;\n"
+ + " Float64 colorBarMinimum -180.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude of the station, best estimate\";\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " Float64 valid_max 180.0;\n"
+ + " Float64 valid_min -180.0;\n"
+ + " }\n"
+ + " position_qc {\n"
+ +
+ // " String actual_range \"1\n1\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Quality on position (latitude and longitude)\";\n"
+ + " }\n"
+ + " positioning_system {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Positioning system\";\n"
+ + " }\n"
+ + " profile_pres_qc {\n"
+ +
+ // " String actual_range \"A\nF\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2a\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Global quality flag of PRES profile\";\n"
+ + " }\n"
+ + " profile_temp_qc {\n"
+ +
+ // " String actual_range \"A\nF\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2a\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Global quality flag of TEMP profile\";\n"
+ + " }\n"
+ + " profile_psal_qc {\n"
+ +
+ // " String actual_range \"A\nF\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2a\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Global quality flag of PSAL profile\";\n"
+ + " }\n"
+ + " vertical_sampling_scheme {\n"
+ + " String conventions \"Argo reference table 16\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Vertical sampling scheme\";\n"
+ + " }\n"
+ + " config_mission_number {\n"
+ + " Int32 _FillValue 99999;\n"
+ + " Int32 actual_range 1, 16;\n"
+ + " Float64 colorBarMaximum 100.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"1...N, 1 : first complete mission\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Unique number denoting the missions performed by the float\";\n"
+ + " }\n"
+ + " pres {\n"
+ + " String _CoordinateAxisType \"Height\";\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 2.1, 2000.9;\n"
+ + " String axis \"Z\";\n"
+ + " String C_format \"%7.1f\";\n"
+ + " Float64 colorBarMaximum 5000.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F7.1\";\n"
+ + " String ioos_category \"Sea Level\";\n"
+ + " String long_name \"Sea water pressure, equals 0 at sea-level\";\n"
+ + " String standard_name \"sea_water_pressure\";\n"
+ + " String units \"decibar\";\n"
+ + " Float32 valid_max 12000.0;\n"
+ + " Float32 valid_min 0.0;\n"
+ + " }\n"
+ + " pres_qc {\n"
+ +
+ // " String actual_range \"1\n4\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " pres_adjusted {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 3.6, 2000.8;\n"
+ + " String axis \"Z\";\n"
+ + " String C_format \"%7.1f\";\n"
+ + " Float64 colorBarMaximum 5000.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F7.1\";\n"
+ + " String ioos_category \"Sea Level\";\n"
+ + " String long_name \"Sea water pressure, equals 0 at sea-level\";\n"
+ + " String standard_name \"sea_water_pressure\";\n"
+ + " String units \"decibar\";\n"
+ + " Float32 valid_max 12000.0;\n"
+ + " Float32 valid_min 0.0;\n"
+ + " }\n"
+ + " pres_adjusted_qc {\n"
+ +
+ // " String actual_range \" \n4\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " pres_adjusted_error {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 2.4, 2.4;\n"
+ + " String C_format \"%7.1f\";\n"
+ + " Float64 colorBarMaximum 50.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F7.1\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
+ + " String units \"decibar\";\n"
+ + " }\n"
+ + " temp {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 3.466, 29.233;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 32.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Sea temperature in-situ ITS-90 scale\";\n"
+ + " String standard_name \"sea_water_temperature\";\n"
+ + " String units \"degree_Celsius\";\n"
+ + " Float32 valid_max 40.0;\n"
+ + " Float32 valid_min -2.5;\n"
+ + " }\n"
+ + " temp_qc {\n"
+ +
+ // " String actual_range \"1\n4\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " temp_adjusted {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 3.466, 29.233;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 32.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Sea temperature in-situ ITS-90 scale\";\n"
+ + " String standard_name \"sea_water_temperature\";\n"
+ + " String units \"degree_Celsius\";\n"
+ + " Float32 valid_max 40.0;\n"
+ + " Float32 valid_min -2.5;\n"
+ + " }\n"
+ + " temp_adjusted_qc {\n"
+ +
+ // " String actual_range \" \n4\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " temp_adjusted_error {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 0.002, 0.002;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 1.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
+ + " String units \"degree_Celsius\";\n"
+ + " }\n"
+ + " psal {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 34.161, 38.693;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 37.0;\n"
+ + " Float64 colorBarMinimum 32.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Salinity\";\n"
+ + " String long_name \"Practical salinity\";\n"
+ + " String standard_name \"sea_water_practical_salinity\";\n"
+ + " String units \"PSU\";\n"
+ + " Float32 valid_max 41.0;\n"
+ + " Float32 valid_min 2.0;\n"
+ + " }\n"
+ + " psal_qc {\n"
+ +
+ // " String actual_range \"1\n4\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " psal_adjusted {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 34.1611, 36.4901;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 37.0;\n"
+ + " Float64 colorBarMinimum 32.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Salinity\";\n"
+ + " String long_name \"Practical salinity\";\n"
+ + " String standard_name \"sea_water_practical_salinity\";\n"
+ + " String units \"PSU\";\n"
+ + " Float32 valid_max 41.0;\n"
+ + " Float32 valid_min 2.0;\n"
+ + " }\n"
+ + " psal_adjusted_qc {\n"
+ +
+ // " String actual_range \" \n4\";\n" +
+ " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " psal_adjusted_error {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 0.01, 0.01;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 1.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
+ + " String units \"psu\";\n"
+ + " }\n"
+ + " }\n"
+ + " NC_GLOBAL {\n"
+ + " String cdm_altitude_proxy \"pres\";\n"
+ + " String cdm_data_type \"TrajectoryProfile\";\n"
+ + " String cdm_profile_variables \"cycle_number, data_type, format_version, handbook_version, reference_date_time, date_creation, date_update, direction, data_center, dc_reference, data_state_indicator, data_mode, firmware_version, wmo_inst_type, time, time_qc, time_location, latitude, longitude, position_qc, positioning_system, profile_pres_qc, profile_temp_qc, profile_psal_qc, vertical_sampling_scheme\";\n"
+ + " String cdm_trajectory_variables \"platform_number, project_name, pi_name, platform_type, float_serial_no\";\n"
+ + " String Conventions \"Argo-3.1, CF-1.6, COARDS, ACDD-1.3\";\n"
+ + " String creator_email \"support@argo.net\";\n"
+ + " String creator_name \"Argo\";\n"
+ + " String creator_url \"http://www.argo.net/\";\n"
+ + " Float64 Easternmost_Easting 8.141141666666666;\n"
+ + " String featureType \"TrajectoryProfile\";\n"
+ + " Float64 geospatial_lat_max 42.68257;\n"
+ + " Float64 geospatial_lat_min 3.035;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max 8.141141666666666;\n"
+ + " Float64 geospatial_lon_min -27.099;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " String history \"";
tResults = results.substring(0, Math.min(results.length(), expected.length()));
Test.ensureEqual(tResults, expected, "\nresults=\n" + results);
// 2016-05-09T15:34:11Z (local files)
// 2016-05-09T15:34:11Z
// http://localhost:8080/cwexperimental/tabledap/testMultidimNc.das\";
- expected = "String infoUrl \"http://www.argo.net/\";\n" +
- " String institution \"Argo\";\n" +
- " String keywords \"adjusted, argo, array, assembly, best, centre, centres, charge, coded, CONFIG_MISSION_NUMBER, contains, coriolis, creation, currents, cycle, CYCLE_NUMBER, data, DATA_CENTRE, DATA_MODE, DATA_STATE_INDICATOR, DATA_TYPE, date, DATE_CREATION, DATE_UPDATE, day, days, DC_REFERENCE, degree, delayed, denoting, density, determined, direction, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Salinity/Density > Salinity, equals, error, estimate, file, firmware, FIRMWARE_VERSION, flag, float, FLOAT_SERIAL_NO, format, FORMAT_VERSION, gdac, geostrophic, global, handbook, HANDBOOK_VERSION, have, identifier, in-situ, instrument, investigator, its, its-90, JULD, JULD_LOCATION, JULD_QC, julian, latitude, level, longitude, missions, mode, name, number, ocean, oceanography, oceans, passed, performed, PI_NAME, PLATFORM_NUMBER, PLATFORM_TYPE, position, POSITION_QC, positioning, POSITIONING_SYSTEM, practical, pres, PRES_ADJUSTED, PRES_ADJUSTED_ERROR, PRES_ADJUSTED_QC, PRES_QC, pressure, principal, process, processing, profile, PROFILE_PRES_QC, PROFILE_PSAL_QC, PROFILE_TEMP_QC, profiles, project, PROJECT_NAME, psal, PSAL_ADJUSTED, PSAL_ADJUSTED_ERROR, PSAL_ADJUSTED_QC, PSAL_QC, quality, rdac, real, real time, real-time, realtime, reference, REFERENCE_DATE_TIME, regional, relative, salinity, sampling, scale, scheme, sea, sea level, sea-level, sea_water_practical_salinity, sea_water_pressure, sea_water_temperature, seawater, serial, situ, station, statistics, system, TEMP, TEMP_ADJUSTED, TEMP_ADJUSTED_ERROR, TEMP_ADJUSTED_QC, TEMP_QC, temperature, through, time, type, unique, update, values, version, vertical, VERTICAL_SAMPLING_SCHEME, water, WMO_INST_TYPE\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String license \"The data may be used and redistributed for free but is not intended\n" +
- "for legal use, since it may contain inaccuracies. Neither the data\n" +
- "Contributor, ERD, NOAA, nor the United States Government, nor any\n" +
- "of their employees or contractors, makes any warranty, express or\n" +
- "implied, including warranties of merchantability and fitness for a\n" +
- "particular purpose, or assumes any legal liability for the accuracy,\n" +
- "completeness, or usefulness, of this information.\";\n" +
- " Float64 Northernmost_Northing 42.68257;\n" +
- " String references \"http://www.argodatamgt.org/Documentation\";\n" +
- " String source \"Argo float\";\n" +
- " String sourceUrl \"(local files)\";\n" +
- " Float64 Southernmost_Northing 3.035;\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v70\";\n" +
- // " String subsetVariables \"platform_number, project_name, pi_name,
- // platform_type, float_serial_no, cycle_number, data_type, format_version,
- // handbook_version, reference_date_time, date_creation, date_update, direction,
- // data_center, dc_reference, data_state_indicator, data_mode, firmware_version,
- // wmo_inst_type, time, time_qc, time_location, latitude, longitude,
- // position_qc, positioning_system, profile_pres_qc, profile_temp_qc,
- // profile_psal_qc, vertical_sampling_scheme\";\n" +
- " String summary \"Argo float vertical profiles from Coriolis Global Data Assembly Centres\n" +
- "(GDAC). Argo is an international collaboration that collects high-quality\n" +
- "temperature and salinity profiles from the upper 2000m of the ice-free\n" +
- "global ocean and currents from intermediate depths. The data come from\n" +
- "battery-powered autonomous floats that spend most of their life drifting\n" +
- "at depth where they are stabilised by being neutrally buoyant at the\n" +
- "\\\"parking depth\\\" pressure by having a density equal to the ambient pressure\n" +
- "and a compressibility that is less than that of sea water. At present there\n" +
- "are several models of profiling float used in Argo. All work in a similar\n" +
- "fashion but differ somewhat in their design characteristics. At typically\n" +
- "10-day intervals, the floats pump fluid into an external bladder and rise\n" +
- "to the surface over about 6 hours while measuring temperature and salinity.\n" +
- "Satellites or GPS determine the position of the floats when they surface,\n" +
- "and the floats transmit their data to the satellites. The bladder then\n" +
- "deflates and the float returns to its original density and sinks to drift\n" +
- "until the cycle is repeated. Floats are designed to make about 150 such\n" +
- "cycles.\n" +
- "Data Management URL: http://www.argodatamgt.org/Documentation\";\n" +
- " String time_coverage_end \"2017-04-03T09:09:32Z\";\n" +
- " String time_coverage_start \"2014-04-09T08:58:29Z\";\n" +
- " String title \"Argo Float Vertical Profiles\";\n" +
- " String user_manual_version \"3.1\";\n" +
- " Float64 Westernmost_Easting -27.099;\n" +
- " }\n" +
- "}\n";
+ expected =
+ "String infoUrl \"http://www.argo.net/\";\n"
+ + " String institution \"Argo\";\n"
+ + " String keywords \"adjusted, argo, array, assembly, best, centre, centres, charge, coded, CONFIG_MISSION_NUMBER, contains, coriolis, creation, currents, cycle, CYCLE_NUMBER, data, DATA_CENTRE, DATA_MODE, DATA_STATE_INDICATOR, DATA_TYPE, date, DATE_CREATION, DATE_UPDATE, day, days, DC_REFERENCE, degree, delayed, denoting, density, determined, direction, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Salinity/Density > Salinity, equals, error, estimate, file, firmware, FIRMWARE_VERSION, flag, float, FLOAT_SERIAL_NO, format, FORMAT_VERSION, gdac, geostrophic, global, handbook, HANDBOOK_VERSION, have, identifier, in-situ, instrument, investigator, its, its-90, JULD, JULD_LOCATION, JULD_QC, julian, latitude, level, longitude, missions, mode, name, number, ocean, oceanography, oceans, passed, performed, PI_NAME, PLATFORM_NUMBER, PLATFORM_TYPE, position, POSITION_QC, positioning, POSITIONING_SYSTEM, practical, pres, PRES_ADJUSTED, PRES_ADJUSTED_ERROR, PRES_ADJUSTED_QC, PRES_QC, pressure, principal, process, processing, profile, PROFILE_PRES_QC, PROFILE_PSAL_QC, PROFILE_TEMP_QC, profiles, project, PROJECT_NAME, psal, PSAL_ADJUSTED, PSAL_ADJUSTED_ERROR, PSAL_ADJUSTED_QC, PSAL_QC, quality, rdac, real, real time, real-time, realtime, reference, REFERENCE_DATE_TIME, regional, relative, salinity, sampling, scale, scheme, sea, sea level, sea-level, sea_water_practical_salinity, sea_water_pressure, sea_water_temperature, seawater, serial, situ, station, statistics, system, TEMP, TEMP_ADJUSTED, TEMP_ADJUSTED_ERROR, TEMP_ADJUSTED_QC, TEMP_QC, temperature, through, time, type, unique, update, values, version, vertical, VERTICAL_SAMPLING_SCHEME, water, WMO_INST_TYPE\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String license \"The data may be used and redistributed for free but is not intended\n"
+ + "for legal use, since it may contain inaccuracies. Neither the data\n"
+ + "Contributor, ERD, NOAA, nor the United States Government, nor any\n"
+ + "of their employees or contractors, makes any warranty, express or\n"
+ + "implied, including warranties of merchantability and fitness for a\n"
+ + "particular purpose, or assumes any legal liability for the accuracy,\n"
+ + "completeness, or usefulness, of this information.\";\n"
+ + " Float64 Northernmost_Northing 42.68257;\n"
+ + " String references \"http://www.argodatamgt.org/Documentation\";\n"
+ + " String source \"Argo float\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " Float64 Southernmost_Northing 3.035;\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v70\";\n"
+ +
+ // " String subsetVariables \"platform_number, project_name, pi_name,
+ // platform_type, float_serial_no, cycle_number, data_type, format_version,
+ // handbook_version, reference_date_time, date_creation, date_update, direction,
+ // data_center, dc_reference, data_state_indicator, data_mode, firmware_version,
+ // wmo_inst_type, time, time_qc, time_location, latitude, longitude,
+ // position_qc, positioning_system, profile_pres_qc, profile_temp_qc,
+ // profile_psal_qc, vertical_sampling_scheme\";\n" +
+ " String summary \"Argo float vertical profiles from Coriolis Global Data Assembly Centres\n"
+ + "(GDAC). Argo is an international collaboration that collects high-quality\n"
+ + "temperature and salinity profiles from the upper 2000m of the ice-free\n"
+ + "global ocean and currents from intermediate depths. The data come from\n"
+ + "battery-powered autonomous floats that spend most of their life drifting\n"
+ + "at depth where they are stabilised by being neutrally buoyant at the\n"
+ + "\\\"parking depth\\\" pressure by having a density equal to the ambient pressure\n"
+ + "and a compressibility that is less than that of sea water. At present there\n"
+ + "are several models of profiling float used in Argo. All work in a similar\n"
+ + "fashion but differ somewhat in their design characteristics. At typically\n"
+ + "10-day intervals, the floats pump fluid into an external bladder and rise\n"
+ + "to the surface over about 6 hours while measuring temperature and salinity.\n"
+ + "Satellites or GPS determine the position of the floats when they surface,\n"
+ + "and the floats transmit their data to the satellites. The bladder then\n"
+ + "deflates and the float returns to its original density and sinks to drift\n"
+ + "until the cycle is repeated. Floats are designed to make about 150 such\n"
+ + "cycles.\n"
+ + "Data Management URL: http://www.argodatamgt.org/Documentation\";\n"
+ + " String time_coverage_end \"2017-04-03T09:09:32Z\";\n"
+ + " String time_coverage_start \"2014-04-09T08:58:29Z\";\n"
+ + " String title \"Argo Float Vertical Profiles\";\n"
+ + " String user_manual_version \"3.1\";\n"
+ + " Float64 Westernmost_Easting -27.099;\n"
+ + " }\n"
+ + "}\n";
int tPo = results.indexOf(expected.substring(0, 40));
Test.ensureTrue(tPo >= 0, "tPo=-1 results=\n" + results);
Test.ensureEqual(
results.substring(tPo, Math.min(results.length(), tPo + expected.length())),
- expected, "results=\n" + results);
+ expected,
+ "results=\n" + results);
// *** test getting dds for entire dataset
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_Entire", ".dds");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_Entire", ".dds");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "Dataset {\n" +
- " Sequence {\n" +
- " String fileNumber;\n" +
- " String data_type;\n" +
- " String format_version;\n" +
- " String handbook_version;\n" +
- " Float64 reference_date_time;\n" +
- " Float64 date_creation;\n" +
- " Float64 date_update;\n" +
- " String platform_number;\n" +
- " String project_name;\n" +
- " String pi_name;\n" +
- " Int32 cycle_number;\n" +
- " String direction;\n" +
- " String data_center;\n" +
- " String dc_reference;\n" +
- " String data_state_indicator;\n" +
- " String data_mode;\n" +
- " String platform_type;\n" +
- " String float_serial_no;\n" +
- " String firmware_version;\n" +
- " String wmo_inst_type;\n" +
- " Float64 time;\n" +
- " String time_qc;\n" +
- " Float64 time_location;\n" +
- " Float64 latitude;\n" +
- " Float64 longitude;\n" +
- " String position_qc;\n" +
- " String positioning_system;\n" +
- " String profile_pres_qc;\n" +
- " String profile_temp_qc;\n" +
- " String profile_psal_qc;\n" +
- " String vertical_sampling_scheme;\n" +
- " Int32 config_mission_number;\n" +
- " Float32 pres;\n" +
- " String pres_qc;\n" +
- " Float32 pres_adjusted;\n" +
- " String pres_adjusted_qc;\n" +
- " Float32 pres_adjusted_error;\n" +
- " Float32 temp;\n" +
- " String temp_qc;\n" +
- " Float32 temp_adjusted;\n" +
- " String temp_adjusted_qc;\n" +
- " Float32 temp_adjusted_error;\n" +
- " Float32 psal;\n" +
- " String psal_qc;\n" +
- " Float32 psal_adjusted;\n" +
- " String psal_adjusted_qc;\n" +
- " Float32 psal_adjusted_error;\n" +
- " } s;\n" +
- "} s;\n";
+ expected =
+ "Dataset {\n"
+ + " Sequence {\n"
+ + " String fileNumber;\n"
+ + " String data_type;\n"
+ + " String format_version;\n"
+ + " String handbook_version;\n"
+ + " Float64 reference_date_time;\n"
+ + " Float64 date_creation;\n"
+ + " Float64 date_update;\n"
+ + " String platform_number;\n"
+ + " String project_name;\n"
+ + " String pi_name;\n"
+ + " Int32 cycle_number;\n"
+ + " String direction;\n"
+ + " String data_center;\n"
+ + " String dc_reference;\n"
+ + " String data_state_indicator;\n"
+ + " String data_mode;\n"
+ + " String platform_type;\n"
+ + " String float_serial_no;\n"
+ + " String firmware_version;\n"
+ + " String wmo_inst_type;\n"
+ + " Float64 time;\n"
+ + " String time_qc;\n"
+ + " Float64 time_location;\n"
+ + " Float64 latitude;\n"
+ + " Float64 longitude;\n"
+ + " String position_qc;\n"
+ + " String positioning_system;\n"
+ + " String profile_pres_qc;\n"
+ + " String profile_temp_qc;\n"
+ + " String profile_psal_qc;\n"
+ + " String vertical_sampling_scheme;\n"
+ + " Int32 config_mission_number;\n"
+ + " Float32 pres;\n"
+ + " String pres_qc;\n"
+ + " Float32 pres_adjusted;\n"
+ + " String pres_adjusted_qc;\n"
+ + " Float32 pres_adjusted_error;\n"
+ + " Float32 temp;\n"
+ + " String temp_qc;\n"
+ + " Float32 temp_adjusted;\n"
+ + " String temp_adjusted_qc;\n"
+ + " Float32 temp_adjusted_error;\n"
+ + " Float32 psal;\n"
+ + " String psal_qc;\n"
+ + " Float32 psal_adjusted;\n"
+ + " String psal_adjusted_qc;\n"
+ + " Float32 psal_adjusted_error;\n"
+ + " } s;\n"
+ + "} s;\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// *** test make data files
String2.log("\n****************** EDDTableFromMultidimNcFiles.test make DATA FILES\n");
// .csv for one lat,lon 26.587,154.853
- userDapQuery = "" +
- // "&longitude=154.853&latitude=26.587";
- // "&latitude=42.50334333333333&longitude=7.837398333333333";
- "&pres=804.9&cycle_number=53";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_1profile", ".csv");
+ userDapQuery =
+ ""
+ +
+ // "&longitude=154.853&latitude=26.587";
+ // "&latitude=42.50334333333333&longitude=7.837398333333333";
+ "&pres=804.9&cycle_number=53";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_1profile", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc,positioning_system,profile_pres_qc,profile_temp_qc,profile_psal_qc,vertical_sampling_scheme,config_mission_number,pres,pres_qc,pres_adjusted,pres_adjusted_qc,pres_adjusted_error,temp,temp_qc,temp_adjusted,temp_adjusted_qc,temp_adjusted_error,psal,psal_qc,psal_adjusted,psal_adjusted_qc,psal_adjusted_error\n"
- +
- ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,,,,,,,,decibar,,decibar,,decibar,degree_Celsius,,degree_Celsius,,degree_Celsius,PSU,,PSU,,psu\n"
- +
- "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,804.9,1,NaN,\" \",NaN,13.348,1,NaN,\" \",NaN,38.556,1,NaN,\" \",NaN\n";
+ expected =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc,positioning_system,profile_pres_qc,profile_temp_qc,profile_psal_qc,vertical_sampling_scheme,config_mission_number,pres,pres_qc,pres_adjusted,pres_adjusted_qc,pres_adjusted_error,temp,temp_qc,temp_adjusted,temp_adjusted_qc,temp_adjusted_error,psal,psal_qc,psal_adjusted,psal_adjusted_qc,psal_adjusted_error\n"
+ + ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,,,,,,,,decibar,,decibar,,decibar,degree_Celsius,,degree_Celsius,,degree_Celsius,PSU,,PSU,,psu\n"
+ + "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,804.9,1,NaN,\" \",NaN,13.348,1,NaN,\" \",NaN,38.556,1,NaN,\" \",NaN\n";
// "2901175,Argo
// profile,3.1,1.2,1950-01-01T00:00:00Z,2009-04-22T12:19:13Z,2016-04-15T20:47:22Z,2901175,CHINA
// ARGO PROJECT,JIANPING
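The hunk above reformats, without changing, this test's core fetch-and-compare idiom. Condensed from the lines above (the .das response ends with a run-dependent history timestamp, hence the prefix-only comparison):

// Write the response for a DAP query to a file, read it back, and compare
// only the first expected.length() characters; the tail varies per run.
tName =
    eddTable.makeNewFileForDapQuery(
        language, null, null, "", dir, eddTable.className() + "_Entire", ".das");
results = File2.directReadFrom88591File(dir + tName);
tResults = results.substring(0, Math.min(results.length(), expected.length()));
Test.ensureEqual(tResults, expected, "\nresults=\n" + results);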
@@ -1490,22 +1523,27 @@ void testBasic(boolean deleteCachedInfo) throws Throwable {
// expected, "\nresults=\n" + results);
// .csv for one lat,lon via lon > <
- userDapQuery = "" +
- "&longitude>7&longitude<=154.854&cycle_number=53&pres>970";
+ userDapQuery = "" + "&longitude>7&longitude<=154.854&cycle_number=53&pres>970";
// "&longitude>154.852&longitude<=154.854";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_1StationGTLT", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ dir,
+ eddTable.className() + "_1StationGTLT",
+ ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc,positioning_system,profile_pres_qc,profile_temp_qc,profile_psal_qc,vertical_sampling_scheme,config_mission_number,pres,pres_qc,pres_adjusted,pres_adjusted_qc,pres_adjusted_error,temp,temp_qc,temp_adjusted,temp_adjusted_qc,temp_adjusted_error,psal,psal_qc,psal_adjusted,psal_adjusted_qc,psal_adjusted_error\n"
- +
- ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,,,,,,,,decibar,,decibar,,decibar,degree_Celsius,,degree_Celsius,,degree_Celsius,PSU,,PSU,,psu\n"
- +
- "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,974.9,1,NaN,\" \",NaN,13.227,1,NaN,\" \",NaN,38.522,1,NaN,\" \",NaN\n"
- + //
- "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,985.1,1,NaN,\" \",NaN,13.226,1,NaN,\" \",NaN,38.521,1,NaN,\" \",NaN\n"
- + //
- "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,990.1,1,NaN,\" \",NaN,13.226,1,NaN,\" \",NaN,38.522,1,NaN,\" \",NaN\n";
+ expected =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc,positioning_system,profile_pres_qc,profile_temp_qc,profile_psal_qc,vertical_sampling_scheme,config_mission_number,pres,pres_qc,pres_adjusted,pres_adjusted_qc,pres_adjusted_error,temp,temp_qc,temp_adjusted,temp_adjusted_qc,temp_adjusted_error,psal,psal_qc,psal_adjusted,psal_adjusted_qc,psal_adjusted_error\n"
+ + ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,,,,,,,,decibar,,decibar,,decibar,degree_Celsius,,degree_Celsius,,degree_Celsius,PSU,,PSU,,psu\n"
+ + "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,974.9,1,NaN,\" \",NaN,13.227,1,NaN,\" \",NaN,38.522,1,NaN,\" \",NaN\n"
+ + //
+ "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,985.1,1,NaN,\" \",NaN,13.226,1,NaN,\" \",NaN,38.521,1,NaN,\" \",NaN\n"
+ + //
+ "6902733,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2016-06-30T20:47:08Z,2017-03-31T23:27:10Z,6902733,NAOS,Fabrizio D'ortenzio,53,A,IF,,2B,R,PROVOR_III,OIN-12-RA-S31-001,1.07,836,2017-03-31T15:35:00Z,1,2017-03-31T16:04:13Z,42.50334333333333,7.837398333333333,1,GPS,A,A,A,\"Primary sampling: averaged [2s sampling, 10dbar average from 1000dbar to 250dbar;2s samp., 1dbar avg from 250dbar to 10dbar;2s samp., 1dbar avg from 10dbar to 2.2dbar]\",12,990.1,1,NaN,\" \",NaN,13.226,1,NaN,\" \",NaN,38.522,1,NaN,\" \",NaN\n";
// "2901175,Argo
// profile,3.1,1.2,1950-01-01T00:00:00Z,2009-04-22T12:19:13Z,2016-04-15T20:47:22Z,2901175,CHINA
// ARGO PROJECT,JIANPING
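The remaining hunk exercises two more tabledap query forms before moving on to the SeaDataNet generateDatasetsXml test. For reference, the constraint grammar as used in this file; the query strings are copied from this diff, and the result notes are paraphrased from its expected fixtures:

// Exact-match and range constraints are &-separated clauses appended after
// the (possibly empty) projected-variable list.
String exactQuery = "&pres=804.9&cycle_number=53";
String rangeQuery = "&longitude>7&longitude<=154.854&cycle_number=53&pres>970";
// =~ filters rows by regex; distinct() returns sorted unique values.
String scalarQuery = "data_type&data_type=~\".*go.*\"";     // both rows: "Argo profile"
String distinctQuery = "pres&pres>10&pres<10.5&distinct()"; // 10.1, 10.2, 10.3, 10.4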
@@ -1551,493 +1589,488 @@ void testBasic(boolean deleteCachedInfo) throws Throwable {
// .csv for test requesting scalar var
userDapQuery = "data_type&data_type=~\".*go.*\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_scalar", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_scalar", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "data_type\n" +
- "\n" +
- "Argo profile\n" +
- "Argo profile\n";
+ expected = "data_type\n" + "\n" + "Argo profile\n" + "Argo profile\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// .csv for test requesting distinct
userDapQuery = "pres&pres>10&pres<10.5&distinct()";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_scalar", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_scalar", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "pres\n" +
- "decibar\n" +
- "10.1\n" +
- "10.2\n" +
- "10.3\n" +
- "10.4\n";
+ expected = "pres\n" + "decibar\n" + "10.1\n" + "10.2\n" + "10.3\n" + "10.4\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
}
/**
- * testGenerateDatasetsXml with a SeaDataNet file, specifically the
- * generation of sdn_P02_urn from sdn_parameter_urn attributes.
+ * testGenerateDatasetsXml with a SeaDataNet file, specifically the generation of sdn_P02_urn from
+ * sdn_parameter_urn attributes.
*/
@org.junit.jupiter.api.Test
void testGenerateDatasetsXmlSeaDataNet() throws Throwable {
// testVerboseOn();
// debugMode = true;
- String dataDir = File2.addSlash(Path.of(
- EDDTableFromMultidimNcFilesTests.class.getResource("/data/sdn/").toURI()).toString());
+ String dataDir =
+ File2.addSlash(
+ Path.of(EDDTableFromMultidimNcFilesTests.class.getResource("/data/sdn/").toURI())
+ .toString());
String fileNameRegex = "netCDF_timeseries_tidegauge\\.nc";
String useDimensionsCSV = "INSTANCE, MAXT";
- String results = EDDTableFromMultidimNcFiles.generateDatasetsXml(
- dataDir,
- fileNameRegex,
- "",
- useDimensionsCSV, // dimensions
- 1440,
- "", "", "", "", // just for test purposes; station is already a column in the file
- true, // removeMVRows
- "", // sortFilesBy
- "", "", "", "",
- -1, // defaultStandardizeWhat
- "", // treatDimensionsAs
- null, // cacheFromUrl
- null);
+ String results =
+ EDDTableFromMultidimNcFiles.generateDatasetsXml(
+ dataDir,
+ fileNameRegex,
+ "",
+ useDimensionsCSV, // dimensions
+ 1440,
+ "",
+ "",
+ "",
+ "", // just for test purposes; station is already a column in the file
+ true, // removeMVRows
+ "", // sortFilesBy
+ "",
+ "",
+ "",
+ "",
+ -1, // defaultStandardizeWhat
+ "", // treatDimensionsAs
+ null, // cacheFromUrl
+ null);
String2.setClipboardString(results);
- String tDatasetID = EDDTableFromMultidimNcFiles.suggestDatasetID(dataDir + fileNameRegex + useDimensionsCSV);
- String expected = "\n"
- +
- " 1440\n" +
- " 10000\n" +
- " " + dataDir + "\n" +
- " " + fileNameRegex + "\n" +
- " true\n" +
- " .*\n" +
- " last\n" +
- " 0\n" +
- " true\n" +
- " \n" +
- " false\n" +
- " \n" +
- " \n" +
- " \n" +
- " TimeSeries\n" +
- " station_id, latitude, longitude, ???\n" +
- " SeaDataNet_1.0, CF-1.10, COARDS, ACDD-1.3\n" +
- " SeaDataNet\n" +
- " https://www.seadatanet.org/\n" +
- " https://www.seadatanet.org/\n" +
- " SeaDataNet\n" +
- " above, ASLVZZ01, ASLVZZ01_SEADATANET_QC, bathymetric, bathymetry, below, cdi, code, common, crs, data, depth, DEPTH_SEADATANET_QC, directory, earth, Earth Science > Oceans > Bathymetry/Seafloor Topography > Bathymetry, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Sea Surface Topography > Sea Surface Height, Earth Science > Solid Earth > Geodetics/Gravity > Geoid Properties, european, flag, floor, format, generated, geodetics, geoid, gravity, height, identifier, latitude, level, list, longitude, marine, measurement, nemo, network, numbers, ocean, oceans, organisations, POSITION_SEADATANET_QC, PRESPR01, PRESPR01_SEADATANET_QC, pressure, properties, quality, science, SDN_BOT_DEPTH, SDN_CRUISE, SDN_EDMO_CODE, SDN_LOCAL_CDI_ID, SDN_STATION, sea, sea level, sea_floor_depth_below_sea_surface, sea_surface_height_above_geoid, sea_water_pressure, sea_water_temperature, seadatanet, seafloor, seawater, site, solid, station, statistics, supplier, surface, suva, temperature, TEMPPR01, TEMPPR01_SEADATANET_QC, time, TIME_SEADATANET_QC, timeseries, topography, version, water\n"
- +
- " GCMD Science Keywords\n" +
- " [standard]\n" +
- " (local files)\n" +
- " CF Standard Name Table v70\n" +
- " SDN_EDMO_CODE, SDN_CRUISE, SDN_STATION, SDN_LOCAL_CDI_ID, SDN_BOT_DEPTH, longitude, latitude, POSITION_SEADATANET_QC, crs, TIME_SEADATANET_QC, depth, DEPTH_SEADATANET_QC, ASLVZZ01_SEADATANET_QC, TEMPPR01_SEADATANET_QC, PRESPR01_SEADATANET_QC\n"
- +
- " Network Common Data Format (NetCDF) TIMESERIES - Generated by NEMO, version 1.6.0\n"
- +
- " NetCDF TIMESERIES, Generated by NEMO, version 1.6.0\n" +
- " \n" +
- " \n" +
- " SDN_EDMO_CODE\n" +
- " SDN_EDMO_CODE\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 2147483647\n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " SDN_CRUISE\n" +
- " SDN_CRUISE\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " SDN_STATION\n" +
- " SDN_STATION\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " SDN_LOCAL_CDI_ID\n" +
- " SDN_LOCAL_CDI_ID\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " SDN_BOT_DEPTH\n" +
- " SDN_BOT_DEPTH\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 8000.0\n" +
- " -8000.0\n" +
- " TopographyDepth\n" +
- " Bathymetry\n" +
- " SDN:P02::MBAN\n" +
- " \n" +
- " \n" +
- " \n" +
- " LONGITUDE\n" +
- " longitude\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 180.0\n" +
- " -180.0\n" +
- " null\n" +
- " Location\n" +
- " SDN:P02::ALAT\n" +
- " \n" +
- " \n" +
- " \n" +
- " LATITUDE\n" +
- " latitude\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 90.0\n" +
- " -90.0\n" +
- " null\n" +
- " Location\n" +
- " SDN:P02::ALAT\n" +
- " \n" +
- " \n" +
- " \n" +
- " POSITION_SEADATANET_QC\n" +
- " POSITION_SEADATANET_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 80.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " crs\n" +
- " crs\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 2147483647\n" +
- " Unknown\n" +
- " CRS\n" +
- " \n" +
- " \n" +
- " \n" +
- " TIME\n" +
- " time\n" +
- " double\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " SDN:P02::AYMD\n" +
- " days since -4712-01-01T00:00:00Z\n" +
- " \n" +
- " \n" +
- " \n" +
- " TIME_SEADATANET_QC\n" +
- " TIME_SEADATANET_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 80.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " DEPTH\n" +
- " depth\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 8000.0\n" +
- " -8000.0\n" +
- " TopographyDepth\n" +
- " Location\n" +
- " SDN:P02::AHGT\n" +
- " m\n" +
- " \n" +
- " \n" +
- " \n" +
- " DEPTH_SEADATANET_QC\n" +
- " DEPTH_SEADATANET_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 80.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " depth\n" +
- " \n" +
- " \n" +
- " \n" +
- " ASLVZZ01\n" +
- " ASLVZZ01\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 2.0\n" +
- " -2.0\n" +
- " null\n" +
- " Sea Level\n" +
- " SDN:P02::ASLV\n" +
- " \n" +
- " \n" +
- " \n" +
- " ASLVZZ01_SEADATANET_QC\n" +
- " ASLVZZ01_SEADATANET_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 80.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMPPR01\n" +
- " TEMPPR01\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 32.0\n" +
- " 0.0\n" +
- " null\n" +
- " Temperature\n" +
- " SDN:P02::TEMP\n" +
- " degree_C\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMPPR01_SEADATANET_QC\n" +
- " TEMPPR01_SEADATANET_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 80.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " PRESPR01\n" +
- " PRESPR01\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 5000.0\n" +
- " 0.0\n" +
- " null\n" +
- " Pressure\n" +
- " SDN:P02::AHGT\n" +
- " decibar\n" +
- " \n" +
- " \n" +
- " \n" +
- " PRESPR01_SEADATANET_QC\n" +
- " PRESPR01_SEADATANET_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 80.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- "\n" +
- "\n";
+ String tDatasetID =
+ EDDTableFromMultidimNcFiles.suggestDatasetID(dataDir + fileNameRegex + useDimensionsCSV);
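+    // suggestDatasetID derives a repeatable datasetID from this combined string,
+    // so the test doesn't have to hard-code the generated ID.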
+ String expected =
+ "\n"
+ + " 1440\n"
+ + " 10000\n"
+ + " "
+ + dataDir
+ + "\n"
+ + " "
+ + fileNameRegex
+ + "\n"
+ + " true\n"
+ + " .*\n"
+ + " last\n"
+ + " 0\n"
+ + " true\n"
+ + " \n"
+ + " false\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TimeSeries\n"
+ + " station_id, latitude, longitude, ???\n"
+ + " SeaDataNet_1.0, CF-1.10, COARDS, ACDD-1.3\n"
+ + " SeaDataNet\n"
+ + " https://www.seadatanet.org/\n"
+ + " https://www.seadatanet.org/\n"
+ + " SeaDataNet\n"
+ + " above, ASLVZZ01, ASLVZZ01_SEADATANET_QC, bathymetric, bathymetry, below, cdi, code, common, crs, data, depth, DEPTH_SEADATANET_QC, directory, earth, Earth Science > Oceans > Bathymetry/Seafloor Topography > Bathymetry, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Sea Surface Topography > Sea Surface Height, Earth Science > Solid Earth > Geodetics/Gravity > Geoid Properties, european, flag, floor, format, generated, geodetics, geoid, gravity, height, identifier, latitude, level, list, longitude, marine, measurement, nemo, network, numbers, ocean, oceans, organisations, POSITION_SEADATANET_QC, PRESPR01, PRESPR01_SEADATANET_QC, pressure, properties, quality, science, SDN_BOT_DEPTH, SDN_CRUISE, SDN_EDMO_CODE, SDN_LOCAL_CDI_ID, SDN_STATION, sea, sea level, sea_floor_depth_below_sea_surface, sea_surface_height_above_geoid, sea_water_pressure, sea_water_temperature, seadatanet, seafloor, seawater, site, solid, station, statistics, supplier, surface, suva, temperature, TEMPPR01, TEMPPR01_SEADATANET_QC, time, TIME_SEADATANET_QC, timeseries, topography, version, water\n"
+ + " GCMD Science Keywords\n"
+ + " [standard]\n"
+ + " (local files)\n"
+ + " CF Standard Name Table v70\n"
+ + " SDN_EDMO_CODE, SDN_CRUISE, SDN_STATION, SDN_LOCAL_CDI_ID, SDN_BOT_DEPTH, longitude, latitude, POSITION_SEADATANET_QC, crs, TIME_SEADATANET_QC, depth, DEPTH_SEADATANET_QC, ASLVZZ01_SEADATANET_QC, TEMPPR01_SEADATANET_QC, PRESPR01_SEADATANET_QC\n"
+ + " Network Common Data Format (NetCDF) TIMESERIES - Generated by NEMO, version 1.6.0\n"
+ + " NetCDF TIMESERIES, Generated by NEMO, version 1.6.0\n"
+ + " \n"
+ + " \n"
+ + " SDN_EDMO_CODE\n"
+ + " SDN_EDMO_CODE\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 2147483647\n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " SDN_CRUISE\n"
+ + " SDN_CRUISE\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " SDN_STATION\n"
+ + " SDN_STATION\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " SDN_LOCAL_CDI_ID\n"
+ + " SDN_LOCAL_CDI_ID\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " SDN_BOT_DEPTH\n"
+ + " SDN_BOT_DEPTH\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 8000.0\n"
+ + " -8000.0\n"
+ + " TopographyDepth\n"
+ + " Bathymetry\n"
+ + " SDN:P02::MBAN\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " LONGITUDE\n"
+ + " longitude\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 180.0\n"
+ + " -180.0\n"
+ + " null\n"
+ + " Location\n"
+ + " SDN:P02::ALAT\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " LATITUDE\n"
+ + " latitude\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 90.0\n"
+ + " -90.0\n"
+ + " null\n"
+ + " Location\n"
+ + " SDN:P02::ALAT\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " POSITION_SEADATANET_QC\n"
+ + " POSITION_SEADATANET_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 80.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " crs\n"
+ + " crs\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 2147483647\n"
+ + " Unknown\n"
+ + " CRS\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TIME\n"
+ + " time\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " SDN:P02::AYMD\n"
+ + " days since -4712-01-01T00:00:00Z\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TIME_SEADATANET_QC\n"
+ + " TIME_SEADATANET_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 80.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DEPTH\n"
+ + " depth\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 8000.0\n"
+ + " -8000.0\n"
+ + " TopographyDepth\n"
+ + " Location\n"
+ + " SDN:P02::AHGT\n"
+ + " m\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DEPTH_SEADATANET_QC\n"
+ + " DEPTH_SEADATANET_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 80.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " depth\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ASLVZZ01\n"
+ + " ASLVZZ01\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 2.0\n"
+ + " -2.0\n"
+ + " null\n"
+ + " Sea Level\n"
+ + " SDN:P02::ASLV\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ASLVZZ01_SEADATANET_QC\n"
+ + " ASLVZZ01_SEADATANET_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 80.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMPPR01\n"
+ + " TEMPPR01\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 32.0\n"
+ + " 0.0\n"
+ + " null\n"
+ + " Temperature\n"
+ + " SDN:P02::TEMP\n"
+ + " degree_C\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMPPR01_SEADATANET_QC\n"
+ + " TEMPPR01_SEADATANET_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 80.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PRESPR01\n"
+ + " PRESPR01\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 5000.0\n"
+ + " 0.0\n"
+ + " null\n"
+ + " Pressure\n"
+ + " SDN:P02::AHGT\n"
+ + " decibar\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " PRESPR01_SEADATANET_QC\n"
+ + " PRESPR01_SEADATANET_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 80.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + "\n"
+ + "\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// Test.ensureEqual(results.substring(0, Math.min(results.length(),
// expected.length())),
@@ -2046,443 +2079,469 @@ void testGenerateDatasetsXmlSeaDataNet() throws Throwable {
}
/**
- * testGenerateDatasetsXml with treatDimensionsAs and standadizeWhat.
- * This doesn't test suggestTestOutOfDate, except that for old data
- * it doesn't suggest anything.
+ * testGenerateDatasetsXml with treatDimensionsAs and standardizeWhat. This doesn't test
+ * suggestTestOutOfDate, except that for old data it doesn't suggest anything.
*/
@org.junit.jupiter.api.Test
void testGenerateDatasetsXmlDimensions() throws Throwable {
// testVerboseOn();
- String dataDir = File2.addSlash(Path.of(
- EDDTableFromMultidimNcFilesTests.class.getResource("/data/nc/").toURI()).toString());
+ String dataDir =
+ File2.addSlash(
+ Path.of(EDDTableFromMultidimNcFilesTests.class.getResource("/data/nc/").toURI())
+ .toString());
String fileNameRegex = "GL_.*44761\\.nc";
String useDimensionsCSV = "TIME, DEPTH";
- String results = EDDTableFromMultidimNcFiles.generateDatasetsXml(
- dataDir,
- fileNameRegex,
- "",
- useDimensionsCSV,
- 1440,
- "", "", "", "", // just for test purposes
- false, // removeMVRows
- "TIME", // sort files by
- "", "", "", "",
- 4355, // standardizeWhat 1+2(numericTime)+256(catch numeric mv)+4096(units)
- "LATITUDE, LONGITUDE, TIME", // treatDimensionsAs
- null, // cacheFromUrl
- null) + "\n";
+ String results =
+ EDDTableFromMultidimNcFiles.generateDatasetsXml(
+ dataDir,
+ fileNameRegex,
+ "",
+ useDimensionsCSV,
+ 1440,
+ "",
+ "",
+ "",
+ "", // just for test purposes
+ false, // removeMVRows
+ "TIME", // sort files by
+ "",
+ "",
+ "",
+ "",
+ 4355, // standardizeWhat 1+2(numericTime)+256(catch numeric mv)+4096(units)
+ "LATITUDE, LONGITUDE, TIME", // treatDimensionsAs
+ null, // cacheFromUrl
+ null)
+ + "\n";
- String tDatasetID = EDDTableFromMultidimNcFiles.suggestDatasetID(dataDir + fileNameRegex + useDimensionsCSV);
- String expected = "\n"
- +
- " 1440\n" +
- " 10000\n" +
- " " + dataDir + "\n" +
- " " + fileNameRegex + "\n" +
- " true\n" +
- " .*\n" +
- " last\n" +
- " 4355\n" +
- " false\n" +
- " TIME\n" +
- " false\n" +
- " \n" +
- " \n" +
- " \n" +
- " TimeSeries\n" +
- " station_id, latitude, longitude, ???\n" +
- " OceanSITES Manual 1.1, CF-1.10, COARDS, ACDD-1.3\n" +
- " null\n" +
- " codac@ifremer.fr\n" +
- " CODAC\n" +
- " institution\n" +
- " https://wwz.ifremer.fr/\n" +
- " http://www.myocean.eu\n" +
- " air, air_pressure_at_sea_level, atmosphere, atmospheric, ATMS, ATMS_DM, ATMS_QC, ATPT, ATPT_DM, ATPT_QC, data, depth, DEPTH_QC, earth, Earth Science > Atmosphere > Atmospheric Pressure > Atmospheric Pressure Measurements, Earth Science > Atmosphere > Atmospheric Pressure > Pressure Tendency, Earth Science > Atmosphere > Atmospheric Pressure > Sea Level Pressure, Earth Science > Atmosphere > Atmospheric Pressure > Static Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, flag, hour, hourly, institution, latitude, level, local, longitude, measurements, method, ocean, oceans, pressure, processing, quality, science, sea, sea_water_temperature, seawater, source, static, TEMP, TEMP_DM, TEMP_QC, temperature, tendency, tendency_of_air_pressure, time, TIME_QC, water\n"
- +
- " GCMD Science Keywords\n" +
- " These data follow MyOcean standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data. More on: http://www.myocean.eu/data_policy\n"
- +
- " http://www.myocean.eu,http://www.coriolis.eu.org\n" +
- " (local files)\n" +
- " CF Standard Name Table v70\n" +
- " TIME_QC, depth, DEPTH_QC, TEMP_QC, TEMP_DM, ATPT_QC, ATPT_DM, ATMS_QC, ATMS_DM\n"
- +
- " Unknown institution data from a local source.\n" +
- " Unknown institution data from a local source.\n" +
- " LATITUDE, LONGITUDE, TIME\n" +
- " \n" +
- " \n" +
- " TIME\n" +
- " time\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 8.0E9\n" +
- " -2.0E9\n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " TIME_QC\n" +
- " TIME_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 10.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " DEPTH\n" +
- " depth\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 8000.0\n" +
- " -8000.0\n" +
- " TopographyDepth\n" +
- " Location\n" +
- " NaN\n" +
- " null\n" +
- " sea_level\n" +
- " \n" +
- " \n" +
- " \n" +
- " DEPTH_QC\n" +
- " DEPTH_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 10.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " depth\n" +
- " \n" +
- " \n" +
- " \n" +
- " LATITUDE\n" +
- " latitude\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 90.0\n" +
- " -90.0\n" +
- " Location\n" +
- " \n" +
- " \n" +
- " \n" +
- " LONGITUDE\n" +
- " longitude\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 180.0\n" +
- " -180.0\n" +
- " Location\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP\n" +
- " TEMP\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 32.0\n" +
- " 0.0\n" +
- " Temperature\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP_QC\n" +
- " TEMP_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 10.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " TEMP_DM\n" +
- " TEMP_DM\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 20.0\n" +
- " 0.0\n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " ATPT\n" +
- " ATPT\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 3.0\n" +
- " -3.0\n" +
- " Pressure\n" +
- " \n" +
- " \n" +
- " \n" +
- " ATPT_QC\n" +
- " ATPT_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 10.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " ATPT_DM\n" +
- " ATPT_DM\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 20.0\n" +
- " 0.0\n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " ATMS\n" +
- " ATMS\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 1050.0\n" +
- " 950.0\n" +
- " Pressure\n" +
- " \n" +
- " \n" +
- " \n" +
- " ATMS_QC\n" +
- " ATMS_QC\n" +
- " byte\n" +
- " \n" +
- " \n" +
- " 10.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " \n" +
- " \n" +
- " \n" +
- " ATMS_DM\n" +
- " ATMS_DM\n" +
- " char\n" +
- " \n" +
- " \n" +
- " 20.0\n" +
- " 0.0\n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- "\n" +
- "\n\n";
+ String tDatasetID =
+ EDDTableFromMultidimNcFiles.suggestDatasetID(dataDir + fileNameRegex + useDimensionsCSV);
+ String expected =
+ "\n"
+ + " 1440\n"
+ + " 10000\n"
+ + " "
+ + dataDir
+ + "\n"
+ + " "
+ + fileNameRegex
+ + "\n"
+ + " true\n"
+ + " .*\n"
+ + " last\n"
+ + " 4355\n"
+ + " false\n"
+ + " TIME\n"
+ + " false\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TimeSeries\n"
+ + " station_id, latitude, longitude, ???\n"
+ + " OceanSITES Manual 1.1, CF-1.10, COARDS, ACDD-1.3\n"
+ + " null\n"
+ + " codac@ifremer.fr\n"
+ + " CODAC\n"
+ + " institution\n"
+ + " https://wwz.ifremer.fr/\n"
+ + " http://www.myocean.eu\n"
+ + " air, air_pressure_at_sea_level, atmosphere, atmospheric, ATMS, ATMS_DM, ATMS_QC, ATPT, ATPT_DM, ATPT_QC, data, depth, DEPTH_QC, earth, Earth Science > Atmosphere > Atmospheric Pressure > Atmospheric Pressure Measurements, Earth Science > Atmosphere > Atmospheric Pressure > Pressure Tendency, Earth Science > Atmosphere > Atmospheric Pressure > Sea Level Pressure, Earth Science > Atmosphere > Atmospheric Pressure > Static Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, flag, hour, hourly, institution, latitude, level, local, longitude, measurements, method, ocean, oceans, pressure, processing, quality, science, sea, sea_water_temperature, seawater, source, static, TEMP, TEMP_DM, TEMP_QC, temperature, tendency, tendency_of_air_pressure, time, TIME_QC, water\n"
+ + " GCMD Science Keywords\n"
+ + " These data follow MyOcean standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data. More on: http://www.myocean.eu/data_policy\n"
+ + " http://www.myocean.eu,http://www.coriolis.eu.org\n"
+ + " (local files)\n"
+ + " CF Standard Name Table v70\n"
+ + " TIME_QC, depth, DEPTH_QC, TEMP_QC, TEMP_DM, ATPT_QC, ATPT_DM, ATMS_QC, ATMS_DM\n"
+ + " Unknown institution data from a local source.\n"
+ + " Unknown institution data from a local source.\n"
+ + " LATITUDE, LONGITUDE, TIME\n"
+ + " \n"
+ + " \n"
+ + " TIME\n"
+ + " time\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 8.0E9\n"
+ + " -2.0E9\n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TIME_QC\n"
+ + " TIME_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 10.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DEPTH\n"
+ + " depth\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 8000.0\n"
+ + " -8000.0\n"
+ + " TopographyDepth\n"
+ + " Location\n"
+ + " NaN\n"
+ + " null\n"
+ + " sea_level\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " DEPTH_QC\n"
+ + " DEPTH_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 10.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " depth\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " LATITUDE\n"
+ + " latitude\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 90.0\n"
+ + " -90.0\n"
+ + " Location\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " LONGITUDE\n"
+ + " longitude\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 180.0\n"
+ + " -180.0\n"
+ + " Location\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP\n"
+ + " TEMP\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 32.0\n"
+ + " 0.0\n"
+ + " Temperature\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP_QC\n"
+ + " TEMP_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 10.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " TEMP_DM\n"
+ + " TEMP_DM\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 20.0\n"
+ + " 0.0\n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ATPT\n"
+ + " ATPT\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 3.0\n"
+ + " -3.0\n"
+ + " Pressure\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ATPT_QC\n"
+ + " ATPT_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 10.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ATPT_DM\n"
+ + " ATPT_DM\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 20.0\n"
+ + " 0.0\n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ATMS\n"
+ + " ATMS\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 1050.0\n"
+ + " 950.0\n"
+ + " Pressure\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ATMS_QC\n"
+ + " ATMS_QC\n"
+ + " byte\n"
+ + " \n"
+ + " \n"
+ + " 10.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " ATMS_DM\n"
+ + " ATMS_DM\n"
+ + " char\n"
+ + " \n"
+ + " \n"
+ + " 20.0\n"
+ + " 0.0\n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + "\n"
+ + "\n\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// GenerateDatasetsXml
- results = (new GenerateDatasetsXml()).doIt(new String[] { "-verbose",
- "EDDTableFromMultidimNcFiles",
- dataDir, "GL_.*44761\\.nc", "",
- "TIME, DEPTH",
- "1440",
- "", "", "", "", // just for test purposes
- "false", // removeMVRows
- "TIME", // sort files by
- "", "", "", "",
- "4355", // standardizeWhat 1+2(numericTime)+256(catch numeric mv)+4096(units)
- "LATITUDE, LONGITUDE, TIME", // treatDimensionsAs
- "" }, // cacheFromUrl
- false); // doIt loop?
+ results =
+ (new GenerateDatasetsXml())
+ .doIt(
+ new String[] {
+ "-verbose",
+ "EDDTableFromMultidimNcFiles",
+ dataDir,
+ "GL_.*44761\\.nc",
+ "",
+ "TIME, DEPTH",
+ "1440",
+ "",
+ "",
+ "",
+ "", // just for test purposes
+ "false", // removeMVRows
+ "TIME", // sort files by
+ "",
+ "",
+ "",
+ "",
+ "4355", // standardizeWhat 1+2(numericTime)+256(catch numeric mv)+4096(units)
+ "LATITUDE, LONGITUDE, TIME", // treatDimensionsAs
+ ""
+ }, // cacheFromUrl
+ false); // doIt loop?
Test.ensureEqual(results, expected, "Unexpected results from GenerateDatasetsXml.doIt.");
// ensure it is ready-to-use by making a dataset from it
// with one small change to addAttributes:
- results = String2.replaceAll(results,
- " TimeSeries\n",
- " Point\n");
- results = String2.replaceAll(results,
- " station_id, latitude, longitude, ???\n",
- "");
+ results =
+ String2.replaceAll(
+ results,
+ " TimeSeries\n",
+ " Point\n");
+ results =
+ String2.replaceAll(
+ results,
+ " station_id, latitude, longitude, ???\n",
+ "");
// it could be made into valid TimeSeries by adding a few more atts
String2.log(results);
// String tDatasetID = "GL___44761_0171_3b6f_1e9c";
EDD.deleteCachedDatasetInfo(tDatasetID);
- EDDTableFromMultidimNcFiles edd = (EDDTableFromMultidimNcFiles) EDDTableFromMultidimNcFiles
- .oneFromXmlFragment(null, results);
+ EDDTableFromMultidimNcFiles edd =
+ (EDDTableFromMultidimNcFiles) EDDTableFromMultidimNcFiles.oneFromXmlFragment(null, results);
Test.ensureEqual(edd.datasetID(), tDatasetID, "");
Test.ensureEqual(edd.title(), "Unknown institution data from a local source.", "");
- Test.ensureEqual(String2.toCSSVString(edd.dataVariableDestinationNames()),
+ Test.ensureEqual(
+ String2.toCSSVString(edd.dataVariableDestinationNames()),
"time, TIME_QC, depth, DEPTH_QC, latitude, longitude, TEMP, TEMP_QC, TEMP_DM, ATPT, ATPT_QC, ATPT_DM, ATMS, ATMS_QC, ATMS_DM",
"");
- Test.ensureEqual(edd.treatDimensionsAs.length, 1, EDDTableFromMultidimNcFiles.TREAT_DIMENSIONS_AS);
- Test.ensureEqual(String2.toCSSVString(edd.treatDimensionsAs[0]),
- "LATITUDE, LONGITUDE, TIME", EDDTableFromMultidimNcFiles.TREAT_DIMENSIONS_AS);
+ Test.ensureEqual(
+ edd.treatDimensionsAs.length, 1, EDDTableFromMultidimNcFiles.TREAT_DIMENSIONS_AS);
+ Test.ensureEqual(
+ String2.toCSSVString(edd.treatDimensionsAs[0]),
+ "LATITUDE, LONGITUDE, TIME",
+ EDDTableFromMultidimNcFiles.TREAT_DIMENSIONS_AS);
}
/**
@@ -2491,7 +2550,7 @@ void testGenerateDatasetsXmlDimensions() throws Throwable {
* @throws Throwable if trouble
*/
@ParameterizedTest
- @ValueSource(booleans = { true, false })
+ @ValueSource(booleans = {true, false})
@TagFlaky
void testTreatDimensionsAs(boolean deleteCachedInfo) throws Throwable {
// String2.log("\n******************
@@ -2502,347 +2561,361 @@ void testTreatDimensionsAs(boolean deleteCachedInfo) throws Throwable {
String error = "";
EDV edv;
String dir = EDStatic.fullTestCacheDirectory;
- String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 14); // 14 is enough to check hour. Hard
- // to check min:sec.
+ String today =
+ Calendar2.getCurrentISODateTimeStringZulu()
+ .substring(0, 14); // 14 is enough to check hour. Hard
+ // to check min:sec.
String id = "testTreatDimensionsAs";
- if (deleteCachedInfo)
- EDD.deleteCachedDatasetInfo(id);
+ if (deleteCachedInfo) EDD.deleteCachedDatasetInfo(id);
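+    // Running this test with deleteCachedInfo true and false (via @ValueSource)
+    // exercises both a cold start (dataset info re-read from the files) and a
+    // warm start (cached dataset info reused).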
EDDTable eddTable = (EDDTable) EDDTestDataset.gettestTreatDimensionsAs();
// .das
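+    // (.das = OPeNDAP Dataset Attribute Structure: the dataset's metadata as text.)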
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_treatDimensionsAs", ".das");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_treatDimensionsAs", ".das");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "Attributes {\n" +
- " s {\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.3201056e+9, 1.3437756e+9;\n" +
- " String axis \"T\";\n" +
- " Float64 colorBarMaximum 8.0e+9;\n" +
- " Float64 colorBarMinimum -2.0e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Time\";\n" +
- " Int32 QC_indicator 1;\n" +
- " Int32 QC_procedure 1;\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " Float64 valid_max 7.144848e+9;\n" +
- " Float64 valid_min -6.31152e+8;\n" +
- " }\n" +
- " TIME_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 1, 1;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSites reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " depth {\n" +
- " String _CoordinateAxisType \"Height\";\n" +
- " String _CoordinateZisPositive \"down\";\n" +
- " Float32 _FillValue NaN;\n" +
- " String axis \"Z\";\n" +
- " Float64 colorBarMaximum 8000.0;\n" +
- " Float64 colorBarMinimum -8000.0;\n" +
- " String colorBarPalette \"TopographyDepth\";\n" +
- " String coordinate_reference_frame \"urn:ogc:crs:EPSG::5113\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Depth of each measurement\";\n" +
- " String positive \"down\";\n" +
- " Int32 QC_indicator 1;\n" +
- " Int32 QC_procedure 1;\n" +
- " String references \"sea_level\";\n" +
- " String standard_name \"depth\";\n" +
- " String units \"m\";\n" +
- " Float32 valid_max 12000.0;\n" +
- " Float32 valid_min 0.0;\n" +
- " }\n" +
- " DEPTH_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- (deleteCachedInfo ? "" : " Byte actual_range 127, 127;\n") +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSites reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " String standard_name \"depth\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 47.763, 52.936;\n" +
- " String axis \"Y\";\n" +
- " Float64 colorBarMaximum 90.0;\n" +
- " Float64 colorBarMinimum -90.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude of each location\";\n" +
- " Int32 QC_indicator 1;\n" +
- " Int32 QC_procedure 1;\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " Float32 valid_max 90.0;\n" +
- " Float32 valid_min -90.0;\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range -44.112, -30.196;\n" +
- " String axis \"X\";\n" +
- " Float64 colorBarMaximum 180.0;\n" +
- " Float64 colorBarMinimum -180.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude of each location\";\n" +
- " Int32 QC_indicator 1;\n" +
- " Int32 QC_procedure 1;\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " Float32 valid_max 180.0;\n" +
- " Float32 valid_min -180.0;\n" +
- " }\n" +
- " TEMP {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 6.7, 16.7;\n" +
- " Float64 colorBarMaximum 32.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Sea temperature\";\n" +
- " String standard_name \"sea_water_temperature\";\n" +
- " String units \"degree_C\";\n" +
- " }\n" +
- " TEMP_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 1, 1;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSites reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " TEMP_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"realtime post-recovery delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " ATPT {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range -8.5, 4.166667;\n" +
- " Float64 colorBarMaximum 3.0;\n" +
- " Float64 colorBarMinimum -3.0;\n" +
- " String ioos_category \"Pressure\";\n" +
- " String long_name \"Atmospheric pressure hourly tendency\";\n" +
- " String standard_name \"tendency_of_air_pressure\";\n" +
- " String units \"hPa hour-1\";\n" +
- " }\n" +
- " ATPT_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 0;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSites reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " ATPT_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"realtime post-recovery delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " ATMS {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 974.7, 1026.6;\n" +
- " Float64 colorBarMaximum 1050.0;\n" +
- " Float64 colorBarMinimum 950.0;\n" +
- " String ioos_category \"Pressure\";\n" +
- " String long_name \"Atmospheric pressure at sea level\";\n" +
- " String standard_name \"air_pressure_at_sea_level\";\n" +
- " String units \"hPa\";\n" +
- " }\n" +
- " ATMS_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 0;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSites reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " ATMS_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"realtime post-recovery delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " }\n" +
- " NC_GLOBAL {\n" +
- " String area \"Global Ocean\";\n" +
- " String author \"Coriolis and MyOcean data provider\";\n" +
- " String cdm_data_type \"Point\";\n" +
- " String citation \"These data were collected and made freely available by the MyOcean project and the programs that contribute to it\";\n"
- +
- " String contact \"codac@ifremer.fr\";\n" +
- " String Conventions \"OceanSITES Manual 1.1, CF-1.6, COARDS, ACDD-1.3\";\n" +
- " String creator_email \"codac@ifremer.fr\";\n" +
- " String creator_name \"CODAC\";\n" +
- " String creator_type \"institution\";\n" +
- " String creator_url \"https://wwz.ifremer.fr/\";\n" +
- " String data_assembly_center \"Coriolis\";\n" +
- " String data_mode \"R\";\n" +
- " String data_type \"OceanSITES time-series data\";\n" +
- " String date_update \"2012-08-06T22:07:01Z\";\n" +
- " String distribution_statement \"These data follow MyOcean standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data. More on: http://www.myocean.eu/data_policy\";\n"
- +
- " Float64 Easternmost_Easting -30.196;\n" +
- " String featureType \"Point\";\n" +
- " String format_version \"1.1\";\n" +
- " Float64 geospatial_lat_max 52.936;\n" +
- " Float64 geospatial_lat_min 47.763;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max -30.196;\n" +
- " Float64 geospatial_lon_min -44.112;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " String geospatial_vertical_positive \"down\";\n" +
- " String geospatial_vertical_units \"m\";\n" +
- " String history \"2012-08-06T22:07:01Z : Creation\n";
+ expected =
+ "Attributes {\n"
+ + " s {\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.3201056e+9, 1.3437756e+9;\n"
+ + " String axis \"T\";\n"
+ + " Float64 colorBarMaximum 8.0e+9;\n"
+ + " Float64 colorBarMinimum -2.0e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Time\";\n"
+ + " Int32 QC_indicator 1;\n"
+ + " Int32 QC_procedure 1;\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " Float64 valid_max 7.144848e+9;\n"
+ + " Float64 valid_min -6.31152e+8;\n"
+ + " }\n"
+ + " TIME_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 1, 1;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSites reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " depth {\n"
+ + " String _CoordinateAxisType \"Height\";\n"
+ + " String _CoordinateZisPositive \"down\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " String axis \"Z\";\n"
+ + " Float64 colorBarMaximum 8000.0;\n"
+ + " Float64 colorBarMinimum -8000.0;\n"
+ + " String colorBarPalette \"TopographyDepth\";\n"
+ + " String coordinate_reference_frame \"urn:ogc:crs:EPSG::5113\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Depth of each measurement\";\n"
+ + " String positive \"down\";\n"
+ + " Int32 QC_indicator 1;\n"
+ + " Int32 QC_procedure 1;\n"
+ + " String references \"sea_level\";\n"
+ + " String standard_name \"depth\";\n"
+ + " String units \"m\";\n"
+ + " Float32 valid_max 12000.0;\n"
+ + " Float32 valid_min 0.0;\n"
+ + " }\n"
+ + " DEPTH_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ (deleteCachedInfo ? "" : " Byte actual_range 127, 127;\n")
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSites reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " String standard_name \"depth\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 47.763, 52.936;\n"
+ + " String axis \"Y\";\n"
+ + " Float64 colorBarMaximum 90.0;\n"
+ + " Float64 colorBarMinimum -90.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude of each location\";\n"
+ + " Int32 QC_indicator 1;\n"
+ + " Int32 QC_procedure 1;\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " Float32 valid_max 90.0;\n"
+ + " Float32 valid_min -90.0;\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range -44.112, -30.196;\n"
+ + " String axis \"X\";\n"
+ + " Float64 colorBarMaximum 180.0;\n"
+ + " Float64 colorBarMinimum -180.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude of each location\";\n"
+ + " Int32 QC_indicator 1;\n"
+ + " Int32 QC_procedure 1;\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " Float32 valid_max 180.0;\n"
+ + " Float32 valid_min -180.0;\n"
+ + " }\n"
+ + " TEMP {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 6.7, 16.7;\n"
+ + " Float64 colorBarMaximum 32.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Sea temperature\";\n"
+ + " String standard_name \"sea_water_temperature\";\n"
+ + " String units \"degree_C\";\n"
+ + " }\n"
+ + " TEMP_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 1, 1;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSites reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " TEMP_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"realtime post-recovery delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " ATPT {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range -8.5, 4.166667;\n"
+ + " Float64 colorBarMaximum 3.0;\n"
+ + " Float64 colorBarMinimum -3.0;\n"
+ + " String ioos_category \"Pressure\";\n"
+ + " String long_name \"Atmospheric pressure hourly tendency\";\n"
+ + " String standard_name \"tendency_of_air_pressure\";\n"
+ + " String units \"hPa hour-1\";\n"
+ + " }\n"
+ + " ATPT_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 0;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSites reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " ATPT_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"realtime post-recovery delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " ATMS {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 974.7, 1026.6;\n"
+ + " Float64 colorBarMaximum 1050.0;\n"
+ + " Float64 colorBarMinimum 950.0;\n"
+ + " String ioos_category \"Pressure\";\n"
+ + " String long_name \"Atmospheric pressure at sea level\";\n"
+ + " String standard_name \"air_pressure_at_sea_level\";\n"
+ + " String units \"hPa\";\n"
+ + " }\n"
+ + " ATMS_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 0;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSites reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " ATMS_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"realtime post-recovery delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " }\n"
+ + " NC_GLOBAL {\n"
+ + " String area \"Global Ocean\";\n"
+ + " String author \"Coriolis and MyOcean data provider\";\n"
+ + " String cdm_data_type \"Point\";\n"
+ + " String citation \"These data were collected and made freely available by the MyOcean project and the programs that contribute to it\";\n"
+ + " String contact \"codac@ifremer.fr\";\n"
+ + " String Conventions \"OceanSITES Manual 1.1, CF-1.6, COARDS, ACDD-1.3\";\n"
+ + " String creator_email \"codac@ifremer.fr\";\n"
+ + " String creator_name \"CODAC\";\n"
+ + " String creator_type \"institution\";\n"
+ + " String creator_url \"https://wwz.ifremer.fr/\";\n"
+ + " String data_assembly_center \"Coriolis\";\n"
+ + " String data_mode \"R\";\n"
+ + " String data_type \"OceanSITES time-series data\";\n"
+ + " String date_update \"2012-08-06T22:07:01Z\";\n"
+ + " String distribution_statement \"These data follow MyOcean standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data. More on: http://www.myocean.eu/data_policy\";\n"
+ + " Float64 Easternmost_Easting -30.196;\n"
+ + " String featureType \"Point\";\n"
+ + " String format_version \"1.1\";\n"
+ + " Float64 geospatial_lat_max 52.936;\n"
+ + " Float64 geospatial_lat_min 47.763;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max -30.196;\n"
+ + " Float64 geospatial_lon_min -44.112;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " String geospatial_vertical_positive \"down\";\n"
+ + " String geospatial_vertical_units \"m\";\n"
+ + " String history \"2012-08-06T22:07:01Z : Creation\n";
Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// 2018-06-14T18:48:29Z (local files)
// 2018-06-14T18:48:29Z
// "http://localhost:8080/cwexperimental/tabledap/testTreatDimensionsAs.das";
- expected = "String id \"GL_201207_TS_DB_44761\";\n" +
- " String infoUrl \"http://www.myocean.eu\";\n" +
- " String institution \"Unknown institution\";\n" +
- " String institution_references \"http://www.coriolis.eu.org\";\n" +
- " String keywords \"air, air_pressure_at_sea_level, atmosphere, atmospheric, ATMS, ATMS_DM, ATMS_QC, ATPT, ATPT_DM, ATPT_QC, data, depth, DEPTH_QC, earth, Earth Science > Atmosphere > Atmospheric Pressure > Atmospheric Pressure Measurements, Earth Science > Atmosphere > Atmospheric Pressure > Pressure Tendency, Earth Science > Atmosphere > Atmospheric Pressure > Sea Level Pressure, Earth Science > Atmosphere > Atmospheric Pressure > Static Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, flag, hour, hourly, institution, latitude, level, local, longitude, measurements, method, ocean, oceans, pressure, processing, quality, science, sea, sea_water_temperature, seawater, source, static, TEMP, TEMP_DM, TEMP_QC, temperature, tendency, tendency_of_air_pressure, time, TIME_QC, water\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String license \"These data follow MyOcean standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data. More on: http://www.myocean.eu/data_policy\";\n"
- +
- " String naming_authority \"OceanSITES\";\n" +
- " String netcdf_version \"3.5\";\n" +
- " Float64 Northernmost_Northing 52.936;\n" +
- " String platform_code \"44761\";\n" +
- " String qc_manual \"OceanSITES User's Manual v1.1\";\n" +
- " String quality_control_indicator \"6\";\n" +
- " String quality_index \"A\";\n" +
- " String references \"http://www.myocean.eu,http://www.coriolis.eu.org\";\n" +
- " String source \"BUOY/MOORING: SURFACE, DRIFTING : observation\";\n" +
- " String sourceUrl \"(local files)\";\n" +
- " Float64 Southernmost_Northing 47.763;\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v70\";\n" +
- " String subsetVariables \"TIME_QC, depth, DEPTH_QC, TEMP_QC, TEMP_DM, ATPT_QC, ATPT_DM, ATMS_QC, ATMS_DM\";\n"
- +
- " String summary \"Unknown institution data from a local source.\";\n" +
- " String time_coverage_end \"2012-07-31T23:00:00Z\";\n" +
- " String time_coverage_start \"2011-11-01T00:00:00Z\";\n" +
- " String title \"The Title for testTreatDimensionsAs\";\n" +
- " String update_interval \"daily\";\n" +
- " Float64 Westernmost_Easting -44.112;\n" +
- " String wmo_platform_code \"44761\";\n" +
- " }\n" +
- "}\n";
+ expected =
+ "String id \"GL_201207_TS_DB_44761\";\n"
+ + " String infoUrl \"http://www.myocean.eu\";\n"
+ + " String institution \"Unknown institution\";\n"
+ + " String institution_references \"http://www.coriolis.eu.org\";\n"
+ + " String keywords \"air, air_pressure_at_sea_level, atmosphere, atmospheric, ATMS, ATMS_DM, ATMS_QC, ATPT, ATPT_DM, ATPT_QC, data, depth, DEPTH_QC, earth, Earth Science > Atmosphere > Atmospheric Pressure > Atmospheric Pressure Measurements, Earth Science > Atmosphere > Atmospheric Pressure > Pressure Tendency, Earth Science > Atmosphere > Atmospheric Pressure > Sea Level Pressure, Earth Science > Atmosphere > Atmospheric Pressure > Static Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, flag, hour, hourly, institution, latitude, level, local, longitude, measurements, method, ocean, oceans, pressure, processing, quality, science, sea, sea_water_temperature, seawater, source, static, TEMP, TEMP_DM, TEMP_QC, temperature, tendency, tendency_of_air_pressure, time, TIME_QC, water\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String license \"These data follow MyOcean standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data. More on: http://www.myocean.eu/data_policy\";\n"
+ + " String naming_authority \"OceanSITES\";\n"
+ + " String netcdf_version \"3.5\";\n"
+ + " Float64 Northernmost_Northing 52.936;\n"
+ + " String platform_code \"44761\";\n"
+ + " String qc_manual \"OceanSITES User's Manual v1.1\";\n"
+ + " String quality_control_indicator \"6\";\n"
+ + " String quality_index \"A\";\n"
+ + " String references \"http://www.myocean.eu,http://www.coriolis.eu.org\";\n"
+ + " String source \"BUOY/MOORING: SURFACE, DRIFTING : observation\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " Float64 Southernmost_Northing 47.763;\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v70\";\n"
+ + " String subsetVariables \"TIME_QC, depth, DEPTH_QC, TEMP_QC, TEMP_DM, ATPT_QC, ATPT_DM, ATMS_QC, ATMS_DM\";\n"
+ + " String summary \"Unknown institution data from a local source.\";\n"
+ + " String time_coverage_end \"2012-07-31T23:00:00Z\";\n"
+ + " String time_coverage_start \"2011-11-01T00:00:00Z\";\n"
+ + " String title \"The Title for testTreatDimensionsAs\";\n"
+ + " String update_interval \"daily\";\n"
+ + " Float64 Westernmost_Easting -44.112;\n"
+ + " String wmo_platform_code \"44761\";\n"
+ + " }\n"
+ + "}\n";
int po = results.indexOf(expected.substring(0, 40));
Test.ensureEqual(results.substring(po), expected, "results=\n" + results);
// .dds
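+    // (.dds = OPeNDAP Dataset Descriptor Structure: variable names and types only.)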
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_treatDimensionsAs", ".dds");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_treatDimensionsAs", ".dds");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "Dataset {\n" +
- " Sequence {\n" +
- " Float64 time;\n" +
- " Byte TIME_QC;\n" +
- " Float32 depth;\n" +
- " Byte DEPTH_QC;\n" +
- " Float32 latitude;\n" +
- " Float32 longitude;\n" +
- " Float32 TEMP;\n" +
- " Byte TEMP_QC;\n" +
- " String TEMP_DM;\n" +
- " Float32 ATPT;\n" +
- " Byte ATPT_QC;\n" +
- " String ATPT_DM;\n" +
- " Float32 ATMS;\n" +
- " Byte ATMS_QC;\n" +
- " String ATMS_DM;\n" +
- " } s;\n" +
- "} s;\n";
+ expected =
+ "Dataset {\n"
+ + " Sequence {\n"
+ + " Float64 time;\n"
+ + " Byte TIME_QC;\n"
+ + " Float32 depth;\n"
+ + " Byte DEPTH_QC;\n"
+ + " Float32 latitude;\n"
+ + " Float32 longitude;\n"
+ + " Float32 TEMP;\n"
+ + " Byte TEMP_QC;\n"
+ + " String TEMP_DM;\n"
+ + " Float32 ATPT;\n"
+ + " Byte ATPT_QC;\n"
+ + " String ATPT_DM;\n"
+ + " Float32 ATMS;\n"
+ + " Byte ATMS_QC;\n"
+ + " String ATMS_DM;\n"
+ + " } s;\n"
+ + "} s;\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// .csv
// " Float64 actual_range 1.3201056e+9, 1.3437756e+9;\n" +
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "&time=1.3201056e9", dir,
- eddTable.className() + "_treatDimensionsAs1", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "&time=1.3201056e9",
+ dir,
+ eddTable.className() + "_treatDimensionsAs1",
+ ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "time,TIME_QC,depth,DEPTH_QC,latitude,longitude,TEMP,TEMP_QC,TEMP_DM,ATPT,ATPT_QC,ATPT_DM,ATMS,ATMS_QC,ATMS_DM\n"
- +
- "UTC,,m,,degrees_north,degrees_east,degree_C,,,hPa hour-1,,,hPa,,\n" +
- "2011-11-01T00:00:00Z,1,NaN,NaN,52.33,-35.219,9.2,1,R,-1.6,0,R,985.6,0,R\n";
+ expected =
+ "time,TIME_QC,depth,DEPTH_QC,latitude,longitude,TEMP,TEMP_QC,TEMP_DM,ATPT,ATPT_QC,ATPT_DM,ATMS,ATMS_QC,ATMS_DM\n"
+ + "UTC,,m,,degrees_north,degrees_east,degree_C,,,hPa hour-1,,,hPa,,\n"
+ + "2011-11-01T00:00:00Z,1,NaN,NaN,52.33,-35.219,9.2,1,R,-1.6,0,R,985.6,0,R\n";
Test.ensureEqual(results, expected, "results=\n" + results);
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "&time=1.3437756e9", dir,
- eddTable.className() + "_treatDimensionsAs2", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "&time=1.3437756e9",
+ dir,
+ eddTable.className() + "_treatDimensionsAs2",
+ ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "time,TIME_QC,depth,DEPTH_QC,latitude,longitude,TEMP,TEMP_QC,TEMP_DM,ATPT,ATPT_QC,ATPT_DM,ATMS,ATMS_QC,ATMS_DM\n"
- +
- "UTC,,m,,degrees_north,degrees_east,degree_C,,,hPa hour-1,,,hPa,,\n" +
- "2012-07-31T23:00:00Z,1,NaN,NaN,50.969,-40.416,16.5,1,R,0.13333334,0,R,1022.0,0,R\n";
+ expected =
+ "time,TIME_QC,depth,DEPTH_QC,latitude,longitude,TEMP,TEMP_QC,TEMP_DM,ATPT,ATPT_QC,ATPT_DM,ATMS,ATMS_QC,ATMS_DM\n"
+ + "UTC,,m,,degrees_north,degrees_east,degree_C,,,hPa hour-1,,,hPa,,\n"
+ + "2012-07-31T23:00:00Z,1,NaN,NaN,50.969,-40.416,16.5,1,R,0.13333334,0,R,1022.0,0,R\n";
Test.ensureEqual(results, expected, "results=\n" + results);
-
}
/**
@@ -2851,7 +2924,7 @@ void testTreatDimensionsAs(boolean deleteCachedInfo) throws Throwable {
* @throws Throwable if trouble
*/
@ParameterizedTest
- @ValueSource(booleans = { true, false })
+ @ValueSource(booleans = {true, false})
void testTreatDimensionsAs2(boolean deleteCachedInfo) throws Throwable {
// String2.log("\n******************
// EDDTableFromMultidimNcFiles.testTreatDimensionsAs2() *****************\n");
@@ -2861,510 +2934,520 @@ void testTreatDimensionsAs2(boolean deleteCachedInfo) throws Throwable {
String error = "";
EDV edv;
String dir = EDStatic.fullTestCacheDirectory;
- String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 14); // 14 is enough to check hour. Hard
- // to check min:sec.
+ String today =
+ Calendar2.getCurrentISODateTimeStringZulu()
+ .substring(0, 14); // 14 is enough to check hour; hard to check min:sec.
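+ // Editorial note: substring(0, 14) of e.g. "2018-06-14T18:48:29Z" is
+ // "2018-06-14T18:", i.e. the date plus the hour, which is why 14 characters
+ // suffice to check the hour.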
String id = "testTreatDimensionsAs2";
- if (deleteCachedInfo)
- EDD.deleteCachedDatasetInfo(id);
+ if (deleteCachedInfo) EDD.deleteCachedDatasetInfo(id);
EDDTable eddTable = (EDDTable) EDDTestDataset.gettestTreatDimensionsAs2();
// .das
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_treatDimensionsAs2", ".das");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_treatDimensionsAs2", ".das");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
expected = // long flag masks appear a float64
- "Attributes {\n" +
- " s {\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.506816e+9, 1.5119982e+9;\n" + // before Calendar2 rounded to nearest
- // second for "days since ", this was
- // 1.5119981999999967e+9;\n" +
- " String axis \"T\";\n" +
- " Float64 colorBarMaximum 8.0e+9;\n" +
- " Float64 colorBarMinimum -2.0e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Time\";\n" +
- " Int32 QC_indicator 1;\n" +
- " Int32 QC_procedure 1;\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " Float64 valid_max 7.144848e+9;\n" +
- " Float64 valid_min -6.31152e+8;\n" +
- " }\n" +
- " TIME_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 1, 1;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 42.5, 42.5;\n" +
- " String axis \"Y\";\n" +
- " Float64 colorBarMaximum 90.0;\n" +
- " Float64 colorBarMinimum -90.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude of each location\";\n" +
- " Int32 QC_indicator 1;\n" +
- " Int32 QC_procedure 1;\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " Float32 valid_max 90.0;\n" +
- " Float32 valid_min MIN;\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 27.4833, 27.4833;\n" +
- " String axis \"X\";\n" +
- " Float64 colorBarMaximum 180.0;\n" +
- " Float64 colorBarMinimum -180.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude of each location\";\n" +
- " Int32 QC_indicator 1;\n" +
- " Int32 QC_procedure 1;\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " Float32 valid_max 180.0;\n" +
- " Float32 valid_min MIN;\n" +
- " }\n" +
- " depth {\n" +
- " String _CoordinateAxisType \"Height\";\n" +
- " String _CoordinateZisPositive \"down\";\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range -2.0, 0.0;\n" +
- " String axis \"Z\";\n" +
- " Float64 colorBarMaximum 8000.0;\n" +
- " Float64 colorBarMinimum -8000.0;\n" +
- " String colorBarPalette \"TopographyDepth\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Depth\";\n" +
- " String positive \"down\";\n" +
- " String source_name \"DEPH\";\n" +
- " String standard_name \"depth\";\n" +
- " String units \"m\";\n" +
- " Float32 valid_max 12000.0;\n" +
- " Float32 valid_min MIN;\n" +
- " }\n" +
- " DEPH_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 7, 7;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " DEPH_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " RELH {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 27.91, 100.0;\n" +
- " Float64 colorBarMaximum 100.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Meteorology\";\n" +
- " String long_name \"Relative humidity\";\n" +
- " String standard_name \"relative_humidity\";\n" +
- " String units \"percent\";\n" +
- " }\n" +
- " RELH_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 0;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " RELH_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " ATMS {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float64 colorBarMaximum 1050.0;\n" +
- " Float64 colorBarMinimum 950.0;\n" +
- " String ioos_category \"Pressure\";\n" +
- " String long_name \"Atmospheric pressure at sea level\";\n" +
- " String standard_name \"air_pressure_at_sea_level\";\n" +
- " String units \"hPa\";\n" +
- " }\n" +
- " ATMS_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 9, 9;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " ATMS_DM {\n" +
- " String actual_range \" \n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " DRYT {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 0.0, 27.0;\n" +
- " Float64 colorBarMaximum 40.0;\n" +
- " Float64 colorBarMinimum -10.0;\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Air temperature in dry bulb\";\n" +
- " String standard_name \"air_temperature\";\n" +
- " String units \"degree_C\";\n" +
- " }\n" +
- " DRYT_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 0;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " DRYT_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " DEWT {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range -6.0, 17.0;\n" +
- " Float64 colorBarMaximum 40.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Dew point temperature\";\n" +
- " String standard_name \"dew_point_temperature\";\n" +
- " String units \"degree_C\";\n" +
- " }\n" +
- " DEWT_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 0;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " DEWT_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " WSPD {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 0.0, 14.91889;\n" +
- " Float64 colorBarMaximum 15.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Wind\";\n" +
- " String long_name \"Horizontal wind speed\";\n" +
- " String standard_name \"wind_speed\";\n" +
- " String units \"m s-1\";\n" +
- " }\n" +
- " WSPD_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 0;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " WSPD_DM {\n" +
- " String actual_range \"R\n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Wind\";\n" +
- " String long_name \"method of data processing\";\n" +
- " String standard_name \"wind_speed\";\n" +
- " }\n" +
- " WDIR {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 0.0, 360.0;\n" +
- " Float64 colorBarMaximum 360.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Wind\";\n" +
- " String long_name \"Wind from direction relative true north\";\n" +
- " String standard_name \"wind_from_direction\";\n" +
- " String units \"degree\";\n" +
- " }\n" +
- " WDIR_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 9;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " WDIR_DM {\n" +
- " String actual_range \" \n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Wind\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " GSPD {\n" +
- " Float32 _FillValue NaN;\n" +
- " Float32 actual_range 6.173333, 20.06333;\n" +
- " Float64 colorBarMaximum 30.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Wind\";\n" +
- " String long_name \"Gust wind speed\";\n" +
- " String standard_name \"wind_speed_of_gust\";\n" +
- " String units \"m s-1\";\n" +
- " }\n" +
- " GSPD_QC {\n" +
- " Byte _FillValue 127;\n" +
- " String _Unsigned \"false\";\n" + // ERDDAP adds
- " Byte actual_range 0, 9;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 2\";\n" +
- " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
- +
- " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " Byte valid_max 9;\n" +
- " Byte valid_min 0;\n" +
- " }\n" +
- " GSPD_DM {\n" +
- " String actual_range \" \n" +
- "R\";\n" +
- " Float64 colorBarMaximum 20.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"OceanSITES reference table 5\";\n" +
- " String flag_meanings \"real-time provisional delayed-mode mixed\";\n" +
- " String flag_values \"R, P, D, M\";\n" +
- " String ioos_category \"Currents\";\n" +
- " String long_name \"method of data processing\";\n" +
- " }\n" +
- " }\n" +
- " NC_GLOBAL {\n" +
- " String area \"Black Sea\";\n" +
- " String cdm_data_type \"Point\";\n" +
- " String citation \"These data were collected and made freely available by the Copernicus project and the programs that contribute to it\";\n"
- +
- " String contact \"cmems-service@io-bas.bg\";\n" +
- " String Conventions \"CF-1.6 OceanSITES-Manual-1.2 Copernicus-InSituTAC-SRD-1.3 Copernicus-InSituTAC-ParametersList-3.0.0, COARDS, ACDD-1.3\";\n"
- +
- " String creator_email \"cmems-service@io-bas.bg\";\n" +
- " String creator_name \"Unknown institution\";\n" +
- " String creator_url \"http://www.oceansites.org\";\n" +
- " String data_assembly_center \"IOBAS\";\n" +
- " String data_mode \"R\";\n" +
- " String data_type \"OceanSITES time-series data\";\n" +
- " String date_update \"yyyy-MM-ddThh:mm:ssZ\";\n" +
- " String distribution_statement \"These data follow Copernicus standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data.\";\n"
- +
- " Float64 Easternmost_Easting 27.4833;\n" +
- " String featureType \"Point\";\n" +
- " String format_version \"1.2\";\n" +
- " Float64 geospatial_lat_max 42.5;\n" +
- " Float64 geospatial_lat_min 42.5;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max 27.4833;\n" +
- " Float64 geospatial_lon_min 27.4833;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " Float64 geospatial_vertical_max 0.0;\n" +
- " Float64 geospatial_vertical_min -2.0;\n" +
- " String geospatial_vertical_positive \"down\";\n" +
- " String geospatial_vertical_units \"m\";\n" +
- " String history \"2018-01-11T02:59:08Z : Creation\n";
- results = results.replaceAll("String date_update \\\"....-..-..T..:..:..Z", "String date_update \"yyyy-MM-ddThh:mm:ssZ");
+ "Attributes {\n"
+ + " s {\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.506816e+9, 1.5119982e+9;\n"
+ + // Before Calendar2 rounded to the nearest second for "days since" times,
+ // this was 1.5119981999999967e+9.
+ " String axis \"T\";\n"
+ + " Float64 colorBarMaximum 8.0e+9;\n"
+ + " Float64 colorBarMinimum -2.0e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Time\";\n"
+ + " Int32 QC_indicator 1;\n"
+ + " Int32 QC_procedure 1;\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " Float64 valid_max 7.144848e+9;\n"
+ + " Float64 valid_min -6.31152e+8;\n"
+ + " }\n"
+ + " TIME_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 1, 1;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 42.5, 42.5;\n"
+ + " String axis \"Y\";\n"
+ + " Float64 colorBarMaximum 90.0;\n"
+ + " Float64 colorBarMinimum -90.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude of each location\";\n"
+ + " Int32 QC_indicator 1;\n"
+ + " Int32 QC_procedure 1;\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " Float32 valid_max 90.0;\n"
+ + " Float32 valid_min MIN;\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 27.4833, 27.4833;\n"
+ + " String axis \"X\";\n"
+ + " Float64 colorBarMaximum 180.0;\n"
+ + " Float64 colorBarMinimum -180.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude of each location\";\n"
+ + " Int32 QC_indicator 1;\n"
+ + " Int32 QC_procedure 1;\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " Float32 valid_max 180.0;\n"
+ + " Float32 valid_min MIN;\n"
+ + " }\n"
+ + " depth {\n"
+ + " String _CoordinateAxisType \"Height\";\n"
+ + " String _CoordinateZisPositive \"down\";\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range -2.0, 0.0;\n"
+ + " String axis \"Z\";\n"
+ + " Float64 colorBarMaximum 8000.0;\n"
+ + " Float64 colorBarMinimum -8000.0;\n"
+ + " String colorBarPalette \"TopographyDepth\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Depth\";\n"
+ + " String positive \"down\";\n"
+ + " String source_name \"DEPH\";\n"
+ + " String standard_name \"depth\";\n"
+ + " String units \"m\";\n"
+ + " Float32 valid_max 12000.0;\n"
+ + " Float32 valid_min MIN;\n"
+ + " }\n"
+ + " DEPH_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 7, 7;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " DEPH_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " RELH {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 27.91, 100.0;\n"
+ + " Float64 colorBarMaximum 100.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Meteorology\";\n"
+ + " String long_name \"Relative humidity\";\n"
+ + " String standard_name \"relative_humidity\";\n"
+ + " String units \"percent\";\n"
+ + " }\n"
+ + " RELH_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 0;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " RELH_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " ATMS {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float64 colorBarMaximum 1050.0;\n"
+ + " Float64 colorBarMinimum 950.0;\n"
+ + " String ioos_category \"Pressure\";\n"
+ + " String long_name \"Atmospheric pressure at sea level\";\n"
+ + " String standard_name \"air_pressure_at_sea_level\";\n"
+ + " String units \"hPa\";\n"
+ + " }\n"
+ + " ATMS_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 9, 9;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " ATMS_DM {\n"
+ + " String actual_range \" \n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " DRYT {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 0.0, 27.0;\n"
+ + " Float64 colorBarMaximum 40.0;\n"
+ + " Float64 colorBarMinimum -10.0;\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Air temperature in dry bulb\";\n"
+ + " String standard_name \"air_temperature\";\n"
+ + " String units \"degree_C\";\n"
+ + " }\n"
+ + " DRYT_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 0;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " DRYT_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " DEWT {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range -6.0, 17.0;\n"
+ + " Float64 colorBarMaximum 40.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Dew point temperature\";\n"
+ + " String standard_name \"dew_point_temperature\";\n"
+ + " String units \"degree_C\";\n"
+ + " }\n"
+ + " DEWT_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 0;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " DEWT_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " WSPD {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 0.0, 14.91889;\n"
+ + " Float64 colorBarMaximum 15.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Wind\";\n"
+ + " String long_name \"Horizontal wind speed\";\n"
+ + " String standard_name \"wind_speed\";\n"
+ + " String units \"m s-1\";\n"
+ + " }\n"
+ + " WSPD_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 0;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " WSPD_DM {\n"
+ + " String actual_range \"R\n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Wind\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " String standard_name \"wind_speed\";\n"
+ + " }\n"
+ + " WDIR {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 0.0, 360.0;\n"
+ + " Float64 colorBarMaximum 360.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Wind\";\n"
+ + " String long_name \"Wind from direction relative true north\";\n"
+ + " String standard_name \"wind_from_direction\";\n"
+ + " String units \"degree\";\n"
+ + " }\n"
+ + " WDIR_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 9;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " WDIR_DM {\n"
+ + " String actual_range \" \n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Wind\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " GSPD {\n"
+ + " Float32 _FillValue NaN;\n"
+ + " Float32 actual_range 6.173333, 20.06333;\n"
+ + " Float64 colorBarMaximum 30.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Wind\";\n"
+ + " String long_name \"Gust wind speed\";\n"
+ + " String standard_name \"wind_speed_of_gust\";\n"
+ + " String units \"m s-1\";\n"
+ + " }\n"
+ + " GSPD_QC {\n"
+ + " Byte _FillValue 127;\n"
+ + " String _Unsigned \"false\";\n"
+ + // ERDDAP adds
+ " Byte actual_range 0, 9;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 2\";\n"
+ + " String flag_meanings \"no_qc_performed good_data probably_good_data bad_data_that_are_potentially_correctable bad_data value_changed not_used nominal_value interpolated_value missing_value\";\n"
+ + " Byte flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " Byte valid_max 9;\n"
+ + " Byte valid_min 0;\n"
+ + " }\n"
+ + " GSPD_DM {\n"
+ + " String actual_range \" \n"
+ + "R\";\n"
+ + " Float64 colorBarMaximum 20.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"OceanSITES reference table 5\";\n"
+ + " String flag_meanings \"real-time provisional delayed-mode mixed\";\n"
+ + " String flag_values \"R, P, D, M\";\n"
+ + " String ioos_category \"Currents\";\n"
+ + " String long_name \"method of data processing\";\n"
+ + " }\n"
+ + " }\n"
+ + " NC_GLOBAL {\n"
+ + " String area \"Black Sea\";\n"
+ + " String cdm_data_type \"Point\";\n"
+ + " String citation \"These data were collected and made freely available by the Copernicus project and the programs that contribute to it\";\n"
+ + " String contact \"cmems-service@io-bas.bg\";\n"
+ + " String Conventions \"CF-1.6 OceanSITES-Manual-1.2 Copernicus-InSituTAC-SRD-1.3 Copernicus-InSituTAC-ParametersList-3.0.0, COARDS, ACDD-1.3\";\n"
+ + " String creator_email \"cmems-service@io-bas.bg\";\n"
+ + " String creator_name \"Unknown institution\";\n"
+ + " String creator_url \"http://www.oceansites.org\";\n"
+ + " String data_assembly_center \"IOBAS\";\n"
+ + " String data_mode \"R\";\n"
+ + " String data_type \"OceanSITES time-series data\";\n"
+ + " String date_update \"yyyy-MM-ddThh:mm:ssZ\";\n"
+ + " String distribution_statement \"These data follow Copernicus standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data.\";\n"
+ + " Float64 Easternmost_Easting 27.4833;\n"
+ + " String featureType \"Point\";\n"
+ + " String format_version \"1.2\";\n"
+ + " Float64 geospatial_lat_max 42.5;\n"
+ + " Float64 geospatial_lat_min 42.5;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max 27.4833;\n"
+ + " Float64 geospatial_lon_min 27.4833;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " Float64 geospatial_vertical_max 0.0;\n"
+ + " Float64 geospatial_vertical_min -2.0;\n"
+ + " String geospatial_vertical_positive \"down\";\n"
+ + " String geospatial_vertical_units \"m\";\n";
+ // " String history \"2018-01-11T02:59:08Z : Creation\n";
+ results =
+ results.replaceAll(
+ "String date_update \\\"....-..-..T..:..:..Z",
+ "String date_update \"yyyy-MM-ddThh:mm:ssZ");
results = results.replaceAll("Float32 valid_min -?[0-9]+.[0-9]+;", "Float32 valid_min MIN;");
Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
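+ // Editorial sketch (not part of the original test): the masking idea above,
+ // shown with a hypothetical one-line input. Timestamps and data-dependent
+ // numbers that change when the source file is regenerated are rewritten to
+ // fixed placeholders before the string comparison, so the test stays stable.
+ {
+   String sampleDas = "    String date_update \"2018-01-11T02:59:08Z\";\n";
+   String masked =
+       sampleDas.replaceAll(
+           "String date_update \\\"....-..-..T..:..:..Z",
+           "String date_update \"yyyy-MM-ddThh:mm:ssZ");
+   // masked now contains: String date_update "yyyy-MM-ddThh:mm:ssZ";
+ }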
// 2018-06-14T18:48:29Z (local files)
// 2018-06-14T18:48:29Z
// "http://localhost:8080/cwexperimental/tabledap/testTreatDimensionsAs.das";
- expected = "String id \"BS_201711_TS_MO_LBBG\";\n" +
- " String infoUrl \"http://www.oceansites.org\";\n" +
- " String institution \"Unknown institution\";\n" +
- " String institution_references \"http://www.io-bas.bg/\";\n" +
- " String keywords \"air, air_pressure_at_sea_level, air_temperature, atmosphere, atmospheric, ATMS, ATMS_DM, ATMS_QC, bulb, currents, data, DEPH_DM, DEPH_QC, depth, dew, dew point, dew_point_temperature, DEWT, DEWT_DM, DEWT_QC, direction, dry, DRYT, DRYT_DM, DRYT_QC, earth, Earth Science > Atmosphere > Atmospheric Pressure > Atmospheric Pressure Measurements, Earth Science > Atmosphere > Atmospheric Pressure > Sea Level Pressure, Earth Science > Atmosphere > Atmospheric Pressure > Static Pressure, Earth Science > Atmosphere > Atmospheric Temperature > Air Temperature, Earth Science > Atmosphere > Atmospheric Temperature > Dew Point Temperature, Earth Science > Atmosphere > Atmospheric Temperature > Surface Air Temperature, Earth Science > Atmosphere > Atmospheric Water Vapor > Dew Point Temperature, Earth Science > Atmosphere > Atmospheric Water Vapor > Humidity, Earth Science > Atmosphere > Atmospheric Winds > Surface Winds, flag, GSPD, GSPD_DM, GSPD_QC, gust, horizontal, humidity, institution, latitude, level, local, longitude, measurements, meteorology, method, north, point, pressure, processing, quality, relative, relative_humidity, RELH, RELH_DM, RELH_QC, science, sea, seawater, source, speed, static, surface, temperature, time, TIME_QC, true, vapor, water, WDIR, WDIR_DM, WDIR_QC, wind, wind_from_direction, wind_speed, wind_speed_of_gust, winds, WSPD, WSPD_DM, WSPD_QC\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String last_date_observation \"2017-11-29T23:30:00Z\";\n" +
- " String last_latitude_observation \"42.5\";\n" +
- " String last_longitude_observation \"27.4833\";\n" +
- " String license \"These data follow Copernicus standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data.\";\n"
- +
- " String naming_authority \"OceanSITES\";\n" +
- " String netcdf_version \"3.5\";\n" +
- " Float64 Northernmost_Northing 42.5;\n" +
- " String platform_code \"LBBG\";\n" +
- " String qc_manual \"OceanSITES User\\\\'s Manual v1.1\";\n" + // it is incorrect in source
- " String quality_control_indicator \"6\";\n" +
- " String quality_index \"A\";\n" +
- " String references \"http://www.oceansites.org, http://marine.copernicus.eu\";\n" +
- " String source \"land/onshore structure\";\n" +
- " String sourceUrl \"(local files)\";\n" +
- " Float64 Southernmost_Northing 42.5;\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v70\";\n" +
- " String subsetVariables \"latitude, longitude, depth\";\n" +
- " String summary \"Unknown institution data from a local source.\";\n" +
- " String time_coverage_end \"2017-11-29T23:30:00Z\";\n" +
- " String time_coverage_start \"2017-10-01T00:00:00Z\";\n" +
- " String title \"Unknown institution data from a local source.\";\n" +
- " String update_interval \"daily\";\n" +
- " Float64 Westernmost_Easting 27.4833;\n" +
- " }\n" +
- "}\n";
+ expected = // "String id \"BS_201711_TS_MO_LBBG\";\n" +
+ " String infoUrl \"http://www.oceansites.org\";\n"
+ + " String institution \"Unknown institution\";\n"
+ + " String institution_references \"http://www.io-bas.bg/\";\n"
+ + " String keywords \"air, air_pressure_at_sea_level, air_temperature, atmosphere, atmospheric, ATMS, ATMS_DM, ATMS_QC, bulb, currents, data, DEPH_DM, DEPH_QC, depth, dew, dew point, dew_point_temperature, DEWT, DEWT_DM, DEWT_QC, direction, dry, DRYT, DRYT_DM, DRYT_QC, earth, Earth Science > Atmosphere > Atmospheric Pressure > Atmospheric Pressure Measurements, Earth Science > Atmosphere > Atmospheric Pressure > Sea Level Pressure, Earth Science > Atmosphere > Atmospheric Pressure > Static Pressure, Earth Science > Atmosphere > Atmospheric Temperature > Air Temperature, Earth Science > Atmosphere > Atmospheric Temperature > Dew Point Temperature, Earth Science > Atmosphere > Atmospheric Temperature > Surface Air Temperature, Earth Science > Atmosphere > Atmospheric Water Vapor > Dew Point Temperature, Earth Science > Atmosphere > Atmospheric Water Vapor > Humidity, Earth Science > Atmosphere > Atmospheric Winds > Surface Winds, flag, GSPD, GSPD_DM, GSPD_QC, gust, horizontal, humidity, institution, latitude, level, local, longitude, measurements, meteorology, method, north, point, pressure, processing, quality, relative, relative_humidity, RELH, RELH_DM, RELH_QC, science, sea, seawater, source, speed, static, surface, temperature, time, TIME_QC, true, vapor, water, WDIR, WDIR_DM, WDIR_QC, wind, wind_from_direction, wind_speed, wind_speed_of_gust, winds, WSPD, WSPD_DM, WSPD_QC\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String last_date_observation \"YYYY-MM-DDThh:mm:ssZ\";\n"
+ + " String last_latitude_observation \"42.5\";\n"
+ + " String last_longitude_observation \"27.4833\";\n"
+ + " String license \"These data follow Copernicus standards; they are public and free of charge. User assumes all risk for use of data. User must display citation in any publication or product using data. User must contact PI prior to any commercial use of data.\";\n"
+ + " String naming_authority \"OceanSITES\";\n"
+ + " String netcdf_version \"3.5\";\n"
+ + " Float64 Northernmost_Northing 42.5;\n"
+ + " String platform_code \"LBBG\";\n"
+ + " String qc_manual \"OceanSITES User\\\\'s Manual v1.1\";\n"
+ + // the escaped apostrophe is incorrect in the source file
+ " String quality_control_indicator \"6\";\n"
+ + " String quality_index \"A\";\n"
+ + " String references \"http://www.oceansites.org, http://marine.copernicus.eu\";\n"
+ + " String source \"land/onshore structure\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " Float64 Southernmost_Northing 42.5;\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v70\";\n"
+ + " String subsetVariables \"latitude, longitude, depth\";\n"
+ + " String summary \"Unknown institution data from a local source.\";\n"
+ + " String time_coverage_end \"YYYY-MM-DDThh:mm:ssZ\";\n"
+ + " String time_coverage_start \"YYYY-MM-DDThh:mm:ssZ\";\n"
+ + " String title \"Unknown institution data from a local source.\";\n"
+ + " String update_interval \"daily\";\n"
+ + " Float64 Westernmost_Easting 27.4833;\n"
+ + " }\n"
+ + "}\n";
+ results = results.replaceAll("....-..-..T..:..:..Z", "YYYY-MM-DDThh:mm:ssZ");
int po = results.indexOf(expected.substring(0, 40));
- Test.ensureEqual(results.substring(po), expected, "results=\n" + results);
+ Test.ensureEqual(results.substring(Math.max(0, po)), expected, "results=\n" + results);
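+ // Editorial sketch: String.indexOf returns -1 when the probe string is absent,
+ // and substring(-1) would throw StringIndexOutOfBoundsException; Math.max(0, po)
+ // lets the ensureEqual above report the real text mismatch instead of the throw.
+ {
+   int probe = "abcdef".indexOf("zz"); // -1: not found
+   String tail = "abcdef".substring(Math.max(0, probe)); // "abcdef", no exception
+ }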
// .dds
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_treatDimensionsAs2", ".dds");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_treatDimensionsAs2", ".dds");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "Dataset {\n" +
- " Sequence {\n" +
- " Float64 time;\n" +
- " Byte TIME_QC;\n" +
- " Float32 latitude;\n" +
- " Float32 longitude;\n" +
- " Float32 depth;\n" +
- " Byte DEPH_QC;\n" +
- " String DEPH_DM;\n" +
- " Float32 RELH;\n" +
- " Byte RELH_QC;\n" +
- " String RELH_DM;\n" +
- " Float32 ATMS;\n" +
- " Byte ATMS_QC;\n" +
- " String ATMS_DM;\n" +
- " Float32 DRYT;\n" +
- " Byte DRYT_QC;\n" +
- " String DRYT_DM;\n" +
- " Float32 DEWT;\n" +
- " Byte DEWT_QC;\n" +
- " String DEWT_DM;\n" +
- " Float32 WSPD;\n" +
- " Byte WSPD_QC;\n" +
- " String WSPD_DM;\n" +
- " Float32 WDIR;\n" +
- " Byte WDIR_QC;\n" +
- " String WDIR_DM;\n" +
- " Float32 GSPD;\n" +
- " Byte GSPD_QC;\n" +
- " String GSPD_DM;\n" +
- " } s;\n" +
- "} s;\n";
+ expected =
+ "Dataset {\n"
+ + " Sequence {\n"
+ + " Float64 time;\n"
+ + " Byte TIME_QC;\n"
+ + " Float32 latitude;\n"
+ + " Float32 longitude;\n"
+ + " Float32 depth;\n"
+ + " Byte DEPH_QC;\n"
+ + " String DEPH_DM;\n"
+ + " Float32 RELH;\n"
+ + " Byte RELH_QC;\n"
+ + " String RELH_DM;\n"
+ + " Float32 ATMS;\n"
+ + " Byte ATMS_QC;\n"
+ + " String ATMS_DM;\n"
+ + " Float32 DRYT;\n"
+ + " Byte DRYT_QC;\n"
+ + " String DRYT_DM;\n"
+ + " Float32 DEWT;\n"
+ + " Byte DEWT_QC;\n"
+ + " String DEWT_DM;\n"
+ + " Float32 WSPD;\n"
+ + " Byte WSPD_QC;\n"
+ + " String WSPD_DM;\n"
+ + " Float32 WDIR;\n"
+ + " Byte WDIR_QC;\n"
+ + " String WDIR_DM;\n"
+ + " Float32 GSPD;\n"
+ + " Byte GSPD_QC;\n"
+ + " String GSPD_DM;\n"
+ + " } s;\n"
+ + "} s;\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// .csv
// " Float64 actual_range 1.3201056e+9, 1.3437756e+9;\n" +
- tName = eddTable.makeNewFileForDapQuery(language, null, null,
- "&time=\"2017-10-01T00:00:00Z\"", // in quotes
- dir, eddTable.className() + "_treatDimensionsAs21", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "&time=\"2017-10-01T00:00:00Z\"", // in quotes
+ dir,
+ eddTable.className() + "_treatDimensionsAs21",
+ ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "time,TIME_QC,latitude,longitude,depth,DEPH_QC,DEPH_DM,RELH,RELH_QC,RELH_DM,ATMS,ATMS_QC,ATMS_DM,DRYT,DRYT_QC,DRYT_DM,DEWT,DEWT_QC,DEWT_DM,WSPD,WSPD_QC,WSPD_DM,WDIR,WDIR_QC,WDIR_DM,GSPD,GSPD_QC,GSPD_DM\n"
- +
- "UTC,,degrees_north,degrees_east,m,,,percent,,,hPa,,,degree_C,,,degree_C,,,m s-1,,,degree,,,m s-1,,\n" +
- "2017-10-01T00:00:00Z,1,42.5,27.4833,-2.0,7,R,71.45,0,R,NaN,NaN,R,12.0,0,R,7.0,0,R,4.115556,0,R,10.0,0,R,NaN,9,R\n"
- +
- "2017-10-01T00:00:00Z,1,42.5,27.4833,0.0,7,R,NaN,NaN,R,NaN,9,R,NaN,NaN,R,NaN,NaN,R,NaN,NaN,R,NaN,NaN,R,NaN,NaN,R\n";
+ expected =
+ "time,TIME_QC,latitude,longitude,depth,DEPH_QC,DEPH_DM,RELH,RELH_QC,RELH_DM,ATMS,ATMS_QC,ATMS_DM,DRYT,DRYT_QC,DRYT_DM,DEWT,DEWT_QC,DEWT_DM,WSPD,WSPD_QC,WSPD_DM,WDIR,WDIR_QC,WDIR_DM,GSPD,GSPD_QC,GSPD_DM\n"
+ + "UTC,,degrees_north,degrees_east,m,,,percent,,,hPa,,,degree_C,,,degree_C,,,m s-1,,,degree,,,m s-1,,\n"
+ + "2017-10-01T00:00:00Z,1,42.5,27.4833,-2.0,7,R,71.45,0,R,NaN,NaN,R,12.0,0,R,7.0,0,R,4.115556,0,R,10.0,0,R,NaN,9,R\n"
+ + "2017-10-01T00:00:00Z,1,42.5,27.4833,0.0,7,R,NaN,NaN,R,NaN,9,R,NaN,NaN,R,NaN,NaN,R,NaN,NaN,R,NaN,NaN,R,NaN,NaN,R\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// all data
@@ -3373,21 +3456,26 @@ void testTreatDimensionsAs2(boolean deleteCachedInfo) throws Throwable {
// results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- tName = eddTable.makeNewFileForDapQuery(language, null, null,
- // !!! INTERESTING TEST. Originally, specific max iso time fails
- // because it is to nearest second and actual max is off by tiny amount
- // I added a little fudge to the test to make it to the nearest second.
- // See EDDTable line 2302 and above
- "&time=2017-11-29T23:30:00Z",
- dir, eddTable.className() + "_treatDimensionsAs23", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ // !!! INTERESTING TEST. Originally, requesting the specific max ISO time
+ // failed because the request is rounded to the nearest second while the
+ // actual max is off by a tiny amount. I added a little fudge to the test
+ // so the comparison is to the nearest second. See EDDTable line 2302 and above.
+ "&time=2017-11-29T23:30:00Z",
+ dir,
+ eddTable.className() + "_treatDimensionsAs23",
+ ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "time,TIME_QC,latitude,longitude,depth,DEPH_QC,DEPH_DM,RELH,RELH_QC,RELH_DM,ATMS,ATMS_QC,ATMS_DM,DRYT,DRYT_QC,DRYT_DM,DEWT,DEWT_QC,DEWT_DM,WSPD,WSPD_QC,WSPD_DM,WDIR,WDIR_QC,WDIR_DM,GSPD,GSPD_QC,GSPD_DM\n"
- +
- "UTC,,degrees_north,degrees_east,m,,,percent,,,hPa,,,degree_C,,,degree_C,,,m s-1,,,degree,,,m s-1,,\n" +
- "2017-11-29T23:30:00Z,1,42.5,27.4833,0.0,7,R,93.14,0,R,NaN,9,\" \",3.0,0,R,2.0,0,R,0.0,0,R,0.0,0,R,NaN,9,\" \"\n";
+ expected =
+ "time,TIME_QC,latitude,longitude,depth,DEPH_QC,DEPH_DM,RELH,RELH_QC,RELH_DM,ATMS,ATMS_QC,ATMS_DM,DRYT,DRYT_QC,DRYT_DM,DEWT,DEWT_QC,DEWT_DM,WSPD,WSPD_QC,WSPD_DM,WDIR,WDIR_QC,WDIR_DM,GSPD,GSPD_QC,GSPD_DM\n"
+ + "UTC,,degrees_north,degrees_east,m,,,percent,,,hPa,,,degree_C,,,degree_C,,,m s-1,,,degree,,,m s-1,,\n"
+ + "2017-11-29T23:30:00Z,1,42.5,27.4833,0.0,7,R,93.14,0,R,NaN,9,\" \",3.0,0,R,2.0,0,R,0.0,0,R,0.0,0,R,NaN,9,\" \"\n";
Test.ensureEqual(results, expected, "results=\n" + results);
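+ // Editorial sketch of the rounding issue described in the comment above, using
+ // the dataset's own numbers: the stored max time is a hair below the whole
+ // second, so an exact "&time=" match works only once both sides are rounded.
+ {
+   double actualMax = 1.5119981999999967e9; // slightly before 2017-11-29T23:30:00Z
+   long rounded = Math.round(actualMax); // 1511998200L, i.e. 2017-11-29T23:30:00Z
+ }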
-
}
/**
@@ -3406,15 +3494,17 @@ void testLongAndNetcdf4() throws Throwable {
String error = "";
EDV edv;
String dir = EDStatic.fullTestCacheDirectory;
- String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 14); // 14 is enough to check hour. Hard
- // to check min:sec.
+ String today =
+ Calendar2.getCurrentISODateTimeStringZulu()
+ .substring(0, 14); // 14 is enough to check hour; hard to check min:sec.
String id = "testLong";
EDDTable eddTable = (EDDTable) EDDTestDataset.gettestLong();
/*
- *
+ *
* //*** test getting das for entire dataset
* String2.
* log("\n*** EDDTableFromMultidimNcFiles test das and dds for entire dataset\n"
@@ -3633,7 +3723,7 @@ void testLongAndNetcdf4() throws Throwable {
* tResults = results.substring(0, Math.min(results.length(),
* expected.length()));
* Test.ensureEqual(tResults, expected, "\nresults=\n" + results);
- *
+ *
* expected=
* " String infoUrl \"http://oceanobservatories.org\";\n" +
* " String institution \"CGSN\";\n" +
@@ -3671,7 +3761,7 @@ void testLongAndNetcdf4() throws Throwable {
* Test.ensureEqual(
* results.substring(tPo, Math.min(results.length(), tPo + expected.length())),
* expected, "results=\n" + results);
- *
+ *
* //*** test getting dds for entire dataset
* tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
* eddTable.className() + "_LongEntire", ".dds");
@@ -3697,10 +3787,10 @@ void testLongAndNetcdf4() throws Throwable {
* " } s;\n" +
* "} s;\n";
* Test.ensureEqual(results, expected, "\nresults=\n" + results);
- *
- *
+ *
+ *
* //*** test make data files
- *
+ *
* //.csv
* userDapQuery = "&time<=2016-09-28T00:03";
* tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
@@ -3718,7 +3808,7 @@ void testLongAndNetcdf4() throws Throwable {
* +
* "ce02shsm,44.63893,-124.30379,-2147483647,-2147483647,2016-09-28T00:02:19.852Z,0.0,-1356.0,14.94,2016-09-28T00:02:19.852Z,0,4202496,253755392,D00004\n";
* Test.ensureEqual(results, expected, "\nresults=\n" + results);
- *
+ *
* //.csv constrain long
* userDapQuery = "&error_flag2!=4202496";
* tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
@@ -3734,8 +3824,8 @@ void testLongAndNetcdf4() throws Throwable {
* +
* "ce02shsm,44.63893,-124.30379,-2147483647,-2147483647,2016-09-28T00:21:19.825Z,0.0,519.0,14.94,2016-09-28T00:21:19.825Z,0,12591104,253755392,D00004\n";
* Test.ensureEqual(results, expected, "\nresults=\n" + results);
- *
- *
+ *
+ *
* //.csv for test requesting distinct
* userDapQuery = "error_flag2&distinct()";
* tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
@@ -3749,7 +3839,7 @@ void testLongAndNetcdf4() throws Throwable {
* "4202496\n" +
* "12591104\n";
* Test.ensureEqual(results, expected, "\nresults=\n" + results);
- *
+ *
* //make nc3
* userDapQuery =
* "feature_type_instance,latitude,longitude,error_flag3&time<=2016-09-28T00:03";
@@ -3844,7 +3934,7 @@ void testLongAndNetcdf4() throws Throwable {
* ;
* Test.ensureEqual(results.substring(0, Math.min(results.length(),
* expected.length())), expected, "\nresults=\n" + results);
- *
+ *
* //"2017-03-28T22:21:20Z (local files)\n" +
* //"2017-03-28T22:21:20Z http://localhost:8080/cwexperimental/tabledap/testLong.nc?feature_type_instance,latitude,longitude,error_flag3&time<=2016-09-28T00:03\";\n"
* +
@@ -3888,19 +3978,21 @@ void testLongAndNetcdf4() throws Throwable {
* "}\n";
* int po = results.indexOf(expected.substring(0, 30));
* Test.ensureEqual(results.substring(po), expected, "\nresults=\n" + results);
- *
+ *
*/
// make nc4
- Test.ensureTrue(String2.indexOf(EDDTableFromMultidimNcFiles.dataFileTypeNames, ".nc4") >= 0, "Enable .nc4?");
+ Test.ensureTrue(
+ String2.indexOf(EDDTableFromMultidimNcFiles.dataFileTypeNames, ".nc4") >= 0,
+ "Enable .nc4?");
userDapQuery = "feature_type_instance,latitude,longitude,error_flag3&time<=2016-09-28T00:03";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_Longnc4", ".nc4");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_Longnc4", ".nc4");
results = NcHelper.ncdump(dir + tName, "");
// String2.log(results);
expected = "zztop\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
}
/**
@@ -3909,7 +4001,7 @@ void testLongAndNetcdf4() throws Throwable {
* @throws Throwable if trouble
*/
@ParameterizedTest
- @ValueSource(booleans = { true, false })
+ @ValueSource(booleans = {true, false})
@TagMissingDataset // Errors loading dataset files, nothing valid?
void testW1M3A(boolean deleteCachedInfo) throws Throwable {
// String2.log("\n****************** EDDTableFromMultidimNcFiles.testW1M3A()
@@ -3921,13 +4013,20 @@ void testW1M3A(boolean deleteCachedInfo) throws Throwable {
EDV edv;
int po, po2;
String dir = EDStatic.fullTestCacheDirectory;
- String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 14); // 14 is enough to check hour. Hard
- // to check min:sec.
+ String today =
+ Calendar2.getCurrentISODateTimeStringZulu()
+ .substring(0, 14); // 14 is enough to check hour; hard to check min:sec.
// test the floats work as expected
float f = String2.parseFloat("-3.4E38");
- String2.log(">> parse -3.4E38 => " + f + " isFinite=" + Float.isFinite(f) + " equal5? "
- + Math2.almostEqual(5, -3.4e38, f));
+ String2.log(
+ ">> parse -3.4E38 => "
+ + f
+ + " isFinite="
+ + Float.isFinite(f)
+ + " equal5? "
+ + Math2.almostEqual(5, -3.4e38, f));
Test.ensureTrue(Float.isFinite(f), "");
Test.ensureTrue(Math2.almostEqual(5, -3.4e38, f), "");
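+ // Editorial sketch, assuming Math2.almostEqual(5, a, b) means "equal to about
+ // 5 significant digits" (an assumption about the CoHort helper); a plain-Java
+ // near-equivalent of the two checks above:
+ {
+   float parsed = Float.parseFloat("-3.4E38"); // within Float range, so finite
+   boolean finite = Float.isFinite(parsed);
+   boolean close = Math.abs(-3.4e38 - parsed) < Math.abs(-3.4e38) * 1e-5;
+ }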
@@ -3963,32 +4062,34 @@ void testW1M3A(boolean deleteCachedInfo) throws Throwable {
// make the dataset
String id = "W1M3A";
- if (deleteCachedInfo)
- EDD.deleteCachedDatasetInfo(id);
+ if (deleteCachedInfo) EDD.deleteCachedDatasetInfo(id);
EDDTable eddTable = (EDDTable) EDDTestDataset.getW1M3A();
// reported problem
userDapQuery = "time,depth,TEMP&time>=2011-01-03T00&time<=2011-01-03T03";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_1profile", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_1profile", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "time,depth,TEMP\n" +
- "UTC,m,\n" +
- "2011-01-03T00:00:00Z,0.0,\n" + // was -3.4E38
- "2011-01-03T00:00:00Z,1.0,13.151\n" +
- "2011-01-03T00:00:00Z,6.0,13.168\n" +
- "2011-01-03T00:00:00Z,12.0,13.165\n" +
- "2011-01-03T00:00:00Z,20.0,13.166\n" +
- "2011-01-03T00:00:00Z,36.0,13.395\n" +
- "2011-01-03T03:00:00Z,0.0,\n" + // was -3.4E38
- "2011-01-03T03:00:00Z,1.0,13.194\n" +
- "2011-01-03T03:00:00Z,6.0,13.241\n" +
- "2011-01-03T03:00:00Z,12.0,13.186\n" +
- "2011-01-03T03:00:00Z,20.0,13.514\n" +
- "2011-01-03T03:00:00Z,36.0,13.927\n";
+ expected =
+ "time,depth,TEMP\n"
+ + "UTC,m,\n"
+ + "2011-01-03T00:00:00Z,0.0,\n"
+ + // was -3.4E38
+ "2011-01-03T00:00:00Z,1.0,13.151\n"
+ + "2011-01-03T00:00:00Z,6.0,13.168\n"
+ + "2011-01-03T00:00:00Z,12.0,13.165\n"
+ + "2011-01-03T00:00:00Z,20.0,13.166\n"
+ + "2011-01-03T00:00:00Z,36.0,13.395\n"
+ + "2011-01-03T03:00:00Z,0.0,\n"
+ + // was -3.4E38
+ "2011-01-03T03:00:00Z,1.0,13.194\n"
+ + "2011-01-03T03:00:00Z,6.0,13.241\n"
+ + "2011-01-03T03:00:00Z,12.0,13.186\n"
+ + "2011-01-03T03:00:00Z,20.0,13.514\n"
+ + "2011-01-03T03:00:00Z,36.0,13.927\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
}
/**
@@ -3997,7 +4098,7 @@ void testW1M3A(boolean deleteCachedInfo) throws Throwable {
* @throws Throwable if trouble
*/
@ParameterizedTest
- @ValueSource(booleans = { true, false })
+ @ValueSource(booleans = {true, false})
void testCharAsString(boolean deleteCachedInfo) throws Throwable {
// String2.log("\n******************
// EDDTableFromMultidimNcFiles.testCharAsString() *****************\n");
@@ -4007,468 +4108,468 @@ void testCharAsString(boolean deleteCachedInfo) throws Throwable {
String error = "";
EDV edv;
String dir = EDStatic.fullTestCacheDirectory;
- String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 14); // 14 is enough to check hour. Hard
- // to check min:sec.
+ String today =
+ Calendar2.getCurrentISODateTimeStringZulu()
+ .substring(0, 14); // 14 is enough to check hour; hard to check min:sec.
// print dumpString of one of the data files
// String2.log(NcHelper.ncdump(EDStatic.unitTestDataDir +
// "nccf/testCharAsString/7900364_prof.nc", "-h"));
String id = "testCharAsString";
- if (deleteCachedInfo)
- EDD.deleteCachedDatasetInfo(id);
+ if (deleteCachedInfo) EDD.deleteCachedDatasetInfo(id);
EDDTable eddTable = (EDDTable) EDDTestDataset.gettestCharAsString();
// *** test getting das for entire dataset
String2.log("\n****************** test das and dds for entire dataset\n");
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_Entire", ".das");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_Entire", ".das");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "Attributes {\n" +
- " s {\n" +
- " fileNumber {\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"File Number\";\n" +
- " }\n" +
- " data_type {\n" +
- " String conventions \"Argo reference table 1\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Data type\";\n" +
- " }\n" +
- " format_version {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"File format version\";\n" +
- " }\n" +
- " handbook_version {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Data handbook version\";\n" +
- " }\n" +
- " reference_date_time {\n" +
- " Float64 actual_range -6.31152e+8, -6.31152e+8;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Date of reference for Julian days\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " date_creation {\n" +
- " Float64 actual_range 1.369414924e+9, 1.446162171e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Date of file creation\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " date_update {\n" +
- " Float64 actual_range 1.499448499e+9, 1.542981342e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Date of update of this file\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " platform_number {\n" +
- " String cf_role \"trajectory_id\";\n" +
- " String conventions \"WMO float identifier : A9IIIII\";\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Float unique identifier\";\n" +
- " }\n" +
- " project_name {\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Name of the project\";\n" +
- " }\n" +
- " pi_name {\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Name of the principal investigator\";\n" +
- " }\n" +
- " cycle_number {\n" +
- " Int32 _FillValue 99999;\n" +
- " Int32 actual_range 1, 142;\n" +
- " String cf_role \"profile_id\";\n" +
- " Float64 colorBarMaximum 200.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"0...N, 0 : launch cycle (if exists), 1 : first complete cycle\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Float cycle number\";\n" +
- " }\n" +
- " direction {\n" +
- " Float64 colorBarMaximum 360.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"A: ascending profiles, D: descending profiles\";\n" +
- " String ioos_category \"Currents\";\n" +
- " String long_name \"Direction of the station profiles\";\n" +
- " }\n" +
- " data_center {\n" +
- " String conventions \"Argo reference table 4\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Data centre in charge of float data processing\";\n" +
- " }\n" +
- " dc_reference {\n" +
- " String conventions \"Data centre convention\";\n" +
- " String ioos_category \"Identifier\";\n" +
- " String long_name \"Station unique identifier in data centre\";\n" +
- " }\n" +
- " data_state_indicator {\n" +
- " String conventions \"Argo reference table 6\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Degree of processing the data have passed through\";\n" +
- " }\n" +
- " data_mode {\n" +
- " String conventions \"R : real time; D : delayed mode; A : real time with adjustment\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Delayed mode or real time data\";\n" +
- " }\n" +
- " platform_type {\n" +
- " String conventions \"Argo reference table 23\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Type of float\";\n" +
- " }\n" +
- " float_serial_no {\n" +
- " Float64 colorBarMaximum 100.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Serial number of the float\";\n" +
- " }\n" +
- " firmware_version {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Instrument firmware version\";\n" +
- " }\n" +
- " wmo_inst_type {\n" +
- " String conventions \"Argo reference table 8\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Coded instrument type\";\n" +
- " }\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.356599997e+9, 1.4963616e+9;\n" +
- " String axis \"T\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Julian day (UTC) of the station relative to REFERENCE_DATE_TIME\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " time_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Quality on date and time\";\n" +
- " }\n" +
- " time_location {\n" +
- " Float64 actual_range 1.356599997e+9, 1.496362576e+9;\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Julian day (UTC) of the location relative to REFERENCE_DATE_TIME\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float64 _FillValue 99999.0;\n" +
- " Float64 actual_range -66.6667, 43.81645;\n" +
- " String axis \"Y\";\n" +
- " Float64 colorBarMaximum 90.0;\n" +
- " Float64 colorBarMinimum -90.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude of the station, best estimate\";\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " Float64 valid_max 90.0;\n" +
- " Float64 valid_min -90.0;\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float64 _FillValue 99999.0;\n" +
- " Float64 actual_range -26.250239999999998, 36.42373;\n" +
- " String axis \"X\";\n" +
- " Float64 colorBarMaximum 180.0;\n" +
- " Float64 colorBarMinimum -180.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude of the station, best estimate\";\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " Float64 valid_max 180.0;\n" +
- " Float64 valid_min -180.0;\n" +
- " }\n" +
- " position_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Quality on position (latitude and longitude)\";\n" +
- " }\n" +
- " positioning_system {\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Positioning system\";\n" +
- " }\n" +
- " profile_pres_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2a\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Global quality flag of PRES profile\";\n" +
- " }\n" +
- " profile_temp_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2a\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Global quality flag of TEMP profile\";\n" +
- " }\n" +
- " profile_psal_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2a\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Global quality flag of PSAL profile\";\n" +
- " }\n" +
- " vertical_sampling_scheme {\n" +
- " String conventions \"Argo reference table 16\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"Vertical sampling scheme\";\n" +
- " }\n" +
- " config_mission_number {\n" +
- " Int32 _FillValue 99999;\n" +
- " Int32 actual_range 1, 2;\n" +
- " Float64 colorBarMaximum 100.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"1...N, 1 : first complete mission\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Unique number denoting the missions performed by the float\";\n" +
- " }\n" +
- " pres {\n" +
- " String _CoordinateAxisType \"Height\";\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range -0.2, 1999.9;\n" +
- " String axis \"Z\";\n" +
- " String C_format \"%7.1f\";\n" +
- " Float64 colorBarMaximum 5000.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F7.1\";\n" +
- " String ioos_category \"Sea Level\";\n" +
- " String long_name \"Sea water pressure, equals 0 at sea-level\";\n" +
- " String standard_name \"sea_water_pressure\";\n" +
- " String units \"decibar\";\n" +
- " Float32 valid_max 12000.0;\n" +
- " Float32 valid_min 0.0;\n" +
- " }\n" +
- " pres_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " pres_adjusted {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " String axis \"Z\";\n" +
- " String C_format \"%7.1f\";\n" +
- " Float64 colorBarMaximum 5000.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F7.1\";\n" +
- " String ioos_category \"Sea Level\";\n" +
- " String long_name \"Sea water pressure, equals 0 at sea-level\";\n" +
- " String standard_name \"sea_water_pressure\";\n" +
- " String units \"decibar\";\n" +
- " Float32 valid_max 12000.0;\n" +
- " Float32 valid_min 0.0;\n" +
- " }\n" +
- " pres_adjusted_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " pres_aqdjusted_error {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " String C_format \"%7.1f\";\n" +
- " Float64 colorBarMaximum 50.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F7.1\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
- +
- " String units \"decibar\";\n" +
- " }\n" +
- " temp {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range -1.855, 27.185;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 32.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Sea temperature in-situ ITS-90 scale\";\n" +
- " String standard_name \"sea_water_temperature\";\n" +
- " String units \"degree_Celsius\";\n" +
- " Float32 valid_max 40.0;\n" +
- " Float32 valid_min -2.5;\n" +
- " }\n" +
- " temp_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " temp_adjusted {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 32.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Temperature\";\n" +
- " String long_name \"Sea temperature in-situ ITS-90 scale\";\n" +
- " String standard_name \"sea_water_temperature\";\n" +
- " String units \"degree_Celsius\";\n" +
- " Float32 valid_max 40.0;\n" +
- " Float32 valid_min -2.5;\n" +
- " }\n" +
- " temp_adjusted_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " temp_adjusted_error {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 1.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
- +
- " String units \"degree_Celsius\";\n" +
- " }\n" +
- " psal {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " Float32 actual_range 15.829, 34.691;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 37.0;\n" +
- " Float64 colorBarMinimum 32.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Salinity\";\n" +
- " String long_name \"Practical salinity\";\n" +
- " String standard_name \"sea_water_practical_salinity\";\n" +
- " String units \"PSU\";\n" +
- " Float32 valid_max 41.0;\n" +
- " Float32 valid_min 2.0;\n" +
- " }\n" +
- " psal_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " psal_adjusted {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 37.0;\n" +
- " Float64 colorBarMinimum 32.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Salinity\";\n" +
- " String long_name \"Practical salinity\";\n" +
- " String standard_name \"sea_water_practical_salinity\";\n" +
- " String units \"PSU\";\n" +
- " Float32 valid_max 41.0;\n" +
- " Float32 valid_min 2.0;\n" +
- " }\n" +
- " psal_adjusted_qc {\n" +
- " Float64 colorBarMaximum 150.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String conventions \"Argo reference table 2\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"quality flag\";\n" +
- " }\n" +
- " psal_adjusted_error {\n" +
- " Float32 _FillValue 99999.0;\n" +
- " String C_format \"%9.3f\";\n" +
- " Float64 colorBarMaximum 1.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String FORTRAN_format \"F9.3\";\n" +
- " String ioos_category \"Statistics\";\n" +
- " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
- +
- " String units \"psu\";\n" +
- " }\n" +
- " }\n" +
- " NC_GLOBAL {\n" +
- " String cdm_altitude_proxy \"pres\";\n" +
- " String cdm_data_type \"TrajectoryProfile\";\n" +
- " String cdm_profile_variables \"cycle_number, data_type, format_version, handbook_version, reference_date_time, date_creation, date_update, direction, data_center, dc_reference, data_state_indicator, data_mode, firmware_version, wmo_inst_type, time, time_qc, time_location, latitude, longitude, position_qc, positioning_system, profile_pres_qc, profile_temp_qc, profile_psal_qc, vertical_sampling_scheme\";\n"
- +
- " String cdm_trajectory_variables \"platform_number, project_name, pi_name, platform_type, float_serial_no\";\n"
- +
- " String Conventions \"Argo-3.1, CF-1.6, COARDS, ACDD-1.3\";\n" +
- " String creator_email \"support@argo.net\";\n" +
- " String creator_name \"Argo\";\n" +
- " String creator_url \"http://www.argo.net/\";\n" +
- " Float64 Easternmost_Easting 36.42373;\n" +
- " String featureType \"TrajectoryProfile\";\n" +
- " Float64 geospatial_lat_max 43.81645;\n" +
- " Float64 geospatial_lat_min -66.6667;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max 36.42373;\n" +
- " Float64 geospatial_lon_min -26.250239999999998;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " String history \"" + today;
+ expected =
+ "Attributes {\n"
+ + " s {\n"
+ + " fileNumber {\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"File Number\";\n"
+ + " }\n"
+ + " data_type {\n"
+ + " String conventions \"Argo reference table 1\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Data type\";\n"
+ + " }\n"
+ + " format_version {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"File format version\";\n"
+ + " }\n"
+ + " handbook_version {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Data handbook version\";\n"
+ + " }\n"
+ + " reference_date_time {\n"
+ + " Float64 actual_range -6.31152e+8, -6.31152e+8;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Date of reference for Julian days\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " date_creation {\n"
+ + " Float64 actual_range 1.369414924e+9, 1.446162171e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Date of file creation\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " date_update {\n"
+ + " Float64 actual_range 1.499448499e+9, 1.542981342e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Date of update of this file\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " platform_number {\n"
+ + " String cf_role \"trajectory_id\";\n"
+ + " String conventions \"WMO float identifier : A9IIIII\";\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Float unique identifier\";\n"
+ + " }\n"
+ + " project_name {\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Name of the project\";\n"
+ + " }\n"
+ + " pi_name {\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Name of the principal investigator\";\n"
+ + " }\n"
+ + " cycle_number {\n"
+ + " Int32 _FillValue 99999;\n"
+ + " Int32 actual_range 1, 142;\n"
+ + " String cf_role \"profile_id\";\n"
+ + " Float64 colorBarMaximum 200.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"0...N, 0 : launch cycle (if exists), 1 : first complete cycle\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Float cycle number\";\n"
+ + " }\n"
+ + " direction {\n"
+ + " Float64 colorBarMaximum 360.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"A: ascending profiles, D: descending profiles\";\n"
+ + " String ioos_category \"Currents\";\n"
+ + " String long_name \"Direction of the station profiles\";\n"
+ + " }\n"
+ + " data_center {\n"
+ + " String conventions \"Argo reference table 4\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Data centre in charge of float data processing\";\n"
+ + " }\n"
+ + " dc_reference {\n"
+ + " String conventions \"Data centre convention\";\n"
+ + " String ioos_category \"Identifier\";\n"
+ + " String long_name \"Station unique identifier in data centre\";\n"
+ + " }\n"
+ + " data_state_indicator {\n"
+ + " String conventions \"Argo reference table 6\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Degree of processing the data have passed through\";\n"
+ + " }\n"
+ + " data_mode {\n"
+ + " String conventions \"R : real time; D : delayed mode; A : real time with adjustment\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Delayed mode or real time data\";\n"
+ + " }\n"
+ + " platform_type {\n"
+ + " String conventions \"Argo reference table 23\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Type of float\";\n"
+ + " }\n"
+ + " float_serial_no {\n"
+ + " Float64 colorBarMaximum 100.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Serial number of the float\";\n"
+ + " }\n"
+ + " firmware_version {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Instrument firmware version\";\n"
+ + " }\n"
+ + " wmo_inst_type {\n"
+ + " String conventions \"Argo reference table 8\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Coded instrument type\";\n"
+ + " }\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.356599997e+9, 1.4963616e+9;\n"
+ + " String axis \"T\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Julian day (UTC) of the station relative to REFERENCE_DATE_TIME\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " time_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Quality on date and time\";\n"
+ + " }\n"
+ + " time_location {\n"
+ + " Float64 actual_range 1.356599997e+9, 1.496362576e+9;\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Julian day (UTC) of the location relative to REFERENCE_DATE_TIME\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float64 _FillValue 99999.0;\n"
+ + " Float64 actual_range -66.6667, 43.81645;\n"
+ + " String axis \"Y\";\n"
+ + " Float64 colorBarMaximum 90.0;\n"
+ + " Float64 colorBarMinimum -90.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude of the station, best estimate\";\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " Float64 valid_max 90.0;\n"
+ + " Float64 valid_min -90.0;\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float64 _FillValue 99999.0;\n"
+ + " Float64 actual_range -26.250239999999998, 36.42373;\n"
+ + " String axis \"X\";\n"
+ + " Float64 colorBarMaximum 180.0;\n"
+ + " Float64 colorBarMinimum -180.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude of the station, best estimate\";\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " Float64 valid_max 180.0;\n"
+ + " Float64 valid_min -180.0;\n"
+ + " }\n"
+ + " position_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Quality on position (latitude and longitude)\";\n"
+ + " }\n"
+ + " positioning_system {\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Positioning system\";\n"
+ + " }\n"
+ + " profile_pres_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2a\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Global quality flag of PRES profile\";\n"
+ + " }\n"
+ + " profile_temp_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2a\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Global quality flag of TEMP profile\";\n"
+ + " }\n"
+ + " profile_psal_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2a\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Global quality flag of PSAL profile\";\n"
+ + " }\n"
+ + " vertical_sampling_scheme {\n"
+ + " String conventions \"Argo reference table 16\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"Vertical sampling scheme\";\n"
+ + " }\n"
+ + " config_mission_number {\n"
+ + " Int32 _FillValue 99999;\n"
+ + " Int32 actual_range 1, 2;\n"
+ + " Float64 colorBarMaximum 100.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"1...N, 1 : first complete mission\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Unique number denoting the missions performed by the float\";\n"
+ + " }\n"
+ + " pres {\n"
+ + " String _CoordinateAxisType \"Height\";\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range -0.2, 1999.9;\n"
+ + " String axis \"Z\";\n"
+ + " String C_format \"%7.1f\";\n"
+ + " Float64 colorBarMaximum 5000.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F7.1\";\n"
+ + " String ioos_category \"Sea Level\";\n"
+ + " String long_name \"Sea water pressure, equals 0 at sea-level\";\n"
+ + " String standard_name \"sea_water_pressure\";\n"
+ + " String units \"decibar\";\n"
+ + " Float32 valid_max 12000.0;\n"
+ + " Float32 valid_min 0.0;\n"
+ + " }\n"
+ + " pres_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " pres_adjusted {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " String axis \"Z\";\n"
+ + " String C_format \"%7.1f\";\n"
+ + " Float64 colorBarMaximum 5000.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F7.1\";\n"
+ + " String ioos_category \"Sea Level\";\n"
+ + " String long_name \"Sea water pressure, equals 0 at sea-level\";\n"
+ + " String standard_name \"sea_water_pressure\";\n"
+ + " String units \"decibar\";\n"
+ + " Float32 valid_max 12000.0;\n"
+ + " Float32 valid_min 0.0;\n"
+ + " }\n"
+ + " pres_adjusted_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " pres_aqdjusted_error {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " String C_format \"%7.1f\";\n"
+ + " Float64 colorBarMaximum 50.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F7.1\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
+ + " String units \"decibar\";\n"
+ + " }\n"
+ + " temp {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range -1.855, 27.185;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 32.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Sea temperature in-situ ITS-90 scale\";\n"
+ + " String standard_name \"sea_water_temperature\";\n"
+ + " String units \"degree_Celsius\";\n"
+ + " Float32 valid_max 40.0;\n"
+ + " Float32 valid_min -2.5;\n"
+ + " }\n"
+ + " temp_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " temp_adjusted {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 32.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Temperature\";\n"
+ + " String long_name \"Sea temperature in-situ ITS-90 scale\";\n"
+ + " String standard_name \"sea_water_temperature\";\n"
+ + " String units \"degree_Celsius\";\n"
+ + " Float32 valid_max 40.0;\n"
+ + " Float32 valid_min -2.5;\n"
+ + " }\n"
+ + " temp_adjusted_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " temp_adjusted_error {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 1.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
+ + " String units \"degree_Celsius\";\n"
+ + " }\n"
+ + " psal {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " Float32 actual_range 15.829, 34.691;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 37.0;\n"
+ + " Float64 colorBarMinimum 32.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Salinity\";\n"
+ + " String long_name \"Practical salinity\";\n"
+ + " String standard_name \"sea_water_practical_salinity\";\n"
+ + " String units \"PSU\";\n"
+ + " Float32 valid_max 41.0;\n"
+ + " Float32 valid_min 2.0;\n"
+ + " }\n"
+ + " psal_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " psal_adjusted {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 37.0;\n"
+ + " Float64 colorBarMinimum 32.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Salinity\";\n"
+ + " String long_name \"Practical salinity\";\n"
+ + " String standard_name \"sea_water_practical_salinity\";\n"
+ + " String units \"PSU\";\n"
+ + " Float32 valid_max 41.0;\n"
+ + " Float32 valid_min 2.0;\n"
+ + " }\n"
+ + " psal_adjusted_qc {\n"
+ + " Float64 colorBarMaximum 150.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String conventions \"Argo reference table 2\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"quality flag\";\n"
+ + " }\n"
+ + " psal_adjusted_error {\n"
+ + " Float32 _FillValue 99999.0;\n"
+ + " String C_format \"%9.3f\";\n"
+ + " Float64 colorBarMaximum 1.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String FORTRAN_format \"F9.3\";\n"
+ + " String ioos_category \"Statistics\";\n"
+ + " String long_name \"Contains the error on the adjusted values as determined by the delayed mode QC process\";\n"
+ + " String units \"psu\";\n"
+ + " }\n"
+ + " }\n"
+ + " NC_GLOBAL {\n"
+ + " String cdm_altitude_proxy \"pres\";\n"
+ + " String cdm_data_type \"TrajectoryProfile\";\n"
+ + " String cdm_profile_variables \"cycle_number, data_type, format_version, handbook_version, reference_date_time, date_creation, date_update, direction, data_center, dc_reference, data_state_indicator, data_mode, firmware_version, wmo_inst_type, time, time_qc, time_location, latitude, longitude, position_qc, positioning_system, profile_pres_qc, profile_temp_qc, profile_psal_qc, vertical_sampling_scheme\";\n"
+ + " String cdm_trajectory_variables \"platform_number, project_name, pi_name, platform_type, float_serial_no\";\n"
+ + " String Conventions \"Argo-3.1, CF-1.6, COARDS, ACDD-1.3\";\n"
+ + " String creator_email \"support@argo.net\";\n"
+ + " String creator_name \"Argo\";\n"
+ + " String creator_url \"http://www.argo.net/\";\n"
+ + " Float64 Easternmost_Easting 36.42373;\n"
+ + " String featureType \"TrajectoryProfile\";\n"
+ + " Float64 geospatial_lat_max 43.81645;\n"
+ + " Float64 geospatial_lat_min -66.6667;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max 36.42373;\n"
+ + " Float64 geospatial_lon_min -26.250239999999998;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " String history \""
+ + today;
tResults = results.substring(0, Math.min(results.length(), expected.length()));
Test.ensureEqual(tResults, expected, "\nresults=\n" + results);
// 2016-05-09T15:34:11Z (local files)
// 2016-05-09T15:34:11Z
// http://localhost:8080/cwexperimental/tabledap/testMultidimNc.das\";
- expected = "String infoUrl \"http://www.argo.net/\";\n" +
- " String institution \"Argo\";\n" +
- " String keywords \"adjusted, argo, array, assembly, best, centre, centres, charge, coded, CONFIG_MISSION_NUMBER, contains, coriolis, creation, currents, cycle, CYCLE_NUMBER, data, DATA_CENTRE, DATA_MODE, DATA_STATE_INDICATOR, DATA_TYPE, date, DATE_CREATION, DATE_UPDATE, day, days, DC_REFERENCE, degree, delayed, denoting, density, determined, direction, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Salinity/Density > Salinity, equals, error, estimate, file, firmware, FIRMWARE_VERSION, flag, float, FLOAT_SERIAL_NO, format, FORMAT_VERSION, gdac, geostrophic, global, handbook, HANDBOOK_VERSION, have, identifier, in-situ, instrument, investigator, its, its-90, JULD, JULD_LOCATION, JULD_QC, julian, latitude, level, longitude, missions, mode, name, number, ocean, oceanography, oceans, passed, performed, PI_NAME, PLATFORM_NUMBER, PLATFORM_TYPE, position, POSITION_QC, positioning, POSITIONING_SYSTEM, practical, pres, PRES_ADJUSTED, PRES_ADJUSTED_ERROR, PRES_ADJUSTED_QC, PRES_QC, pressure, principal, process, processing, profile, PROFILE_PRES_QC, PROFILE_PSAL_QC, PROFILE_TEMP_QC, profiles, project, PROJECT_NAME, psal, PSAL_ADJUSTED, PSAL_ADJUSTED_ERROR, PSAL_ADJUSTED_QC, PSAL_QC, quality, rdac, real, real time, real-time, realtime, reference, REFERENCE_DATE_TIME, regional, relative, salinity, sampling, scale, scheme, sea, sea level, sea-level, sea_water_practical_salinity, sea_water_pressure, sea_water_temperature, seawater, serial, situ, station, statistics, system, TEMP, TEMP_ADJUSTED, TEMP_ADJUSTED_ERROR, TEMP_ADJUSTED_QC, TEMP_QC, temperature, through, time, type, unique, update, values, version, vertical, VERTICAL_SAMPLING_SCHEME, water, WMO_INST_TYPE\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String license \"The data may be used and redistributed for free but is not intended\n" +
- "for legal use, since it may contain inaccuracies. Neither the data\n" +
- "Contributor, ERD, NOAA, nor the United States Government, nor any\n" +
- "of their employees or contractors, makes any warranty, express or\n" +
- "implied, including warranties of merchantability and fitness for a\n" +
- "particular purpose, or assumes any legal liability for the accuracy,\n" +
- "completeness, or usefulness, of this information.\";\n" +
- " Float64 Northernmost_Northing 43.81645;\n" +
- " String references \"http://www.argodatamgt.org/Documentation\";\n" +
- " String source \"Argo float\";\n" +
- " String sourceUrl \"(local files)\";\n" +
- " Float64 Southernmost_Northing -66.6667;\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v29\";\n" +
- " String summary \"Argo float vertical profiles from Coriolis Global Data Assembly Centres\n" +
- "(GDAC). Argo is an international collaboration that collects high-quality\n" +
- "temperature and salinity profiles from the upper 2000m of the ice-free\n" +
- "global ocean and currents from intermediate depths. The data come from\n" +
- "battery-powered autonomous floats that spend most of their life drifting\n" +
- "at depth where they are stabilised by being neutrally buoyant at the\n" +
- "\\\"parking depth\\\" pressure by having a density equal to the ambient pressure\n" +
- "and a compressibility that is less than that of sea water. At present there\n" +
- "are several models of profiling float used in Argo. All work in a similar\n" +
- "fashion but differ somewhat in their design characteristics. At typically\n" +
- "10-day intervals, the floats pump fluid into an external bladder and rise\n" +
- "to the surface over about 6 hours while measuring temperature and salinity.\n" +
- "Satellites or GPS determine the position of the floats when they surface,\n" +
- "and the floats transmit their data to the satellites. The bladder then\n" +
- "deflates and the float returns to its original density and sinks to drift\n" +
- "until the cycle is repeated. Floats are designed to make about 150 such\n" +
- "cycles.\n" +
- "Data Management URL: http://www.argodatamgt.org/Documentation\";\n" +
- " String time_coverage_end \"2017-06-02T00:00:00Z\";\n" +
- " String time_coverage_start \"2012-12-27T09:19:57Z\";\n" +
- " String title \"Argo Float Vertical Profiles\";\n" +
- " String user_manual_version \"3.1\";\n" +
- " Float64 Westernmost_Easting -26.250239999999998;\n" +
- " }\n" +
- "}\n";
+ expected =
+ "String infoUrl \"http://www.argo.net/\";\n"
+ + " String institution \"Argo\";\n"
+ + " String keywords \"adjusted, argo, array, assembly, best, centre, centres, charge, coded, CONFIG_MISSION_NUMBER, contains, coriolis, creation, currents, cycle, CYCLE_NUMBER, data, DATA_CENTRE, DATA_MODE, DATA_STATE_INDICATOR, DATA_TYPE, date, DATE_CREATION, DATE_UPDATE, day, days, DC_REFERENCE, degree, delayed, denoting, density, determined, direction, Earth Science > Oceans > Ocean Pressure > Water Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Salinity/Density > Salinity, equals, error, estimate, file, firmware, FIRMWARE_VERSION, flag, float, FLOAT_SERIAL_NO, format, FORMAT_VERSION, gdac, geostrophic, global, handbook, HANDBOOK_VERSION, have, identifier, in-situ, instrument, investigator, its, its-90, JULD, JULD_LOCATION, JULD_QC, julian, latitude, level, longitude, missions, mode, name, number, ocean, oceanography, oceans, passed, performed, PI_NAME, PLATFORM_NUMBER, PLATFORM_TYPE, position, POSITION_QC, positioning, POSITIONING_SYSTEM, practical, pres, PRES_ADJUSTED, PRES_ADJUSTED_ERROR, PRES_ADJUSTED_QC, PRES_QC, pressure, principal, process, processing, profile, PROFILE_PRES_QC, PROFILE_PSAL_QC, PROFILE_TEMP_QC, profiles, project, PROJECT_NAME, psal, PSAL_ADJUSTED, PSAL_ADJUSTED_ERROR, PSAL_ADJUSTED_QC, PSAL_QC, quality, rdac, real, real time, real-time, realtime, reference, REFERENCE_DATE_TIME, regional, relative, salinity, sampling, scale, scheme, sea, sea level, sea-level, sea_water_practical_salinity, sea_water_pressure, sea_water_temperature, seawater, serial, situ, station, statistics, system, TEMP, TEMP_ADJUSTED, TEMP_ADJUSTED_ERROR, TEMP_ADJUSTED_QC, TEMP_QC, temperature, through, time, type, unique, update, values, version, vertical, VERTICAL_SAMPLING_SCHEME, water, WMO_INST_TYPE\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String license \"The data may be used and redistributed for free but is not intended\n"
+ + "for legal use, since it may contain inaccuracies. Neither the data\n"
+ + "Contributor, ERD, NOAA, nor the United States Government, nor any\n"
+ + "of their employees or contractors, makes any warranty, express or\n"
+ + "implied, including warranties of merchantability and fitness for a\n"
+ + "particular purpose, or assumes any legal liability for the accuracy,\n"
+ + "completeness, or usefulness, of this information.\";\n"
+ + " Float64 Northernmost_Northing 43.81645;\n"
+ + " String references \"http://www.argodatamgt.org/Documentation\";\n"
+ + " String source \"Argo float\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " Float64 Southernmost_Northing -66.6667;\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v29\";\n"
+ + " String summary \"Argo float vertical profiles from Coriolis Global Data Assembly Centres\n"
+ + "(GDAC). Argo is an international collaboration that collects high-quality\n"
+ + "temperature and salinity profiles from the upper 2000m of the ice-free\n"
+ + "global ocean and currents from intermediate depths. The data come from\n"
+ + "battery-powered autonomous floats that spend most of their life drifting\n"
+ + "at depth where they are stabilised by being neutrally buoyant at the\n"
+ + "\\\"parking depth\\\" pressure by having a density equal to the ambient pressure\n"
+ + "and a compressibility that is less than that of sea water. At present there\n"
+ + "are several models of profiling float used in Argo. All work in a similar\n"
+ + "fashion but differ somewhat in their design characteristics. At typically\n"
+ + "10-day intervals, the floats pump fluid into an external bladder and rise\n"
+ + "to the surface over about 6 hours while measuring temperature and salinity.\n"
+ + "Satellites or GPS determine the position of the floats when they surface,\n"
+ + "and the floats transmit their data to the satellites. The bladder then\n"
+ + "deflates and the float returns to its original density and sinks to drift\n"
+ + "until the cycle is repeated. Floats are designed to make about 150 such\n"
+ + "cycles.\n"
+ + "Data Management URL: http://www.argodatamgt.org/Documentation\";\n"
+ + " String time_coverage_end \"2017-06-02T00:00:00Z\";\n"
+ + " String time_coverage_start \"2012-12-27T09:19:57Z\";\n"
+ + " String title \"Argo Float Vertical Profiles\";\n"
+ + " String user_manual_version \"3.1\";\n"
+ + " Float64 Westernmost_Easting -26.250239999999998;\n"
+ + " }\n"
+ + "}\n";
int tPo = results.indexOf(expected.substring(0, 15));
Test.ensureTrue(tPo >= 0, "tPo=-1 results=\n" + results);
Test.ensureEqual(
results.substring(tPo, Math.min(results.length(), tPo + expected.length())),
- expected, "results=\n" + results);
+ expected,
+ "results=\n" + results);
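The .das response embeds the request timestamp in the history attribute, so the first expected block deliberately stops at history \" + today and only that prefix is compared; the remainder after the volatile lines is then located with indexOf and checked separately. A sketch of that two-part idiom (ensurePrefixAndTail is a hypothetical name mirroring the inline code above):

    // Hypothetical helper for the two-part compare above: check a prefix up to
    // a volatile value (here, the history timestamp), then find and check the tail.
    static void ensurePrefixAndTail(String results, String expectedHead, String expectedTail) {
      String head = results.substring(0, Math.min(results.length(), expectedHead.length()));
      Test.ensureEqual(head, expectedHead, "\nhead mismatch, results=\n" + results);
      int tPo = results.indexOf(expectedTail.substring(0, 15)); // locate the tail's start
      Test.ensureTrue(tPo >= 0, "tail not found, results=\n" + results);
      Test.ensureEqual(
          results.substring(tPo, Math.min(results.length(), tPo + expectedTail.length())),
          expectedTail, "\ntail mismatch, results=\n" + results);
    }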
// char vars that are now strings include these destinationNames:
// direction (A|D), data_mode (A|D),
@@ -4476,139 +4577,140 @@ void testCharAsString(boolean deleteCachedInfo) throws Throwable {
// pres_qc.
// dds
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_1", ".dds");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_1", ".dds");
results = File2.directReadFrom88591File(dir + tName);
- expected = "Dataset {\n" +
- " Sequence {\n" +
- " String fileNumber;\n" +
- " String data_type;\n" +
- " String format_version;\n" +
- " String handbook_version;\n" +
- " Float64 reference_date_time;\n" +
- " Float64 date_creation;\n" +
- " Float64 date_update;\n" +
- " String platform_number;\n" +
- " String project_name;\n" +
- " String pi_name;\n" +
- " Int32 cycle_number;\n" +
- " String direction;\n" +
- " String data_center;\n" +
- " String dc_reference;\n" +
- " String data_state_indicator;\n" +
- " String data_mode;\n" +
- " String platform_type;\n" +
- " String float_serial_no;\n" +
- " String firmware_version;\n" +
- " String wmo_inst_type;\n" +
- " Float64 time;\n" +
- " String time_qc;\n" +
- " Float64 time_location;\n" +
- " Float64 latitude;\n" +
- " Float64 longitude;\n" +
- " String position_qc;\n" +
- " String positioning_system;\n" +
- " String profile_pres_qc;\n" +
- " String profile_temp_qc;\n" +
- " String profile_psal_qc;\n" +
- " String vertical_sampling_scheme;\n" +
- " Int32 config_mission_number;\n" +
- " Float32 pres;\n" +
- " String pres_qc;\n" +
- " Float32 pres_adjusted;\n" +
- " String pres_adjusted_qc;\n" +
- " Float32 pres_aqdjusted_error;\n" +
- " Float32 temp;\n" +
- " String temp_qc;\n" +
- " Float32 temp_adjusted;\n" +
- " String temp_adjusted_qc;\n" +
- " Float32 temp_adjusted_error;\n" +
- " Float32 psal;\n" +
- " String psal_qc;\n" +
- " Float32 psal_adjusted;\n" +
- " String psal_adjusted_qc;\n" +
- " Float32 psal_adjusted_error;\n" +
- " } s;\n" +
- "} s;\n";
+ expected =
+ "Dataset {\n"
+ + " Sequence {\n"
+ + " String fileNumber;\n"
+ + " String data_type;\n"
+ + " String format_version;\n"
+ + " String handbook_version;\n"
+ + " Float64 reference_date_time;\n"
+ + " Float64 date_creation;\n"
+ + " Float64 date_update;\n"
+ + " String platform_number;\n"
+ + " String project_name;\n"
+ + " String pi_name;\n"
+ + " Int32 cycle_number;\n"
+ + " String direction;\n"
+ + " String data_center;\n"
+ + " String dc_reference;\n"
+ + " String data_state_indicator;\n"
+ + " String data_mode;\n"
+ + " String platform_type;\n"
+ + " String float_serial_no;\n"
+ + " String firmware_version;\n"
+ + " String wmo_inst_type;\n"
+ + " Float64 time;\n"
+ + " String time_qc;\n"
+ + " Float64 time_location;\n"
+ + " Float64 latitude;\n"
+ + " Float64 longitude;\n"
+ + " String position_qc;\n"
+ + " String positioning_system;\n"
+ + " String profile_pres_qc;\n"
+ + " String profile_temp_qc;\n"
+ + " String profile_psal_qc;\n"
+ + " String vertical_sampling_scheme;\n"
+ + " Int32 config_mission_number;\n"
+ + " Float32 pres;\n"
+ + " String pres_qc;\n"
+ + " Float32 pres_adjusted;\n"
+ + " String pres_adjusted_qc;\n"
+ + " Float32 pres_aqdjusted_error;\n"
+ + " Float32 temp;\n"
+ + " String temp_qc;\n"
+ + " Float32 temp_adjusted;\n"
+ + " String temp_adjusted_qc;\n"
+ + " Float32 temp_adjusted_error;\n"
+ + " Float32 psal;\n"
+ + " String psal_qc;\n"
+ + " Float32 psal_adjusted;\n"
+ + " String psal_adjusted_qc;\n"
+ + " Float32 psal_adjusted_error;\n"
+ + " } s;\n"
+ + "} s;\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// view some data
- userDapQuery = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
- +
- "&cycle_number<3";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_2", ".csv");
+ userDapQuery =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
+ + "&cycle_number<3";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_2", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc\n"
- +
- ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,\n" +
- "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,1,A,IF,29532210,2B,R,NEMO,185,,860,2012-12-27T09:19:57Z,1,2012-12-27T09:19:57Z,-66.3326,-11.662600000000001,1\n"
- +
- "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,2,A,IF,29532211,2B,R,NEMO,185,,860,2012-12-30T05:57:58Z,1,2012-12-30T05:57:58Z,-66.3135,-11.6555,1\n";
+ expected =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc\n"
+ + ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,\n"
+ + "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,1,A,IF,29532210,2B,R,NEMO,185,,860,2012-12-27T09:19:57Z,1,2012-12-27T09:19:57Z,-66.3326,-11.662600000000001,1\n"
+ + "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,2,A,IF,29532211,2B,R,NEMO,185,,860,2012-12-30T05:57:58Z,1,2012-12-30T05:57:58Z,-66.3135,-11.6555,1\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// adding &direction="A" should yield the same results
- userDapQuery = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
- +
- "&cycle_number<3&direction=\"A\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_3", ".csv");
+ userDapQuery =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
+ + "&cycle_number<3&direction=\"A\"";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_3", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// test distinct for a char var
- userDapQuery = "position_qc" +
- "&distinct()";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_4", ".csv");
+ userDapQuery = "position_qc" + "&distinct()";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_4", ".csv");
results = File2.directReadFrom88591File(dir + tName);
- expected = "position_qc\n" +
- "\n" +
- "1\n" +
- "2\n" +
- "4\n" +
- "8\n";
+ expected = "position_qc\n" + "\n" + "1\n" + "2\n" + "4\n" + "8\n";
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
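&distinct() reduces the response to unique rows in sorted order, so a one-column query on a char variable returns each QC flag once. The equivalent operation in plain Java, just to make the expected output above concrete:

    // Unique values in natural (code point) order, as &distinct() produces above.
    java.util.TreeSet<String> distinct =
        new java.util.TreeSet<>(java.util.List.of("1", "2", "1", "4", "8", "2", "4"));
    // distinct now iterates as 1, 2, 4, 8 -- the position_qc column in the expected CSV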
// test > char var
userDapQuery = "platform_number,cycle_number,position_qc&position_qc>\"3\"&position_qc<\"8\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_5", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_5", ".csv");
results = File2.directReadFrom88591File(dir + tName);
- expected = "platform_number,cycle_number,position_qc\n" +
- ",,\n" +
- "7900364,17,4\n" +
- "7900364,18,4\n" +
- "7900364,21,4\n" +
- "7900364,23,4\n" +
- "7900364,25,4\n" +
- "7900364,27,4\n" +
- "7900364,28,4\n" +
- "7900364,29,4\n";
+ expected =
+ "platform_number,cycle_number,position_qc\n"
+ + ",,\n"
+ + "7900364,17,4\n"
+ + "7900364,18,4\n"
+ + "7900364,21,4\n"
+ + "7900364,23,4\n"
+ + "7900364,25,4\n"
+ + "7900364,27,4\n"
+ + "7900364,28,4\n"
+ + "7900364,29,4\n";
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
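Because position_qc is served as a one-character string, >"3" and <"8" are lexicographic comparisons; for single ASCII digits that coincides with numeric order, so the range keeps flags 4 through 7 and, in this data, only 4 appears. A tiny sketch of the predicate:

    // Lexicographic range test on a one-char QC flag; for ASCII digits this
    // matches numeric order, so "3" < qc < "8" keeps "4".."7".
    static boolean inQcRange(String qc) {
      return qc.compareTo("3") > 0 && qc.compareTo("8") < 0;
    }
    // inQcRange("4") == true, inQcRange("2") == false, inQcRange("8") == false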
// test = for a char var
userDapQuery = "platform_number,cycle_number,position_qc&position_qc=\"2\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_6", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_6", ".csv");
results = File2.directReadFrom88591File(dir + tName);
- expected = "platform_number,cycle_number,position_qc\n" +
- ",,\n" +
- "7900594,96,2\n" +
- "7900594,97,2\n" +
- "7900594,101,2\n" +
- "7900594,102,2\n";
+ expected =
+ "platform_number,cycle_number,position_qc\n"
+ + ",,\n"
+ + "7900594,96,2\n"
+ + "7900594,97,2\n"
+ + "7900594,101,2\n"
+ + "7900594,102,2\n";
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// **************************** testCharAsChar
id = "testCharAsChar";
- if (deleteCachedInfo)
- EDD.deleteCachedDatasetInfo(id);
+ if (deleteCachedInfo) EDD.deleteCachedDatasetInfo(id);
eddTable = (EDDTable) EDDTestDataset.gettestCharAsChar();
// char vars that are now strings include these destinationNames:
@@ -4617,133 +4719,178 @@ void testCharAsString(boolean deleteCachedInfo) throws Throwable {
// pres_qc.
// dds
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "", dir,
- eddTable.className() + "_1b", ".dds");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, "", dir, eddTable.className() + "_1b", ".dds");
results = File2.directReadFrom88591File(dir + tName);
- expected = "Dataset {\n" +
- " Sequence {\n" +
- " String fileNumber;\n" +
- " String data_type;\n" +
- " String format_version;\n" +
- " String handbook_version;\n" +
- " Float64 reference_date_time;\n" +
- " Float64 date_creation;\n" +
- " Float64 date_update;\n" +
- " String platform_number;\n" +
- " String project_name;\n" +
- " String pi_name;\n" +
- " Int32 cycle_number;\n" +
- " String direction;\n" + // char vars show up as String, I think because .dds doesn't support char
- " String data_center;\n" +
- " String dc_reference;\n" +
- " String data_state_indicator;\n" +
- " String data_mode;\n" +
- " String platform_type;\n" +
- " String float_serial_no;\n" +
- " String firmware_version;\n" +
- " String wmo_inst_type;\n" +
- " Float64 time;\n" +
- " String time_qc;\n" +
- " Float64 time_location;\n" +
- " Float64 latitude;\n" +
- " Float64 longitude;\n" +
- " String position_qc;\n" +
- " String positioning_system;\n" +
- " String profile_pres_qc;\n" +
- " String profile_temp_qc;\n" +
- " String profile_psal_qc;\n" +
- " String vertical_sampling_scheme;\n" +
- " Int32 config_mission_number;\n" +
- " Float32 pres;\n" +
- " String pres_qc;\n" +
- " Float32 pres_adjusted;\n" +
- " String pres_adjusted_qc;\n" +
- " Float32 pres_aqdjusted_error;\n" +
- " Float32 temp;\n" +
- " String temp_qc;\n" +
- " Float32 temp_adjusted;\n" +
- " String temp_adjusted_qc;\n" +
- " Float32 temp_adjusted_error;\n" +
- " Float32 psal;\n" +
- " String psal_qc;\n" +
- " Float32 psal_adjusted;\n" +
- " String psal_adjusted_qc;\n" +
- " Float32 psal_adjusted_error;\n" +
- " } s;\n" +
- "} s;\n";
+ expected =
+ "Dataset {\n"
+ + " Sequence {\n"
+ + " String fileNumber;\n"
+ + " String data_type;\n"
+ + " String format_version;\n"
+ + " String handbook_version;\n"
+ + " Float64 reference_date_time;\n"
+ + " Float64 date_creation;\n"
+ + " Float64 date_update;\n"
+ + " String platform_number;\n"
+ + " String project_name;\n"
+ + " String pi_name;\n"
+ + " Int32 cycle_number;\n"
+ + " String direction;\n"
+            + // char vars appear as String because the DAP2 .dds type system has no char type
+ " String data_center;\n"
+ + " String dc_reference;\n"
+ + " String data_state_indicator;\n"
+ + " String data_mode;\n"
+ + " String platform_type;\n"
+ + " String float_serial_no;\n"
+ + " String firmware_version;\n"
+ + " String wmo_inst_type;\n"
+ + " Float64 time;\n"
+ + " String time_qc;\n"
+ + " Float64 time_location;\n"
+ + " Float64 latitude;\n"
+ + " Float64 longitude;\n"
+ + " String position_qc;\n"
+ + " String positioning_system;\n"
+ + " String profile_pres_qc;\n"
+ + " String profile_temp_qc;\n"
+ + " String profile_psal_qc;\n"
+ + " String vertical_sampling_scheme;\n"
+ + " Int32 config_mission_number;\n"
+ + " Float32 pres;\n"
+ + " String pres_qc;\n"
+ + " Float32 pres_adjusted;\n"
+ + " String pres_adjusted_qc;\n"
+ + " Float32 pres_aqdjusted_error;\n"
+ + " Float32 temp;\n"
+ + " String temp_qc;\n"
+ + " Float32 temp_adjusted;\n"
+ + " String temp_adjusted_qc;\n"
+ + " Float32 temp_adjusted_error;\n"
+ + " Float32 psal;\n"
+ + " String psal_qc;\n"
+ + " Float32 psal_adjusted;\n"
+ + " String psal_adjusted_qc;\n"
+ + " Float32 psal_adjusted_error;\n"
+ + " } s;\n"
+ + "} s;\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
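As the inline comment notes, the DAP2 DDS type system (Byte, Int16/UInt16, Int32/UInt32, Float32/Float64, String, URL) has no char type, so char source variables are necessarily rendered as String. A hedged sketch of such a type mapping; ddsTypeName is illustrative, not ERDDAP's actual method:

    // Illustrative mapping from Java element types to DAP2 DDS type names.
    // DAP2 has no char type, so char (like String) falls through to "String",
    // which is why direction, data_mode, etc. appear as String in the .dds above.
    static String ddsTypeName(Class<?> elementType) {
      if (elementType == double.class) return "Float64";
      if (elementType == float.class)  return "Float32";
      if (elementType == int.class)    return "Int32";
      if (elementType == short.class)  return "Int16";
      if (elementType == byte.class)   return "Byte";
      return "String"; // char and String both map to DDS String
    }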
// view some data
- userDapQuery = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
- +
- "&cycle_number<3";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_2b", ".csv");
+ userDapQuery =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
+ + "&cycle_number<3";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_2b", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
- expected = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc\n"
- +
- ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,\n" +
- "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,1,A,IF,29532210,2B,R,NEMO,185,,860,2012-12-27T09:19:57Z,1,2012-12-27T09:19:57Z,-66.3326,-11.662600000000001,1\n"
- +
- "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,2,A,IF,29532211,2B,R,NEMO,185,,860,2012-12-30T05:57:58Z,1,2012-12-30T05:57:58Z,-66.3135,-11.6555,1\n";
+ expected =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc\n"
+ + ",,,,UTC,UTC,UTC,,,,,,,,,,,,,,UTC,,UTC,degrees_north,degrees_east,\n"
+ + "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,1,A,IF,29532210,2B,R,NEMO,185,,860,2012-12-27T09:19:57Z,1,2012-12-27T09:19:57Z,-66.3326,-11.662600000000001,1\n"
+ + "7900364,Argo profile,3.1,1.2,1950-01-01T00:00:00Z,2013-05-24T17:02:04Z,2017-07-07T17:28:19Z,7900364,AWI,Gerd ROHARDT,2,A,IF,29532211,2B,R,NEMO,185,,860,2012-12-30T05:57:58Z,1,2012-12-30T05:57:58Z,-66.3135,-11.6555,1\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// adding &direction="A" should yield the same results
- userDapQuery = "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
- +
- "&cycle_number<3&direction=\"A\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_3b", ".csv");
+ userDapQuery =
+ "fileNumber,data_type,format_version,handbook_version,reference_date_time,date_creation,date_update,platform_number,project_name,pi_name,cycle_number,direction,data_center,dc_reference,data_state_indicator,data_mode,platform_type,float_serial_no,firmware_version,wmo_inst_type,time,time_qc,time_location,latitude,longitude,position_qc"
+ + "&cycle_number<3&direction=\"A\"";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_3b", ".csv");
results = File2.directReadFrom88591File(dir + tName);
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// test distinct for a char var
- userDapQuery = "position_qc" +
- "&distinct()";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_4b", ".csv");
+ userDapQuery = "position_qc" + "&distinct()";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_4b", ".csv");
results = File2.directReadFrom88591File(dir + tName);
- expected = "position_qc\n" +
- "\n" +
- "1\n" +
- "2\n" +
- "4\n" +
- "8\n";
+ expected = "position_qc\n" + "\n" + "1\n" + "2\n" + "4\n" + "8\n";
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// test > char var
userDapQuery = "platform_number,cycle_number,position_qc&position_qc>\"3\"&position_qc<\"8\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_5b", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_5b", ".csv");
results = File2.directReadFrom88591File(dir + tName);
- expected = "platform_number,cycle_number,position_qc\n" +
- ",,\n" +
- "7900364,17,4\n" +
- "7900364,18,4\n" +
- "7900364,21,4\n" +
- "7900364,23,4\n" +
- "7900364,25,4\n" +
- "7900364,27,4\n" +
- "7900364,28,4\n" +
- "7900364,29,4\n";
+ expected =
+ "platform_number,cycle_number,position_qc\n"
+ + ",,\n"
+ + "7900364,17,4\n"
+ + "7900364,18,4\n"
+ + "7900364,21,4\n"
+ + "7900364,23,4\n"
+ + "7900364,25,4\n"
+ + "7900364,27,4\n"
+ + "7900364,28,4\n"
+ + "7900364,29,4\n";
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
// test = for a char var
userDapQuery = "platform_number,cycle_number,position_qc&position_qc=\"2\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery, dir,
- eddTable.className() + "_6c", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language, null, null, userDapQuery, dir, eddTable.className() + "_6c", ".csv");
results = File2.directReadFrom88591File(dir + tName);
- expected = "platform_number,cycle_number,position_qc\n" +
- ",,\n" +
- "7900594,96,2\n" +
- "7900594,97,2\n" +
- "7900594,101,2\n" +
- "7900594,102,2\n";
+ expected =
+ "platform_number,cycle_number,position_qc\n"
+ + ",,\n"
+ + "7900594,96,2\n"
+ + "7900594,97,2\n"
+ + "7900594,101,2\n"
+ + "7900594,102,2\n";
// String2.log(results);
Test.ensureEqual(results, expected, "\nresults=\n" + results);
}
+
+ @org.junit.jupiter.api.Test
+ void testScriptOnlyRequest() throws Throwable {
+
+ EDDTableFromMultidimNcFiles edd = (EDDTableFromMultidimNcFiles) EDDTestDataset.getTS_ATMP_AAD();
+ String dir = EDStatic.fullTestCacheDirectory;
+ String fileTypeExtension = ".csv";
+ String fileName = "testScriptOnlyRequest" + fileTypeExtension;
+ String fullName = dir + fileName;
+ OutputStreamSource outputStreamSource =
+ new OutputStreamSourceSimple(new BufferedOutputStream(new FileOutputStream(fullName)));
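+ // Stream the response straight into a local file. Calling respondToDapQuery
+ // directly (rather than makeNewFileForDapQuery, as the other tests do)
+ // exercises the full request path, so the script-derived url_metadata values
+ // are computed just as they would be for a live request.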
+ edd.respondToDapQuery(
+ 0,
+ null,
+ null,
+ null,
+ null,
+ "erddap/tabledap/TS_ATMP_AAD.csv",
+ "TS_ATMP_AAD.csv",
+ "url_metadata&distinct()",
+ outputStreamSource,
+ dir,
+ fileName,
+ fileTypeExtension);
+ String results = File2.directReadFrom88591File(fullName);
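+ // distinct() should yield one metadata URL per platform.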
+ assertEquals(
+ "url_metadata\n"
+ + //
+ "\n"
+ + //
+ "https://data-erddap.emodnet-physics.eu/erddap/tabledap/EP_PLATFORMS_METADATA.htmlTable?&PLATFORMCODE=%22Casey Skiway%22&integrator_id=%22aad%22&distinct()\n"
+ + //
+ "https://data-erddap.emodnet-physics.eu/erddap/tabledap/EP_PLATFORMS_METADATA.htmlTable?&PLATFORMCODE=%22Casey%22&integrator_id=%22aad%22&distinct()\n"
+ + //
+ "https://data-erddap.emodnet-physics.eu/erddap/tabledap/EP_PLATFORMS_METADATA.htmlTable?&PLATFORMCODE=%22Davis%22&integrator_id=%22aad%22&distinct()\n"
+ + //
+ "https://data-erddap.emodnet-physics.eu/erddap/tabledap/EP_PLATFORMS_METADATA.htmlTable?&PLATFORMCODE=%22Macquarie Island%22&integrator_id=%22aad%22&distinct()\n"
+ + //
+ "https://data-erddap.emodnet-physics.eu/erddap/tabledap/EP_PLATFORMS_METADATA.htmlTable?&PLATFORMCODE=%22Mawson%22&integrator_id=%22aad%22&distinct()\n",
+ results);
+ }
}
diff --git a/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromNcCFFilesTests.java b/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromNcCFFilesTests.java
index 8470dabe..acb3bd46 100644
--- a/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromNcCFFilesTests.java
+++ b/src/test/java/gov/noaa/pfel/erddap/dataset/EDDTableFromNcCFFilesTests.java
@@ -1,23 +1,20 @@
package gov.noaa.pfel.erddap.dataset;
-import java.nio.file.Path;
-
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.ValueSource;
-
import com.cohort.array.StringArray;
import com.cohort.util.Calendar2;
import com.cohort.util.File2;
import com.cohort.util.SimpleException;
import com.cohort.util.String2;
import com.cohort.util.Test;
-
import gov.noaa.pfel.coastwatch.griddata.NcHelper;
import gov.noaa.pfel.coastwatch.pointdata.Table;
import gov.noaa.pfel.erddap.GenerateDatasetsXml;
import gov.noaa.pfel.erddap.util.EDStatic;
import gov.noaa.pfel.erddap.variable.EDV;
+import java.nio.file.Path;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
import tags.TagIncompleteTest;
import tags.TagMissingFile;
import testDataset.EDDTestDataset;
@@ -32,9 +29,8 @@ static void init() {
}
/**
- * testGenerateDatasetsXml.
- * This doesn't test suggestTestOutOfDate, except that for old data
- * it doesn't suggest anything.
+ * testGenerateDatasetsXml. This doesn't test suggestTestOutOfDate, except that for old data it
+ * doesn't suggest anything.
*/
@org.junit.jupiter.api.Test
void testGenerateDatasetsXml() throws Throwable {
@@ -49,243 +45,267 @@ void testGenerateDatasetsXml() throws Throwable {
// String tSortFilesBySourceNames,
// String tInfoUrl, String tInstitution, String tSummary, String tTitle,
// Attributes externalAddGlobalAttributes) throws Throwable {
- String dataDir = File2.addSlash(Path.of(
- EDDTableFromNcCFFilesTests.class.getResource("/data/nccf/").toURI()).toString());
+ String dataDir =
+ File2.addSlash(
+ Path.of(EDDTableFromNcCFFilesTests.class.getResource("/data/nccf/").toURI())
+ .toString());
String fileNameRegex = "ncCF1b\\.nc";
- String results = EDDTableFromNcCFFiles.generateDatasetsXml(
- dataDir,
- fileNameRegex,
- dataDir + "/ncCF1b.nc",
- 1440,
- "", "", "", "", // just for test purposes; station is already a column in the file
- "line_station time",
- "", "", "", "",
- -1, null, // defaultStandardizeWhat
- null) + "\n";
+ String results =
+ EDDTableFromNcCFFiles.generateDatasetsXml(
+ dataDir,
+ fileNameRegex,
+ dataDir + "/ncCF1b.nc",
+ 1440,
+ "",
+ "",
+ "",
+ "", // just for test purposes; station is already a column in the file
+ "line_station time",
+ "",
+ "",
+ "",
+ "",
+ -1,
+ null, // defaultStandardizeWhat
+ null)
+ + "\n";
// GenerateDatasetsXml
- String gdxResults = (new GenerateDatasetsXml()).doIt(new String[] { "-verbose",
- "EDDTableFromNcCFFiles",
- dataDir,
- fileNameRegex,
- dataDir + "/ncCF1b.nc",
- "1440",
- "", "", "", "", // just for test purposes; station is already a column in the file
- "line_station time",
- "", "", "", "", "",
- "-1", "" }, // defaultStandardizeWhat
- false); // doIt loop?
+ String gdxResults =
+ (new GenerateDatasetsXml())
+ .doIt(
+ new String[] {
+ "-verbose",
+ "EDDTableFromNcCFFiles",
+ dataDir,
+ fileNameRegex,
+ dataDir + "/ncCF1b.nc",
+ "1440",
+ "",
+ "",
+ "",
+ "", // just for test purposes; station is already a column in the file
+ "line_station time",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "-1",
+ ""
+ }, // defaultStandardizeWhat
+ false); // doIt loop?
Test.ensureEqual(gdxResults, results, "Unexpected results from GenerateDatasetsXml.doIt.");
String tDatasetID = EDDTableFromNcCFFiles.suggestDatasetID(dataDir + fileNameRegex);
- String expected = "\n"
- +
- " 1440\n" +
- " 10000\n" +
- " " + dataDir + "\n" +
- " " + fileNameRegex + "\n" +
- " true\n" +
- " .*\n" +
- " last\n" +
- " 0\n" +
- " line_station time\n" +
- " false\n" +
- " \n" +
- " \n" +
- " COARDS, CF-1.10, ACDD-1.3\n" +
- " CalCOFI\n" +
- " institution\n" +
- " http://www.calcofi.org/newhome/publications/Atlases/atlases.htm\n"
- +
- " 1984-2004, altitude, animals, animals/vertebrates, aquatic, atmosphere, biological, biology, biosphere, calcofi, california, classification, coastal, code, common, cooperative, count, cruise, data, earth, Earth Science > Atmosphere > Altitude > Station Height, Earth Science > Biological Classification > Animals/Vertebrates > Fish, Earth Science > Biosphere > Aquatic Ecosystems > Coastal Habitat, Earth Science > Biosphere > Aquatic Ecosystems > Marine Habitat, Earth Science > Oceans > Aquatic Sciences > Fisheries, ecosystems, fish, fisheries, habitat, height, identifier, investigations, larvae, latitude, line, line_station, longitude, marine, name, number, observed, obsScientific, obsUnits, obsValue, occupancy, ocean, oceanic, oceans, order, science, sciences, scientific, ship, start, station, time, tow, units, value, vertebrates\n"
- +
- " null\n" +
- " CF Standard Name Table v70\n" +
- " \n" +
- " \n" +
- " line_station\n" +
- " line_station\n" +
- " String\n" +
- " \n" +
- " \n" +
- " \n" +
- " \n" +
- " \n" +
- " longitude\n" +
- " longitude\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 180.0\n" +
- " -180.0\n" +
- " \n" +
- " \n" +
- " \n" +
- " latitude\n" +
- " latitude\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 90.0\n" +
- " -90.0\n" +
- " \n" +
- " \n" +
- " \n" +
- " altitude\n" +
- " altitude\n" +
- " float\n" +
- " \n" +
- " \n" +
- " -210.0\n" +
- " -218.0\n" +
- " \n" +
- " \n" +
- " \n" +
- " time\n" +
- " time\n" +
- " double\n" +
- " \n" +
- " \n" +
- " 1.1007E9\n" +
- " 1.1002E9\n" +
- " \n" +
- " \n" +
- " \n" +
- " obsScientific\n" +
- " obsScientific\n" +
- " String\n" +
- " \n" +
- " \n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " obsValue\n" +
- " obsValue\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 2147483647\n" +
- " 25.0\n" +
- " 0.0\n" +
- " null\n" +
- " \n" +
- " \n" +
- " \n" +
- " obsUnits\n" +
- " obsUnits\n" +
- " String\n" +
- " \n" +
- " \n" +
- " null\n" +
- " \n" +
- " \n" +
- "\n" +
- "\n\n";
+ String expected =
+ "\n"
+ + " 1440\n"
+ + " 10000\n"
+ + " "
+ + dataDir
+ + "\n"
+ + " "
+ + fileNameRegex
+ + "\n"
+ + " true\n"
+ + " .*\n"
+ + " last\n"
+ + " 0\n"
+ + " line_station time\n"
+ + " false\n"
+ + " \n"
+ + " \n"
+ + " COARDS, CF-1.10, ACDD-1.3\n"
+ + " CalCOFI\n"
+ + " institution\n"
+ + " http://www.calcofi.org/newhome/publications/Atlases/atlases.htm\n"
+ + " 1984-2004, altitude, animals, animals/vertebrates, aquatic, atmosphere, biological, biology, biosphere, calcofi, california, classification, coastal, code, common, cooperative, count, cruise, data, earth, Earth Science > Atmosphere > Altitude > Station Height, Earth Science > Biological Classification > Animals/Vertebrates > Fish, Earth Science > Biosphere > Aquatic Ecosystems > Coastal Habitat, Earth Science > Biosphere > Aquatic Ecosystems > Marine Habitat, Earth Science > Oceans > Aquatic Sciences > Fisheries, ecosystems, fish, fisheries, habitat, height, identifier, investigations, larvae, latitude, line, line_station, longitude, marine, name, number, observed, obsScientific, obsUnits, obsValue, occupancy, ocean, oceanic, oceans, order, science, sciences, scientific, ship, start, station, time, tow, units, value, vertebrates\n"
+ + " null\n"
+ + " CF Standard Name Table v70\n"
+ + " \n"
+ + " \n"
+ + " line_station\n"
+ + " line_station\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " longitude\n"
+ + " longitude\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 180.0\n"
+ + " -180.0\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " latitude\n"
+ + " latitude\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 90.0\n"
+ + " -90.0\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " altitude\n"
+ + " altitude\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " -210.0\n"
+ + " -218.0\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " time\n"
+ + " time\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " 1.1007E9\n"
+ + " 1.1002E9\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " obsScientific\n"
+ + " obsScientific\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " obsValue\n"
+ + " obsValue\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 2147483647\n"
+ + " 25.0\n"
+ + " 0.0\n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " obsUnits\n"
+ + " obsUnits\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " null\n"
+ + " \n"
+ + " \n"
+ + "\n"
+ + "\n\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// Test.ensureEqual(results.substring(0, Math.min(results.length(),
// expected.length())),
@@ -296,20 +316,19 @@ void testGenerateDatasetsXml() throws Throwable {
EDD edd = EDDTableFromNcCFFiles.oneFromXmlFragment(null, results);
Test.ensureEqual(edd.datasetID(), tDatasetID, "");
Test.ensureEqual(edd.title(), "CalCOFI Fish Larvae Count, 1984-2004", "");
- Test.ensureEqual(String2.toCSSVString(edd.dataVariableDestinationNames()),
+ Test.ensureEqual(
+ String2.toCSSVString(edd.dataVariableDestinationNames()),
"line_station, longitude, latitude, altitude, time, obsScientific, obsValue, obsUnits",
"");
-
}
/**
- * testGenerateDatasetsXml2.
- * This doesn't test suggestTestOutOfDate, except that for old data
- * it doesn't suggest anything.
+ * testGenerateDatasetsXml2. This doesn't test suggestTestOutOfDate, except that for old data it
+ * doesn't suggest anything.
*/
@org.junit.jupiter.api.Test
@TagIncompleteTest // Simple ERROR on line #1 of ind199105_ctd.nc *GLOBAL*,Conventions,"...,
- // NCCSV-..." not found on line 1.
+ // NCCSV-..." not found on line 1.
void testGenerateDatasetsXml2() throws Throwable {
// testVerboseOn();
// debugMode = true;
@@ -326,782 +345,805 @@ void testGenerateDatasetsXml2() throws Throwable {
// From Ajay Krishnan, NCEI/NODC, from
// https://data.nodc.noaa.gov/thredds/catalog/testdata/wod_ragged/05052016/catalog.html?dataset=testdata/wod_ragged/05052016/ind199105_ctd.nc
// See low level reading test: Table.testReadNcCF7SampleDims()
- String dataDir = File2.addSlash(Path.of(
- EDDTableFromNcCFFilesTests.class.getResource("/data/nccf/ncei/").toURI()).toString());
+ String dataDir =
+ File2.addSlash(
+ Path.of(EDDTableFromNcCFFilesTests.class.getResource("/data/nccf/ncei/").toURI())
+ .toString());
String fileNameRegex = "ind199105_ctd\\.nc";
- String results = EDDTableFromNccsvFiles.generateDatasetsXml(
- dataDir,
- fileNameRegex,
- "",
- 1440,
- "", "", "", "", // just for test purposes; station is already a column in the file
- "WOD_cruise_identifier, time",
- "", "", "", "",
- -1, null, // defaultStandardizeWhat
- null) + "\n";
+ String results =
+ EDDTableFromNccsvFiles.generateDatasetsXml(
+ dataDir,
+ fileNameRegex,
+ "",
+ 1440,
+ "",
+ "",
+ "",
+ "", // just for test purposes; station is already a column in the file
+ "WOD_cruise_identifier, time",
+ "",
+ "",
+ "",
+ "",
+ -1,
+ null, // defaultStandardizeWhat
+ null)
+ + "\n";
// GenerateDatasetsXml
- String gdxResults = (new GenerateDatasetsXml()).doIt(new String[] { "-verbose",
- "EDDTableFromNcCFFiles",
- dataDir,
- fileNameRegex,
- "",
- "1440",
- "", "", "", "", // just for test purposes; station is already a column in the file
- "WOD_cruise_identifier, time",
- "", "", "", "", "",
- "-1", "" }, // defaultStandardizeWhat
- false); // doIt loop?
+ String gdxResults =
+ (new GenerateDatasetsXml())
+ .doIt(
+ new String[] {
+ "-verbose",
+ "EDDTableFromNcCFFiles",
+ dataDir,
+ fileNameRegex,
+ "",
+ "1440",
+ "",
+ "",
+ "",
+ "", // just for test purposes; station is already a column in the file
+ "WOD_cruise_identifier, time",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "-1",
+ ""
+ }, // defaultStandardizeWhat
+ false); // doIt loop?
Test.ensureEqual(gdxResults, results, "Unexpected results from GenerateDatasetsXml.doIt.");
String tDatasetID = EDDTableFromNcCFFiles.suggestDatasetID(dataDir + fileNameRegex);
- String expected = "\n"
- +
- " 1440\n" +
- " 10000\n" +
- " " + dataDir + "\n" +
- " " + fileNameRegex + "\n" +
- " true\n" +
- " .*\n" +
- " last\n" +
- " 0\n" +
- " WOD_cruise_identifier, time\n" +
- " false\n" +
- " \n" +
- " \n" +
- " CF-1.10, COARDS, ACDD-1.3\n" +
- " institution\n" +
- " https://www.nodc.noaa.gov\n" +
- " World Ocean Database\n" +
- " https://www.nodc.noaa.gov\n" +
- " NGDC(NODC), NOAA\n" +
- " Access_no, accession, bathymetry, below, cast, Cast_Direction, Cast_Duration, Cast_Tow_number, center, chemistry, chlorophyll, Chlorophyll_Instrument, Chlorophyll_row_size, Chlorophyll_uncalibrated, Chlorophyll_WODprofileflag, color, concentration, concentration_of_chlorophyll_in_sea_water, conductivit, Conductivit_row_size, country, crs, cruise, data, database, dataset, date, dbase_orig, depth, direction, dissolved, dissolved o2, duration, earth, Earth Science > Oceans > Bathymetry/Seafloor Topography > Bathymetry, Earth Science > Oceans > Ocean Chemistry > Chlorophyll, file, flag, floor, geophysical, GMT_time, high, High_res_pair, identifier, institute, instrument, latitude, level, longitude, measured, multi, multi-cast, name, national, ncei, nesdis, ngdc, noaa, nodc, number, O2, observation, observations, ocean, ocean color, oceanographic, oceans, Orig_Stat_Num, origflagset, origin, original, originators, originators_cruise_identifier, oxygen, Oxygen_Instrument, Oxygen_Original_units, Oxygen_row_size, Oxygen_WODprofileflag, pair, platform, pressure, Pressure_row_size, profile, project, quality, resolution, responsible, salinity, Salinity_Instrument, Salinity_row_size, Salinity_Scale, Salinity_WODprofileflag, scale, science, sea, sea_floor_depth, seafloor, seawater, sigfig, station, statistics, temperature, Temperature_Instrument, Temperature_row_size, Temperature_Scale, Temperature_WODprofileflag, time, topography, tow, unique, units, upon, values, water, which, wod, WOD_cruise_identifier, wod_unique_cast, WODf, WODfd, wodflag, WODfp, wodprofileflag, world, z_sigfig, z_WODflag\n"
- +
- " GCMD Science Keywords\n" +
- " [standard]\n" +
- " institution\n" +
- " https://www.nodc.noaa.gov\n" +
- " World Ocean Database 2013. URL:https://data.nodc.noaa.gov/woa/WOD/DOC/wod_intro.pdf\n"
- +
- " (local files)\n" +
- " CF Standard Name Table v70\n" +
- " World Ocean Database - Multi-cast file. Data for multiple casts from the World Ocean Database\n"
- +
- " World Ocean Database, Multi-cast file\n" +
- " \n" +
- " \n" +
- " country\n" +
- " country\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Location\n" +
- " Country\n" +
- " \n" +
- " \n" +
- " \n" +
- " WOD_cruise_identifier\n" +
- " WOD_cruise_identifier\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " originators_cruise_identifier\n" +
- " originators_cruise_identifier\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " Originators Cruise Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " wod_unique_cast\n" +
- " wod_unique_cast\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 2147483647\n" +
- " Identifier\n" +
- " Wod Unique Cast\n" +
- " \n" +
- " \n" +
- " \n" +
- " lat\n" +
- " latitude\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 90.0\n" +
- " -90.0\n" +
- " Location\n" +
- " Latitude\n" +
- " \n" +
- " \n" +
- " \n" +
- " lon\n" +
- " longitude\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 180.0\n" +
- " -180.0\n" +
- " Location\n" +
- " Longitude\n" +
- " \n" +
- " \n" +
- " \n" +
- " time\n" +
- " time\n" +
- " double\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " days since 1770-01-01T00:00:00Z\n" +
- " \n" +
- " \n" +
- " \n" +
- " date\n" +
- " date\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 2147483647\n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " GMT_time\n" +
- " GMT_time\n" +
- " float\n" +
- " \n" +
- " \n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " Access_no\n" +
- " Access_no\n" +
- " int\n" +
- " \n" +
- " \n" +
- " 2147483647\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " Project\n" +
- " Project\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Identifier\n" +
- " \n" +
- " \n" +
- " \n" +
- " Platform\n" +
- " Platform\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " Institute\n" +
- " Institute\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " Cast_Tow_number\n" +
- " Cast_Tow_number\n" +
- " int\n" +
- " \n" +
- " \n" +
- " -2147483647\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " 2147483647\n" +
- " \n" +
- " \n" +
- " \n" +
- " Orig_Stat_Num\n" +
- " Orig_Stat_Num\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 9.96921E36\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " Bottom_Depth\n" +
- " depth\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 8000.0\n" +
- " -8000.0\n" +
- " TopographyDepth\n" +
- " Location\n" +
- " Bottom_Depth\n" +
- " sea_floor_depth\n" +
- " m\n" +
- " \n" +
- " \n" +
- " \n" +
- " Cast_Duration\n" +
- " Cast_Duration\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 9.96921E36\n" +
- " Time\n" +
- " \n" +
- " \n" +
- " \n" +
- " Cast_Direction\n" +
- " Cast_Direction\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " High_res_pair\n" +
- " High_res_pair\n" +
- " int\n" +
- " \n" +
- " \n" +
- " -2147483647\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " 2147483647\n" +
- " \n" +
- " \n" +
- " \n" +
- " dataset\n" +
- " dataset\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " \n" +
- " \n" +
- " \n" +
- " dbase_orig\n" +
- " dbase_orig\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Unknown\n" +
- " Database Origin\n" +
- " \n" +
- " \n" +
- " \n" +
- " origflagset\n" +
- " origflagset\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Quality\n" +
- " Origflagset\n" +
- " \n" +
- " \n" +
- " \n" +
- " z\n" +
- " z\n" +
- " float\n" +
- " \n" +
- " \n" +
- " 8000.0\n" +
- " -8000.0\n" +
- " TopographyDepth\n" +
- " Location\n" +
- " Depth Below Sea Level\n" +
- " depth\n" +
- " \n" +
- " \n" +
- " \n" +
- " z_WODflag\n" +
- " z_WODflag\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " Z WODflag\n" +
- " \n" +
- " \n" +
- " \n" +
- " z_sigfig\n" +
- " z_sigfig\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " Location\n" +
- " Z Sigfig\n" +
- " \n" +
- " \n" +
- " \n" +
- " Temperature_row_size\n" +
- " Temperature_row_size\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " Temperature_WODprofileflag\n" +
- " Temperature_WODprofileflag\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " Temperature WODprofileflag\n" +
- " \n" +
- " \n" +
- " \n" +
- " Temperature_Scale\n" +
- " Temperature_Scale\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Temperature\n" +
- " \n" +
- " \n" +
- " \n" +
- " Temperature_Instrument\n" +
- " Temperature_Instrument\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Temperature\n" +
- " \n" +
- " \n" +
- " \n" +
- " Salinity_row_size\n" +
- " Salinity_row_size\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " Salinity_WODprofileflag\n" +
- " Salinity_WODprofileflag\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " Salinity WODprofileflag\n" +
- " \n" +
- " \n" +
- " \n" +
- " Salinity_Scale\n" +
- " Salinity_Scale\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Salinity\n" + // no standard_name or units because String
- // data
- " \n" +
- " \n" +
- " \n" +
- " Salinity_Instrument\n" +
- " Salinity_Instrument\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Salinity\n" + // no standard_name or units because String
- // data
- " \n" +
- " \n" +
- " \n" +
- " Oxygen_row_size\n" +
- " Oxygen_row_size\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " Oxygen_WODprofileflag\n" +
- " Oxygen_WODprofileflag\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " Oxygen WODprofileflag\n" +
- " \n" +
- " \n" +
- " \n" +
- " Oxygen_Instrument\n" +
- " Oxygen_Instrument\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Dissolved O2\n" +
- " \n" +
- " \n" +
- " \n" +
- " Oxygen_Original_units\n" +
- " Oxygen_Original_units\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Dissolved O2\n" +
- " Oxygen Original Units\n" +
- " \n" +
- " \n" +
- " \n" +
- " Pressure_row_size\n" +
- " Pressure_row_size\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " \n" +
- " \n" +
- " \n" +
- " Chlorophyll_row_size\n" +
- " Chlorophyll_row_size\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 999\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " 32767\n" +
- " \n" +
- " \n" +
- " \n" +
- " Chlorophyll_WODprofileflag\n" +
- " Chlorophyll_WODprofileflag\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 32767\n" +
- " 150.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " Chlorophyll WODprofileflag\n" +
- " \n" +
- " \n" +
- " \n" +
- " Chlorophyll_Instrument\n" +
- " Chlorophyll_Instrument\n" +
- " String\n" +
- " \n" +
- " \n" +
- " Ocean Color\n" + // no standard_name or units because
- // String data
- " \n" +
- " \n" +
- " \n" +
- " Chlorophyll_uncalibrated\n" +
- " Chlorophyll_uncalibrated\n" +
- " int\n" +
- " \n" +
- " \n" +
- " -2147483647\n" +
- " 30.0\n" +
- " 0.03\n" +
- " Log\n" +
- " Ocean Color\n" +
- " Concentration Of Chlorophyll In Sea Water\n" +
- " 2147483647\n" +
- " concentration_of_chlorophyll_in_sea_water\n" +
- " \n" +
- " \n" +
- " \n" +
- " Conductivit_row_size\n" +
- " Conductivit_row_size\n" +
- " short\n" +
- " \n" +
- " \n" +
- " 999\n" +
- " 100.0\n" +
- " 0.0\n" +
- " Statistics\n" +
- " 32767\n" +
- " \n" +
- " \n" +
- " \n" +
- " crs\n" +
- " crs\n" +
- " int\n" +
- " \n" +
- " \n" +
- " -2147483647\n" +
- " Unknown\n" +
- " CRS\n" +
- " 2147483647\n" +
- " \n" +
- " \n" +
- " \n" +
- " WODf\n" +
- " WODf\n" +
- " short\n" +
- " \n" +
- " \n" +
- " -32767\n" +
- " 10.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " 32767\n" +
- " \n" +
- " \n" +
- " \n" +
- " WODfp\n" +
- " WODfp\n" +
- " short\n" +
- " \n" +
- " \n" +
- " -32767\n" +
- " 10.0\n" +
- " 0.0\n" +
- " Quality\n" +
- " 32767\n" +
- " \n" +
- " \n" +
- " \n" +
- " WODfd\n" +
- " WODfd\n" +
- " short\n" +
- " \n" +
- " \n" +
- " -32767\n" +
- " 2.5\n" +
- " 0.0\n" +
- " Location\n" +
- " 32767\n" +
- " \n" +
- " \n" +
- "\n" +
- "\n\n";
+ String expected =
+ "\n"
+ + " 1440\n"
+ + " 10000\n"
+ + " "
+ + dataDir
+ + "\n"
+ + " "
+ + fileNameRegex
+ + "\n"
+ + " true\n"
+ + " .*\n"
+ + " last\n"
+ + " 0\n"
+ + " WOD_cruise_identifier, time\n"
+ + " false\n"
+ + " \n"
+ + " \n"
+ + " CF-1.10, COARDS, ACDD-1.3\n"
+ + " institution\n"
+ + " https://www.nodc.noaa.gov\n"
+ + " World Ocean Database\n"
+ + " https://www.nodc.noaa.gov\n"
+ + " NGDC(NODC), NOAA\n"
+ + " Access_no, accession, bathymetry, below, cast, Cast_Direction, Cast_Duration, Cast_Tow_number, center, chemistry, chlorophyll, Chlorophyll_Instrument, Chlorophyll_row_size, Chlorophyll_uncalibrated, Chlorophyll_WODprofileflag, color, concentration, concentration_of_chlorophyll_in_sea_water, conductivit, Conductivit_row_size, country, crs, cruise, data, database, dataset, date, dbase_orig, depth, direction, dissolved, dissolved o2, duration, earth, Earth Science > Oceans > Bathymetry/Seafloor Topography > Bathymetry, Earth Science > Oceans > Ocean Chemistry > Chlorophyll, file, flag, floor, geophysical, GMT_time, high, High_res_pair, identifier, institute, instrument, latitude, level, longitude, measured, multi, multi-cast, name, national, ncei, nesdis, ngdc, noaa, nodc, number, O2, observation, observations, ocean, ocean color, oceanographic, oceans, Orig_Stat_Num, origflagset, origin, original, originators, originators_cruise_identifier, oxygen, Oxygen_Instrument, Oxygen_Original_units, Oxygen_row_size, Oxygen_WODprofileflag, pair, platform, pressure, Pressure_row_size, profile, project, quality, resolution, responsible, salinity, Salinity_Instrument, Salinity_row_size, Salinity_Scale, Salinity_WODprofileflag, scale, science, sea, sea_floor_depth, seafloor, seawater, sigfig, station, statistics, temperature, Temperature_Instrument, Temperature_row_size, Temperature_Scale, Temperature_WODprofileflag, time, topography, tow, unique, units, upon, values, water, which, wod, WOD_cruise_identifier, wod_unique_cast, WODf, WODfd, wodflag, WODfp, wodprofileflag, world, z_sigfig, z_WODflag\n"
+ + " GCMD Science Keywords\n"
+ + " [standard]\n"
+ + " institution\n"
+ + " https://www.nodc.noaa.gov\n"
+ + " World Ocean Database 2013. URL:https://data.nodc.noaa.gov/woa/WOD/DOC/wod_intro.pdf\n"
+ + " (local files)\n"
+ + " CF Standard Name Table v70\n"
+ + " World Ocean Database - Multi-cast file. Data for multiple casts from the World Ocean Database\n"
+ + " World Ocean Database, Multi-cast file\n"
+ + " \n"
+ + " \n"
+ + " country\n"
+ + " country\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Location\n"
+ + " Country\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " WOD_cruise_identifier\n"
+ + " WOD_cruise_identifier\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " originators_cruise_identifier\n"
+ + " originators_cruise_identifier\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " Originators Cruise Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " wod_unique_cast\n"
+ + " wod_unique_cast\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 2147483647\n"
+ + " Identifier\n"
+ + " Wod Unique Cast\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " lat\n"
+ + " latitude\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 90.0\n"
+ + " -90.0\n"
+ + " Location\n"
+ + " Latitude\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " lon\n"
+ + " longitude\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 180.0\n"
+ + " -180.0\n"
+ + " Location\n"
+ + " Longitude\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " time\n"
+ + " time\n"
+ + " double\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " days since 1770-01-01T00:00:00Z\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " date\n"
+ + " date\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 2147483647\n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " GMT_time\n"
+ + " GMT_time\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Access_no\n"
+ + " Access_no\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " 2147483647\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Project\n"
+ + " Project\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Identifier\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Platform\n"
+ + " Platform\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Institute\n"
+ + " Institute\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Cast_Tow_number\n"
+ + " Cast_Tow_number\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " -2147483647\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " 2147483647\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Orig_Stat_Num\n"
+ + " Orig_Stat_Num\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 9.96921E36\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Bottom_Depth\n"
+ + " depth\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 8000.0\n"
+ + " -8000.0\n"
+ + " TopographyDepth\n"
+ + " Location\n"
+ + " Bottom_Depth\n"
+ + " sea_floor_depth\n"
+ + " m\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Cast_Duration\n"
+ + " Cast_Duration\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 9.96921E36\n"
+ + " Time\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Cast_Direction\n"
+ + " Cast_Direction\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " High_res_pair\n"
+ + " High_res_pair\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " -2147483647\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " 2147483647\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " dataset\n"
+ + " dataset\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " dbase_orig\n"
+ + " dbase_orig\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Unknown\n"
+ + " Database Origin\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " origflagset\n"
+ + " origflagset\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Quality\n"
+ + " Origflagset\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " z\n"
+ + " z\n"
+ + " float\n"
+ + " \n"
+ + " \n"
+ + " 8000.0\n"
+ + " -8000.0\n"
+ + " TopographyDepth\n"
+ + " Location\n"
+ + " Depth Below Sea Level\n"
+ + " depth\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " z_WODflag\n"
+ + " z_WODflag\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " Z WODflag\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " z_sigfig\n"
+ + " z_sigfig\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " Location\n"
+ + " Z Sigfig\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Temperature_row_size\n"
+ + " Temperature_row_size\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Temperature_WODprofileflag\n"
+ + " Temperature_WODprofileflag\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " Temperature WODprofileflag\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Temperature_Scale\n"
+ + " Temperature_Scale\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Temperature\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Temperature_Instrument\n"
+ + " Temperature_Instrument\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Temperature\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Salinity_row_size\n"
+ + " Salinity_row_size\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Salinity_WODprofileflag\n"
+ + " Salinity_WODprofileflag\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " Salinity WODprofileflag\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Salinity_Scale\n"
+ + " Salinity_Scale\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Salinity\n"
+ + // no standard_name or units because String
+ // data
+ " \n"
+ + " \n"
+ + " \n"
+ + " Salinity_Instrument\n"
+ + " Salinity_Instrument\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Salinity\n"
+ + // no standard_name or units because String
+ // data
+ " \n"
+ + " \n"
+ + " \n"
+ + " Oxygen_row_size\n"
+ + " Oxygen_row_size\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Oxygen_WODprofileflag\n"
+ + " Oxygen_WODprofileflag\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " Oxygen WODprofileflag\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Oxygen_Instrument\n"
+ + " Oxygen_Instrument\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Dissolved O2\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Oxygen_Original_units\n"
+ + " Oxygen_Original_units\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Dissolved O2\n"
+ + " Oxygen Original Units\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Pressure_row_size\n"
+ + " Pressure_row_size\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Chlorophyll_row_size\n"
+ + " Chlorophyll_row_size\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 999\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " 32767\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Chlorophyll_WODprofileflag\n"
+ + " Chlorophyll_WODprofileflag\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 32767\n"
+ + " 150.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " Chlorophyll WODprofileflag\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Chlorophyll_Instrument\n"
+ + " Chlorophyll_Instrument\n"
+ + " String\n"
+ + " \n"
+ + " \n"
+ + " Ocean Color\n"
+ + // no standard_name or units because
+ // String data
+ " \n"
+ + " \n"
+ + " \n"
+ + " Chlorophyll_uncalibrated\n"
+ + " Chlorophyll_uncalibrated\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " -2147483647\n"
+ + " 30.0\n"
+ + " 0.03\n"
+ + " Log\n"
+ + " Ocean Color\n"
+ + " Concentration Of Chlorophyll In Sea Water\n"
+ + " 2147483647\n"
+ + " concentration_of_chlorophyll_in_sea_water\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " Conductivit_row_size\n"
+ + " Conductivit_row_size\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " 999\n"
+ + " 100.0\n"
+ + " 0.0\n"
+ + " Statistics\n"
+ + " 32767\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " crs\n"
+ + " crs\n"
+ + " int\n"
+ + " \n"
+ + " \n"
+ + " -2147483647\n"
+ + " Unknown\n"
+ + " CRS\n"
+ + " 2147483647\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " WODf\n"
+ + " WODf\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " -32767\n"
+ + " 10.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " 32767\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " WODfp\n"
+ + " WODfp\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " -32767\n"
+ + " 10.0\n"
+ + " 0.0\n"
+ + " Quality\n"
+ + " 32767\n"
+ + " \n"
+ + " \n"
+ + " \n"
+ + " WODfd\n"
+ + " WODfd\n"
+ + " short\n"
+ + " \n"
+ + " \n"
+ + " -32767\n"
+ + " 2.5\n"
+ + " 0.0\n"
+ + " Location\n"
+ + " 32767\n"
+ + " \n"
+ + " \n"
+ + "\n"
+ + "\n\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// Test.ensureEqual(results.substring(0, Math.min(results.length(),
// expected.length())),
@@ -1115,7 +1157,7 @@ void testGenerateDatasetsXml2() throws Throwable {
* @throws Throwable if trouble
*/
@ParameterizedTest
- @ValueSource(booleans = { true, false })
+ @ValueSource(booleans = {true, false})
void test1(boolean deleteCachedDatasetInfo) throws Throwable {
// String2.log("\n****************** EDDTableFromNcCFFiles.test1()
// *****************\n");
@@ -1127,39 +1169,52 @@ void test1(boolean deleteCachedDatasetInfo) throws Throwable {
String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10);
String id = "testNcCF1b";
- if (deleteCachedDatasetInfo)
- EDDTableFromNcCFFiles.deleteCachedDatasetInfo(id);
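+ // Parameterized: one run deletes the cached dataset info, the other reuses it.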
+ if (deleteCachedDatasetInfo) EDDTableFromNcCFFiles.deleteCachedDatasetInfo(id);
EDDTable eddTable = (EDDTable) EDDTestDataset.gettestNcCF1b();
// .csv for one lat,lon,time
userDapQuery = "";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_test1a", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_test1a",
+ ".csv");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "line_station,longitude,latitude,altitude,time,obsScientific,obsValue,obsUnits\n" +
- ",degrees_east,degrees_north,m,UTC,,,\n" +
- "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Argyropelecus sladeni,2,number of larvae\n"
- +
- "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Chauliodus macouni,3,number of larvae\n" +
- "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Danaphos oculatus,4,number of larvae\n" +
- "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Diogenichthys atlanticus,3,number of larvae\n";
+ expected =
+ "line_station,longitude,latitude,altitude,time,obsScientific,obsValue,obsUnits\n"
+ + ",degrees_east,degrees_north,m,UTC,,,\n"
+ + "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Argyropelecus sladeni,2,number of larvae\n"
+ + "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Chauliodus macouni,3,number of larvae\n"
+ + "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Danaphos oculatus,4,number of larvae\n"
+ + "076.7_100,-124.32333,33.388332,-214.1,2004-11-16T21:20:00Z,Diogenichthys atlanticus,3,number of larvae\n";
Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .csv only outer vars
userDapQuery = "line_station";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_test1b", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_test1b",
+ ".csv");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "line_station\n" +
- "\n" +
- "076.7_100\n" +
- "080_100\n" +
- "083.3_100\n"; // 4 row with all mv was removed
+ expected =
+ "line_station\n"
+ + "\n"
+ + "076.7_100\n"
+ + "080_100\n"
+ + "083.3_100\n"; // 4 row with all mv was removed
Test.ensureEqual(results, expected, "\nresults=\n" + results);
-
}
/**
@@ -1186,30 +1241,46 @@ void testKevin20130109() throws Throwable {
// test time < first time is 2011-02-15T00:00:00Z
userDapQuery = "traj,obs,time,longitude,latitude,temp,ve,vn&traj<26.5&time<2011-02-15T00:05";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_test1Kevin20130109a", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_test1Kevin20130109a",
+ ".csv");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "traj,obs,time,longitude,latitude,temp,ve,vn\n" +
- ",,UTC,degrees_east,degrees_north,Deg C,cm/s,cm/s\n" +
- "1.0,1.0,2011-02-15T00:00:00Z,-111.344,-38.71,18.508,-14.618,17.793\n" +
- "22.0,7387.0,2011-02-15T00:00:00Z,91.875,-54.314,3.018,64.135,1.534\n" +
- "26.0,9139.0,2011-02-15T00:00:00Z,168.892,-48.516,11.381,24.49,4.884\n";
+ expected =
+ "traj,obs,time,longitude,latitude,temp,ve,vn\n"
+ + ",,UTC,degrees_east,degrees_north,Deg C,cm/s,cm/s\n"
+ + "1.0,1.0,2011-02-15T00:00:00Z,-111.344,-38.71,18.508,-14.618,17.793\n"
+ + "22.0,7387.0,2011-02-15T00:00:00Z,91.875,-54.314,3.018,64.135,1.534\n"
+ + "26.0,9139.0,2011-02-15T00:00:00Z,168.892,-48.516,11.381,24.49,4.884\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// test time > last time is 2011-09-30T18
userDapQuery = "traj,obs,time,longitude,latitude,temp,ve,vn&traj<6&time>=2011-09-30T17:50";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_test1Kevin20130109a", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_test1Kevin20130109a",
+ ".csv");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "traj,obs,time,longitude,latitude,temp,ve,vn\n" +
- ",,UTC,degrees_east,degrees_north,Deg C,cm/s,cm/s\n" +
- "1.0,912.0,2011-09-30T18:00:00Z,-91.252,-33.43,15.28,NaN,NaN\n" +
- "2.0,1352.0,2011-09-30T18:00:00Z,145.838,38.44,22.725,NaN,NaN\n" +
- "3.0,1794.0,2011-09-30T18:00:00Z,156.895,39.877,21.517,NaN,NaN\n" +
- "4.0,2233.0,2011-09-30T18:00:00Z,150.312,34.38,26.658,-78.272,41.257\n" +
- "5.0,2676.0,2011-09-30T18:00:00Z,162.9,36.15,26.129,-4.85,15.724\n";
+ expected =
+ "traj,obs,time,longitude,latitude,temp,ve,vn\n"
+ + ",,UTC,degrees_east,degrees_north,Deg C,cm/s,cm/s\n"
+ + "1.0,912.0,2011-09-30T18:00:00Z,-91.252,-33.43,15.28,NaN,NaN\n"
+ + "2.0,1352.0,2011-09-30T18:00:00Z,145.838,38.44,22.725,NaN,NaN\n"
+ + "3.0,1794.0,2011-09-30T18:00:00Z,156.895,39.877,21.517,NaN,NaN\n"
+ + "4.0,2233.0,2011-09-30T18:00:00Z,150.312,34.38,26.658,-78.272,41.257\n"
+ + "5.0,2676.0,2011-09-30T18:00:00Z,162.9,36.15,26.129,-4.85,15.724\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// debugMode = oDebugMode;
@@ -1240,51 +1311,90 @@ void testKevin20160519() throws Throwable {
Table table;
// query with commas is okay
- userDapQuery = "array,station,wmo_platform_code,longitude,latitude,time,depth," +
- "LON_502,QX_5502,LAT_500,QY_5500&time>=2016-01-10&time<=2016-01-20&station=\"0n110w\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- dir, eddTable.className() + "_testKevin20160519_1", ".ncCF");
+ userDapQuery =
+ "array,station,wmo_platform_code,longitude,latitude,time,depth,"
+ + "LON_502,QX_5502,LAT_500,QY_5500&time>=2016-01-10&time<=2016-01-20&station=\"0n110w\"";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ dir,
+ eddTable.className() + "_testKevin20160519_1",
+ ".ncCF");
table = new Table();
- table.readNcCF(dir + tName, null, 0, // standardizeWhat
- null, null, null);
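+ // Read the .ncCF file back in: null loadVariableNames loads all variables,
+ // and the trailing nulls request no constraints.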
+ table.readNcCF(
+ dir + tName,
+ null,
+ 0, // standardizeWhat
+ null,
+ null,
+ null);
results = table.dataToString();
expected = // depth/time are unexpected order because of .ncCF file read then flatten
- "array,station,wmo_platform_code,longitude,latitude,depth,time,LON_502,QX_5502,LAT_500,QY_5500\n" +
- "TAO/TRITON,0n110w,32323,250.0,0.0,0.0,1.4529456E9,250.06406,2.0,0.03540476,2.0\n";
+ "array,station,wmo_platform_code,longitude,latitude,depth,time,LON_502,QX_5502,LAT_500,QY_5500\n"
+ + "TAO/TRITON,0n110w,32323,250.0,0.0,0.0,1.4529456E9,250.06406,2.0,0.03540476,2.0\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// percent-encoded query is okay for other file type(s)
- userDapQuery = "array%2Cstation%2Cwmo_platform_code%2Clongitude%2Clatitude" +
- "%2Ctime%2Cdepth%2CLON_502%2CQX_5502%2CLAT_500%2CQY_5500" +
- "&time>=2016-01-10&time<=2016-01-20&station=\"0n110w\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- dir, eddTable.className() + "_testKevin20160519_2", ".nc");
+ userDapQuery =
+ "array%2Cstation%2Cwmo_platform_code%2Clongitude%2Clatitude"
+ + "%2Ctime%2Cdepth%2CLON_502%2CQX_5502%2CLAT_500%2CQY_5500"
+ + "&time>=2016-01-10&time<=2016-01-20&station=\"0n110w\"";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ dir,
+ eddTable.className() + "_testKevin20160519_2",
+ ".nc");
table = new Table();
- table.readNDNc(dir + tName, null, 0, // standardizeWhat
- null, 0, 0);
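+ // Read the flat .nc file back in: null loadVariableNames loads all variables;
+ // with no constraint variable named, the 0,0 bounds should be ignored.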
+ table.readNDNc(
+ dir + tName,
+ null,
+ 0, // standardizeWhat
+ null,
+ 0,
+ 0);
// expected is same except there's an additional 'row' column, remove it
table.removeColumn(table.findColumnNumber("row"));
expected = // then same except for order depth/time
- "array,station,wmo_platform_code,longitude,latitude,time,depth,LON_502,QX_5502,LAT_500,QY_5500\n" +
- "TAO/TRITON,0n110w,32323,250.0,0.0,1.4529456E9,0.0,250.06406,2.0,0.03540476,2.0\n";
+ "array,station,wmo_platform_code,longitude,latitude,time,depth,LON_502,QX_5502,LAT_500,QY_5500\n"
+ + "TAO/TRITON,0n110w,32323,250.0,0.0,1.4529456E9,0.0,250.06406,2.0,0.03540476,2.0\n";
results = table.dataToString();
Test.ensureEqual(results, expected, "results=\n" + results);
// percent encoded query for .ncCF fails with error
// "HTTP Status 500 - Query error: variable=station is listed twice in the
// results variables list."
- userDapQuery = "array%2Cstation%2Cwmo_platform_code%2Clongitude%2Clatitude" +
- "%2Ctime%2Cdepth%2CLON_502%2CQX_5502%2CLAT_500%2CQY_5500" +
- "&time>=2016-01-10&time<=2016-01-20&station=\"0n110w\"";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- dir, eddTable.className() + "_testKevin20160519_3", ".ncCF");
+ userDapQuery =
+ "array%2Cstation%2Cwmo_platform_code%2Clongitude%2Clatitude"
+ + "%2Ctime%2Cdepth%2CLON_502%2CQX_5502%2CLAT_500%2CQY_5500"
+ + "&time>=2016-01-10&time<=2016-01-20&station=\"0n110w\"";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ dir,
+ eddTable.className() + "_testKevin20160519_3",
+ ".ncCF");
table = new Table();
- table.readNcCF(dir + tName, null, 0, // standardizeWhat
- null, null, null);
+ table.readNcCF(
+ dir + tName,
+ null,
+ 0, // standardizeWhat
+ null,
+ null,
+ null);
results = table.dataToString();
expected = // depth/time are unexpected order because of .ncCF file read then flatten
- "array,station,wmo_platform_code,longitude,latitude,depth,time,LON_502,QX_5502,LAT_500,QY_5500\n" +
- "TAO/TRITON,0n110w,32323,250.0,0.0,0.0,1.4529456E9,250.06406,2.0,0.03540476,2.0\n";
+ "array,station,wmo_platform_code,longitude,latitude,depth,time,LON_502,QX_5502,LAT_500,QY_5500\n"
+ + "TAO/TRITON,0n110w,32323,250.0,0.0,0.0,1.4529456E9,250.06406,2.0,0.03540476,2.0\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// debugMode = oDebugMode;
@@ -1305,7 +1415,8 @@ void testNoAttName() throws Throwable {
throw new SimpleException("shouldn't get here");
} catch (Throwable t) {
String msg = t.toString();
- Test.ensureLinesMatch(msg,
+ Test.ensureLinesMatch(
+ msg,
"java.lang.RuntimeException: datasets.xml error on line #\\d{1,7}: An tag doesn't have a \"name\" attribute.",
"");
}
@@ -1315,11 +1426,11 @@ void testNoAttName() throws Throwable {
throw new SimpleException("shouldn't get here");
} catch (Throwable t) {
String msg = t.toString();
- Test.ensureLinesMatch(msg,
+ Test.ensureLinesMatch(
+ msg,
"java.lang.RuntimeException: datasets.xml error on line #\\d{1,7}: An tag doesn't have a \"name\" attribute.",
"");
}
-
}
/**
@@ -1339,16 +1450,27 @@ void testBridger() throws Throwable {
String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10);
Table table;
- String dataDir = Path.of(EDDTableFromNcCFFilesTests.class.getResource("/data/bridger/").toURI()).toString();
+ String dataDir =
+ Path.of(EDDTableFromNcCFFilesTests.class.getResource("/data/bridger/").toURI()).toString();
table = new Table();
- table.readNcCF(dataDir + "/B01.accelerometer.historical.nc", null, 0, // standardizeWhat
- null, null, null);
+ table.readNcCF(
+ dataDir + "/B01.accelerometer.historical.nc",
+ null,
+ 0, // standardizeWhat
+ null,
+ null,
+ null);
Test.ensureSomethingUnicode(table.globalAttributes(), "historical global attributes");
table = new Table();
- table.readNcCF(dataDir + "/B01.accelerometer.realtime.nc", null, 0, // standardizeWhat
- null, null, null);
+ table.readNcCF(
+ dataDir + "/B01.accelerometer.realtime.nc",
+ null,
+ 0, // standardizeWhat
+ null,
+ null,
+ null);
Test.ensureSomethingUnicode(table.globalAttributes(), "realtime global attributes");
String id = "UMaineAccB01";
@@ -1356,360 +1478,376 @@ void testBridger() throws Throwable {
EDDTable eddTable = (EDDTable) EDDTestDataset.getUMaineAccB01();
// .dds
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "",
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_bridger", ".dds");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "",
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_bridger",
+ ".dds");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "Dataset {\n" +
- " Sequence {\n" +
- " String station;\n" +
- " Float32 longitude;\n" +
- " Float32 latitude;\n" +
- " Float32 depth;\n" +
- " Float64 time;\n" +
- " Float64 time_created;\n" +
- " Float64 time_modified;\n" +
- " Float32 significant_wave_height;\n" +
- " Byte significant_wave_height_qc;\n" +
- " Float32 dominant_wave_period;\n" +
- " Byte dominant_wave_period_qc;\n" +
- " } s;\n" +
- "} s;\n";
+ expected =
+ "Dataset {\n"
+ + " Sequence {\n"
+ + " String station;\n"
+ + " Float32 longitude;\n"
+ + " Float32 latitude;\n"
+ + " Float32 depth;\n"
+ + " Float64 time;\n"
+ + " Float64 time_created;\n"
+ + " Float64 time_modified;\n"
+ + " Float32 significant_wave_height;\n"
+ + " Byte significant_wave_height_qc;\n"
+ + " Float32 dominant_wave_period;\n"
+ + " Byte dominant_wave_period_qc;\n"
+ + " } s;\n"
+ + "} s;\n";
Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .das
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "",
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_bridger", ".das");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "",
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_bridger",
+ ".das");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "Attributes \\{\n" +
- " s \\{\n" +
- " station \\{\n" +
- " String cf_role \"timeseries_id\";\n" +
- " String ioos_category \"Unknown\";\n" +
- " String long_name \"B01\";\n" +
- " String name \"B01\";\n" +
- " String short_name \"B01\";\n" +
- " String standard_name \"station_name\";\n" +
- " \\}\n" +
- " longitude \\{\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " Float32 actual_range -70.42779, -70.42755;\n" +
- " String axis \"X\";\n" +
- " Float64 colorBarMaximum 180.0;\n" +
- " Float64 colorBarMinimum -180.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude\";\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " \\}\n" +
- " latitude \\{\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " Float32 actual_range 43.18019, 43.18044;\n" +
- " String axis \"Y\";\n" +
- " Float64 colorBarMaximum 90.0;\n" +
- " Float64 colorBarMinimum -90.0;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude\";\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " \\}\n" +
- " depth \\{\n" +
- " String _CoordinateAxisType \"Height\";\n" +
- " String _CoordinateZisPositive \"down\";\n" +
- " Float32 actual_range 0.0, 0.0;\n" +
- " String axis \"Z\";\n" +
- " Float64 colorBarMaximum 8000.0;\n" +
- " Float64 colorBarMinimum -8000.0;\n" +
- " String colorBarPalette \"TopographyDepth\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Depth\";\n" +
- " String positive \"down\";\n" +
- " String standard_name \"depth\";\n" +
- " String units \"m\";\n" +
- " \\}\n" +
- " time \\{\n" +
- " UInt32 _ChunkSizes 1;\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 1.0173492e\\+9, 1.3907502e\\+9;\n" +
- " String axis \"T\";\n" +
- " String calendar \"gregorian\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Time\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " \\}\n" +
- " time_created \\{\n" +
- " Float64 actual_range 1.371744887122e\\+9, 1.390750745219e\\+9;\n" +
- " String coordinates \"time lon lat depth\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Time Record Created\";\n" +
- " String short_name \"time_cr\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " Float64 valid_range 0.0, 99999.0;\n" +
- " \\}\n" +
- " time_modified \\{\n" +
- " Float64 actual_range 1.371744887122e\\+9, 1.390750745219e\\+9;\n" +
- " String coordinates \"time lon lat depth\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Time Record Last Modified\";\n" +
- " String short_name \"time_mod\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " Float64 valid_range 0.0, 99999.0;\n" +
- " \\}\n" +
- " significant_wave_height \\{\n" +
- " UInt32 _ChunkSizes 1;\n" +
- " Float32 _FillValue -999.0;\n" +
- " Float64 accuracy 0.5;\n" +
- " Float32 actual_range 0.009102137, 9.613417;\n" +
- " String ancillary_variables \"significant_wave_height_qc\";\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String coordinates \"time lon lat depth\";\n" +
- " Int32 epic_code 4061;\n" +
- " String ioos_category \"Surface Waves\";\n" +
- " Float64 is_dead 0.0;\n" +
- " String long_name \"Significant Wave Height\";\n" +
- " String measurement_type \"Computed\";\n" +
- " Float64 precision 0.1;\n" +
- " String short_name \"SWH\";\n" +
- " String standard_name \"significant_height_of_wind_and_swell_waves\";\n" +
- " String units \"m\";\n" +
- " Float32 valid_range 0.0, 10.0;\n" +
- " \\}\n" +
- " significant_wave_height_qc \\{\n" +
- " UInt32 _ChunkSizes 1;\n" +
- " Byte _FillValue -128;\n" +
- " String _Unsigned \"false\";\n" +
- " Byte actual_range 0, 99;\n" +
- " Float64 colorBarMaximum 128.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String coordinates \"time lon lat depth\";\n" +
- " String flag_meanings \"quality_good out_of_range sensor_nonfunctional algorithm_failure_no_infl_pt\";\n"
- +
- " Byte flag_values 0, 1, 2, 3;\n" +
- " String intent \"data_quality\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Significant Wave Height Quality Control\";\n" +
- " String short_name \"SWHQC\";\n" +
- " String standard_name \"significant_height_of_wind_and_swell_waves data_quality\";\n" +
- " String units \"1\";\n" +
- " Byte valid_range -127, 127;\n" +
- " \\}\n" +
- " dominant_wave_period \\{\n" +
- " UInt32 _ChunkSizes 1;\n" +
- " Float32 _FillValue -999.0;\n" +
- " Float64 accuracy 2.0;\n" +
- " Float32 actual_range 1.032258, 16.0;\n" +
- " String ancillary_variables \"dominant_wave_period_qc\";\n" +
- " Float64 colorBarMaximum 40.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String coordinates \"time lon lat depth\";\n" +
- " Int32 epic_code 4063;\n" +
- " String ioos_category \"Surface Waves\";\n" +
- " Float64 is_dead 0.0;\n" +
- " String long_name \"Dominant Wave Period\";\n" +
- " String measurement_type \"Computed\";\n" +
- " Float64 precision 1.0;\n" +
- " Float64 sensor_depth 0.0;\n" +
- " String short_name \"DWP\";\n" +
- " String standard_name \"period\";\n" +
- " String units \"s\";\n" +
- " Float32 valid_range 0.0, 32.0;\n" +
- " \\}\n" +
- " dominant_wave_period_qc \\{\n" +
- " UInt32 _ChunkSizes 1;\n" +
- " Byte _FillValue -128;\n" +
- " String _Unsigned \"false\";\n" +
- " Byte actual_range 0, 99;\n" +
- " Float64 colorBarMaximum 128.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String coordinates \"time lon lat depth\";\n" +
- " String flag_meanings \"quality_good out_of_range sensor_nonfunctional algorithm_failure_no_infl_pt\";\n"
- +
- " Byte flag_values 0, 1, 2, 3;\n" +
- " String intent \"data_quality\";\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"Dominant Wave Period Quality\";\n" +
- " String short_name \"DWPQ\";\n" +
- " String standard_name \"period data_quality\";\n" +
- " String units \"1\";\n" +
- " Byte valid_range -127, 127;\n" +
- " \\}\n" +
- " \\}\n" +
- " NC_GLOBAL \\{\n" +
- " String accelerometer_serial_number \"SUMAC0902A01107\";\n" +
- " String algorithm_ids \"Waves_SWH_DWP_1.12: 12-Jun-2013 15:15:53\";\n" +
- " Float64 averaging_period 17.07;\n" +
- " String averaging_period_units \"Minutes\";\n" +
- " Int32 breakout_id 7;\n" +
- " String buffer_type \"accelerometer\";\n" +
- " String cdm_data_type \"TimeSeries\";\n" +
- " String cdm_timeseries_variables \"station\";\n" +
- " String clock_time \"Center of period\";\n" +
- " String contact \"nealp@maine.edu,ljm@umeoce.maine.edu,bfleming@umeoce.maine.edu\";\n" +
- " String control_box_serial_number \"UMECB124\";\n" +
- " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n" +
- " String creator_email \"nealp@maine.edu,ljm@umeoce.maine.edu,bfleming@umeoce.maine.edu\";\n" +
- " String creator_name \"Neal Pettigrew\";\n" +
- " String creator_url \"http://gyre.umeoce.maine.edu\";\n" +
- " String depth_datum \"Sea Level\";\n" +
- " Float64 Easternmost_Easting -70.42755;\n" +
- " String featureType \"TimeSeries\";\n" +
- " Float64 geospatial_lat_max 43.18044;\n" +
- " Float64 geospatial_lat_min 43.18019;\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " Float64 geospatial_lon_max -70.42755;\n" +
- " Float64 geospatial_lon_min -70.42779;\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " Float64 geospatial_vertical_max 0.0;\n" +
- " Float64 geospatial_vertical_min 0.0;\n" +
- " String geospatial_vertical_positive \"down\";\n" +
- " String geospatial_vertical_units \"m\";\n" +
- " String goes_platform_id \"044250DC\";\n" +
- " String history \"2014-01-03 11:20:56: Parameter dominant_wave_period marked as non-functional as of julian day 56660.395833 \\(2014-01-03 09:30:00\\)\n"
- +
- "2014-01-03 11:20:46: Parameter significant_wave_height marked as non-functional as of julian day 56660.395833 \\(2014-01-03 09:30:00\\)\n"
- +
- "2013-06-25 11:57:07: Modified \\[lon,lat\\] to \\[-70.427787,43.180192\\].\n" +
- "Thu Jun 20 16:50:01 2013: /usr/local/bin/ncrcat -d time,56463.65625,56464.00 B0125.accelerometer.realtime.nc B0125.accelerometer.realtime.nc.new\n"
- +
- "\n" +
- today + "T.{8}Z \\(local files\\)\n" +
- today + "T.{8}Z http://localhost:8080/erddap/tabledap/UMaineAccB01.das\";\n" +
- " String id \"B01\";\n" +
- " String infoUrl \"http://gyre.umeoce.maine.edu/\";\n" +
- " String institution \"Department of Physical Oceanography, School of Marine Sciences, University of Maine\";\n"
- +
- " String institution_url \"http://gyre.umeoce.maine.edu\";\n" +
- " Int32 instrument_number 0;\n" +
- " String keywords \"accelerometer, b01, buoy, chemistry, chlorophyll, circulation, conductivity, control, currents, data, density, department, depth, dominant, dominant_wave_period data_quality, Earth Science > Oceans > Ocean Chemistry > Chlorophyll, Earth Science > Oceans > Ocean Chemistry > Oxygen, Earth Science > Oceans > Ocean Circulation > Ocean Currents, Earth Science > Oceans > Ocean Optics > Turbidity, Earth Science > Oceans > Ocean Pressure > Sea Level Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Ocean Waves > Significant Wave Height, Earth Science > Oceans > Ocean Waves > Swells, Earth Science > Oceans > Ocean Waves > Wave Period, Earth Science > Oceans > Ocean Winds > Surface Winds, Earth Science > Oceans > Salinity/Density > Conductivity, Earth Science > Oceans > Salinity/Density > Density, Earth Science > Oceans > Salinity/Density > Salinity, height, level, maine, marine, name, o2, ocean, oceanography, oceans, optics, oxygen, period, physical, pressure, quality, salinity, school, sciences, sea, seawater, sensor, significant, significant_height_of_wind_and_swell_waves, significant_wave_height data_quality, station, station_name, surface, surface waves, swell, swells, temperature, time, turbidity, university, water, wave, waves, wind, winds\";\n"
- +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " Float64 latitude 43.18019230109601;\n" +
- " String license \"The data may be used and redistributed for free but is not intended\n" +
- "for legal use, since it may contain inaccuracies. Neither the data\n" +
- "Contributor, ERD, NOAA, nor the United States Government, nor any\n" +
- "of their employees or contractors, makes any warranty, express or\n" +
- "implied, including warranties of merchantability and fitness for a\n" +
- "particular purpose, or assumes any legal liability for the accuracy,\n" +
- "completeness, or usefulness, of this information.\";\n" +
- " String long_name \"B01\";\n" +
- " Float64 longitude -70.42778651970477;\n" +
- " Float64 magnetic_variation -16.3;\n" +
- " String mooring_site_desc \"Western Maine Shelf\";\n" +
- " String mooring_site_id \"B0125\";\n" +
- " String mooring_type \"Slack\";\n" +
- " String naming_authority \"edu.maine\";\n" +
- " Int32 nco_openmp_thread_number 1;\n" +
- " String ndbc_site_id \"44030\";\n" +
- " Float64 Northernmost_Northing 43.18044;\n" +
- " Int32 number_observations_per_hour 2;\n" +
- " Int32 number_samples_per_observation 2048;\n" +
- " String position_datum \"WGS 84\";\n" +
- " String processing \"realtime\";\n" +
- " String project \"NERACOOS\";\n" +
- " String project_url \"http://www.neracoos.org\";\n" +
- " String publisher \"Department of Physical Oceanography, School of Marine Sciences, University of Maine\";\n"
- +
- " String publisher_email \"info@neracoos.org\";\n" +
- " String publisher_name \"Northeastern Regional Association of Coastal and Ocean Observing Systems \\(NERACOOS\\)\";\n"
- +
- " String publisher_phone \"\\(603\\) 319 1785\";\n" +
- " String publisher_url \"http://www.neracoos.org/\";\n" +
- " String references \"http://gyre.umeoce.maine.edu/data/gomoos/buoy/doc/buoy_system_doc/buoy_system/book1.html\";\n"
- +
- " String short_name \"B01\";\n" +
- " String source \"Ocean Data Acquisition Systems \\(ODAS\\) Buoy\";\n" +
- " String sourceUrl \"\\(local files\\)\";\n" +
- " Float64 Southernmost_Northing 43.18019;\n" +
- " String standard_name_vocabulary \"CF-1.6\";\n" +
- " String station_name \"B01\";\n" +
- " String station_photo \"http://gyre.umeoce.maine.edu/gomoos/images/generic_buoy.png\";\n" +
- " String station_type \"Surface Mooring\";\n" +
- " String subsetVariables \"station\";\n" +
- " String summary \"Ocean observation data from the Northeastern Regional Association of Coastal & Ocean Observing Systems \\(NERACOOS\\). The NERACOOS region includes the northeast United States and Canadian Maritime provinces, as part of the United States Integrated Ocean Observing System \\(IOOS\\). These data are served by Unidata's Thematic Realtime Environmental Distributed Data Services \\(THREDDS\\) Data Server \\(TDS\\) in a variety of interoperable data services and output formats.\";\n"
- +
- " String time_coverage_end \"2014-01-26T15:30:00Z\";\n" +
- " String time_coverage_start \"2002-03-28T21:00:00Z\";\n" +
- " String time_zone \"UTC\";\n" +
- " String title \"University of Maine, B01 Accelerometer Buoy Sensor\";\n" +
- " String uscg_light_list_letter \"B\";\n" +
- " String uscg_light_list_number \"113\";\n" +
- " Int32 watch_circle_radius 45;\n" +
- " Float64 water_depth 62.0;\n" +
- " Float64 Westernmost_Easting -70.42779;\n" +
- " \\}\n" +
- "\\}\n";
+ expected =
+ "Attributes \\{\n"
+ + " s \\{\n"
+ + " station \\{\n"
+ + " String cf_role \"timeseries_id\";\n"
+ + " String ioos_category \"Unknown\";\n"
+ + " String long_name \"B01\";\n"
+ + " String name \"B01\";\n"
+ + " String short_name \"B01\";\n"
+ + " String standard_name \"station_name\";\n"
+ + " \\}\n"
+ + " longitude \\{\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " Float32 actual_range -70.42779, -70.42755;\n"
+ + " String axis \"X\";\n"
+ + " Float64 colorBarMaximum 180.0;\n"
+ + " Float64 colorBarMinimum -180.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude\";\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " \\}\n"
+ + " latitude \\{\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " Float32 actual_range 43.18019, 43.18044;\n"
+ + " String axis \"Y\";\n"
+ + " Float64 colorBarMaximum 90.0;\n"
+ + " Float64 colorBarMinimum -90.0;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude\";\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " \\}\n"
+ + " depth \\{\n"
+ + " String _CoordinateAxisType \"Height\";\n"
+ + " String _CoordinateZisPositive \"down\";\n"
+ + " Float32 actual_range 0.0, 0.0;\n"
+ + " String axis \"Z\";\n"
+ + " Float64 colorBarMaximum 8000.0;\n"
+ + " Float64 colorBarMinimum -8000.0;\n"
+ + " String colorBarPalette \"TopographyDepth\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Depth\";\n"
+ + " String positive \"down\";\n"
+ + " String standard_name \"depth\";\n"
+ + " String units \"m\";\n"
+ + " \\}\n"
+ + " time \\{\n"
+ + (results.indexOf("UInt32 _ChunkSizes 1;\n") > -1 ? " UInt32 _ChunkSizes 1;\n" : "")
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 1.0173492e\\+9, 1.3907502e\\+9;\n"
+ + " String axis \"T\";\n"
+ + " String calendar \"gregorian\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Time\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " \\}\n"
+ + " time_created \\{\n"
+ + " Float64 actual_range 1.371744887122e\\+9, 1.390750745219e\\+9;\n"
+ + " String coordinates \"time lon lat depth\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Time Record Created\";\n"
+ + " String short_name \"time_cr\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " Float64 valid_range 0.0, 99999.0;\n"
+ + " \\}\n"
+ + " time_modified \\{\n"
+ + " Float64 actual_range 1.371744887122e\\+9, 1.390750745219e\\+9;\n"
+ + " String coordinates \"time lon lat depth\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Time Record Last Modified\";\n"
+ + " String short_name \"time_mod\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " Float64 valid_range 0.0, 99999.0;\n"
+ + " \\}\n"
+ + " significant_wave_height \\{\n"
+ + (results.indexOf("UInt32 _ChunkSizes 1;\n") > -1 ? " UInt32 _ChunkSizes 1;\n" : "")
+ + " Float32 _FillValue -999.0;\n"
+ + " Float64 accuracy 0.5;\n"
+ + " Float32 actual_range 0.009102137, 9.613417;\n"
+ + " String ancillary_variables \"significant_wave_height_qc\";\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String coordinates \"time lon lat depth\";\n"
+ + " Int32 epic_code 4061;\n"
+ + " String ioos_category \"Surface Waves\";\n"
+ + " Float64 is_dead 0.0;\n"
+ + " String long_name \"Significant Wave Height\";\n"
+ + " String measurement_type \"Computed\";\n"
+ + " Float64 precision 0.1;\n"
+ + " String short_name \"SWH\";\n"
+ + " String standard_name \"significant_height_of_wind_and_swell_waves\";\n"
+ + " String units \"m\";\n"
+ + " Float32 valid_range 0.0, 10.0;\n"
+ + " \\}\n"
+ + " significant_wave_height_qc \\{\n"
+ + (results.indexOf("UInt32 _ChunkSizes 1;\n") > -1 ? " UInt32 _ChunkSizes 1;\n" : "")
+ + " Byte _FillValue -128;\n"
+ + " String _Unsigned \"false\";\n"
+ + " Byte actual_range 0, 99;\n"
+ + " Float64 colorBarMaximum 128.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String coordinates \"time lon lat depth\";\n"
+ + " String flag_meanings \"quality_good out_of_range sensor_nonfunctional algorithm_failure_no_infl_pt\";\n"
+ + " Byte flag_values 0, 1, 2, 3;\n"
+ + " String intent \"data_quality\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Significant Wave Height Quality Control\";\n"
+ + " String short_name \"SWHQC\";\n"
+ + " String standard_name \"significant_height_of_wind_and_swell_waves data_quality\";\n"
+ + " String units \"1\";\n"
+ + " Byte valid_range -127, 127;\n"
+ + " \\}\n"
+ + " dominant_wave_period \\{\n"
+ + (results.indexOf("UInt32 _ChunkSizes 1;\n") > -1 ? " UInt32 _ChunkSizes 1;\n" : "")
+ + " Float32 _FillValue -999.0;\n"
+ + " Float64 accuracy 2.0;\n"
+ + " Float32 actual_range 1.032258, 16.0;\n"
+ + " String ancillary_variables \"dominant_wave_period_qc\";\n"
+ + " Float64 colorBarMaximum 40.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String coordinates \"time lon lat depth\";\n"
+ + " Int32 epic_code 4063;\n"
+ + " String ioos_category \"Surface Waves\";\n"
+ + " Float64 is_dead 0.0;\n"
+ + " String long_name \"Dominant Wave Period\";\n"
+ + " String measurement_type \"Computed\";\n"
+ + " Float64 precision 1.0;\n"
+ + " Float64 sensor_depth 0.0;\n"
+ + " String short_name \"DWP\";\n"
+ + " String standard_name \"period\";\n"
+ + " String units \"s\";\n"
+ + " Float32 valid_range 0.0, 32.0;\n"
+ + " \\}\n"
+ + " dominant_wave_period_qc \\{\n"
+ + (results.indexOf("UInt32 _ChunkSizes 1;\n") > -1 ? " UInt32 _ChunkSizes 1;\n" : "")
+ + " Byte _FillValue -128;\n"
+ + " String _Unsigned \"false\";\n"
+ + " Byte actual_range 0, 99;\n"
+ + " Float64 colorBarMaximum 128.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String coordinates \"time lon lat depth\";\n"
+ + " String flag_meanings \"quality_good out_of_range sensor_nonfunctional algorithm_failure_no_infl_pt\";\n"
+ + " Byte flag_values 0, 1, 2, 3;\n"
+ + " String intent \"data_quality\";\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"Dominant Wave Period Quality\";\n"
+ + " String short_name \"DWPQ\";\n"
+ + " String standard_name \"period data_quality\";\n"
+ + " String units \"1\";\n"
+ + " Byte valid_range -127, 127;\n"
+ + " \\}\n"
+ + " \\}\n"
+ + " NC_GLOBAL \\{\n"
+ + " String accelerometer_serial_number \"SUMAC0902A01107\";\n"
+ + " String algorithm_ids \"Waves_SWH_DWP_1.12: 12-Jun-2013 15:15:53\";\n"
+ + " Float64 averaging_period 17.07;\n"
+ + " String averaging_period_units \"Minutes\";\n"
+ + " Int32 breakout_id 7;\n"
+ + " String buffer_type \"accelerometer\";\n"
+ + " String cdm_data_type \"TimeSeries\";\n"
+ + " String cdm_timeseries_variables \"station\";\n"
+ + " String clock_time \"Center of period\";\n"
+ + " String contact \"nealp@maine.edu,ljm@umeoce.maine.edu,bfleming@umeoce.maine.edu\";\n"
+ + " String control_box_serial_number \"UMECB124\";\n"
+ + " String Conventions \"CF-1.6, COARDS, ACDD-1.3\";\n"
+ + " String creator_email \"nealp@maine.edu,ljm@umeoce.maine.edu,bfleming@umeoce.maine.edu\";\n"
+ + " String creator_name \"Neal Pettigrew\";\n"
+ + " String creator_url \"http://gyre.umeoce.maine.edu\";\n"
+ + " String depth_datum \"Sea Level\";\n"
+ + " Float64 Easternmost_Easting -70.42755;\n"
+ + " String featureType \"TimeSeries\";\n"
+ + " Float64 geospatial_lat_max 43.18044;\n"
+ + " Float64 geospatial_lat_min 43.18019;\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " Float64 geospatial_lon_max -70.42755;\n"
+ + " Float64 geospatial_lon_min -70.42779;\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " Float64 geospatial_vertical_max 0.0;\n"
+ + " Float64 geospatial_vertical_min 0.0;\n"
+ + " String geospatial_vertical_positive \"down\";\n"
+ + " String geospatial_vertical_units \"m\";\n"
+ + " String goes_platform_id \"044250DC\";\n"
+ + " String history \"2014-01-03 11:20:56: Parameter dominant_wave_period marked as non-functional as of julian day 56660.395833 \\(2014-01-03 09:30:00\\)\n"
+ + "2014-01-03 11:20:46: Parameter significant_wave_height marked as non-functional as of julian day 56660.395833 \\(2014-01-03 09:30:00\\)\n"
+ + "2013-06-25 11:57:07: Modified \\[lon,lat\\] to \\[-70.427787,43.180192\\].\n"
+ + "Thu Jun 20 16:50:01 2013: /usr/local/bin/ncrcat -d time,56463.65625,56464.00 B0125.accelerometer.realtime.nc B0125.accelerometer.realtime.nc.new\n"
+ + "\n"
+ + today
+ + "T.{8}Z \\(local files\\)\n"
+ + today
+ + "T.{8}Z http://localhost:8080/erddap/tabledap/UMaineAccB01.das\";\n"
+ + " String id \"B01\";\n"
+ + " String infoUrl \"http://gyre.umeoce.maine.edu/\";\n"
+ + " String institution \"Department of Physical Oceanography, School of Marine Sciences, University of Maine\";\n"
+ + " String institution_url \"http://gyre.umeoce.maine.edu\";\n"
+ + " Int32 instrument_number 0;\n"
+ + " String keywords \"accelerometer, b01, buoy, chemistry, chlorophyll, circulation, conductivity, control, currents, data, density, department, depth, dominant, dominant_wave_period data_quality, Earth Science > Oceans > Ocean Chemistry > Chlorophyll, Earth Science > Oceans > Ocean Chemistry > Oxygen, Earth Science > Oceans > Ocean Circulation > Ocean Currents, Earth Science > Oceans > Ocean Optics > Turbidity, Earth Science > Oceans > Ocean Pressure > Sea Level Pressure, Earth Science > Oceans > Ocean Temperature > Water Temperature, Earth Science > Oceans > Ocean Waves > Significant Wave Height, Earth Science > Oceans > Ocean Waves > Swells, Earth Science > Oceans > Ocean Waves > Wave Period, Earth Science > Oceans > Ocean Winds > Surface Winds, Earth Science > Oceans > Salinity/Density > Conductivity, Earth Science > Oceans > Salinity/Density > Density, Earth Science > Oceans > Salinity/Density > Salinity, height, level, maine, marine, name, o2, ocean, oceanography, oceans, optics, oxygen, period, physical, pressure, quality, salinity, school, sciences, sea, seawater, sensor, significant, significant_height_of_wind_and_swell_waves, significant_wave_height data_quality, station, station_name, surface, surface waves, swell, swells, temperature, time, turbidity, university, water, wave, waves, wind, winds\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " Float64 latitude 43.18019230109601;\n"
+ + " String license \"The data may be used and redistributed for free but is not intended\n"
+ + "for legal use, since it may contain inaccuracies. Neither the data\n"
+ + "Contributor, ERD, NOAA, nor the United States Government, nor any\n"
+ + "of their employees or contractors, makes any warranty, express or\n"
+ + "implied, including warranties of merchantability and fitness for a\n"
+ + "particular purpose, or assumes any legal liability for the accuracy,\n"
+ + "completeness, or usefulness, of this information.\";\n"
+ + " String long_name \"B01\";\n"
+ + " Float64 longitude -70.42778651970477;\n"
+ + " Float64 magnetic_variation -16.3;\n"
+ + " String mooring_site_desc \"Western Maine Shelf\";\n"
+ + " String mooring_site_id \"B0125\";\n"
+ + " String mooring_type \"Slack\";\n"
+ + " String naming_authority \"edu.maine\";\n"
+ + " Int32 nco_openmp_thread_number 1;\n"
+ + " String ndbc_site_id \"44030\";\n"
+ + " Float64 Northernmost_Northing 43.18044;\n"
+ + " Int32 number_observations_per_hour 2;\n"
+ + " Int32 number_samples_per_observation 2048;\n"
+ + " String position_datum \"WGS 84\";\n"
+ + " String processing \"realtime\";\n"
+ + " String project \"NERACOOS\";\n"
+ + " String project_url \"http://www.neracoos.org\";\n"
+ + " String publisher \"Department of Physical Oceanography, School of Marine Sciences, University of Maine\";\n"
+ + " String publisher_email \"info@neracoos.org\";\n"
+ + " String publisher_name \"Northeastern Regional Association of Coastal and Ocean Observing Systems \\(NERACOOS\\)\";\n"
+ + " String publisher_phone \"\\(603\\) 319 1785\";\n"
+ + " String publisher_url \"http://www.neracoos.org/\";\n"
+ + " String references \"http://gyre.umeoce.maine.edu/data/gomoos/buoy/doc/buoy_system_doc/buoy_system/book1.html\";\n"
+ + " String short_name \"B01\";\n"
+ + " String source \"Ocean Data Acquisition Systems \\(ODAS\\) Buoy\";\n"
+ + " String sourceUrl \"\\(local files\\)\";\n"
+ + " Float64 Southernmost_Northing 43.18019;\n"
+ + " String standard_name_vocabulary \"CF-1.6\";\n"
+ + " String station_name \"B01\";\n"
+ + " String station_photo \"http://gyre.umeoce.maine.edu/gomoos/images/generic_buoy.png\";\n"
+ + " String station_type \"Surface Mooring\";\n"
+ + " String subsetVariables \"station\";\n"
+ + " String summary \"Ocean observation data from the Northeastern Regional Association of Coastal & Ocean Observing Systems \\(NERACOOS\\). The NERACOOS region includes the northeast United States and Canadian Maritime provinces, as part of the United States Integrated Ocean Observing System \\(IOOS\\). These data are served by Unidata's Thematic Realtime Environmental Distributed Data Services \\(THREDDS\\) Data Server \\(TDS\\) in a variety of interoperable data services and output formats.\";\n"
+ + " String time_coverage_end \"2014-01-26T15:30:00Z\";\n"
+ + " String time_coverage_start \"2002-03-28T21:00:00Z\";\n"
+ + " String time_zone \"UTC\";\n"
+ + " String title \"University of Maine, B01 Accelerometer Buoy Sensor\";\n"
+ + " String uscg_light_list_letter \"B\";\n"
+ + " String uscg_light_list_number \"113\";\n"
+ + " Int32 watch_circle_radius 45;\n"
+ + " Float64 water_depth 62.0;\n"
+ + " Float64 Westernmost_Easting -70.42779;\n"
+ + " \\}\n"
+ + "\\}\n";
Test.ensureLinesMatch(results, expected, "results=\n" + results);
// .csv for start time
// " String time_coverage_start \"2002-03-28T21:00:00Z\";\n" +
userDapQuery = "&time<=2002-03-28T22:00:00Z";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_bridger1", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_bridger1",
+ ".csv");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "station,longitude,latitude,depth,time,time_created,time_modified,significant_wave_height,significant_wave_height_qc,dominant_wave_period,dominant_wave_period_qc\n"
- +
- ",degrees_east,degrees_north,m,UTC,UTC,UTC,m,1,s,1\n" +
- "B01,-70.42755,43.18044,0.0,2002-03-28T21:00:00Z,,,2.605597,0,10.66667,0\n" +
- "B01,-70.42755,43.18044,0.0,2002-03-28T22:00:00Z,,,1.720958,0,10.66667,0\n";
+ expected =
+ "station,longitude,latitude,depth,time,time_created,time_modified,significant_wave_height,significant_wave_height_qc,dominant_wave_period,dominant_wave_period_qc\n"
+ + ",degrees_east,degrees_north,m,UTC,UTC,UTC,m,1,s,1\n"
+ + "B01,-70.42755,43.18044,0.0,2002-03-28T21:00:00Z,,,2.605597,0,10.66667,0\n"
+ + "B01,-70.42755,43.18044,0.0,2002-03-28T22:00:00Z,,,1.720958,0,10.66667,0\n";
Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .csv for end time
// " String time_coverage_end \"2014-01-26T15:30:00Z\";\n" +
userDapQuery = "&time>=2014-01-26T15:00:00Z";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_bridger2", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_bridger2",
+ ".csv");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "station,longitude,latitude,depth,time,time_created,time_modified,significant_wave_height,significant_wave_height_qc,dominant_wave_period,dominant_wave_period_qc\n"
- +
- ",degrees_east,degrees_north,m,UTC,UTC,UTC,m,1,s,1\n" +
- "B01,-70.42779,43.18019,0.0,2014-01-26T15:00:00Z,2014-01-26T15:12:04Z,2014-01-26T15:12:04Z,1.3848689,0,4.0,0\n"
- +
- "B01,-70.42779,43.18019,0.0,2014-01-26T15:30:00Z,2014-01-26T15:39:05Z,2014-01-26T15:39:05Z,1.3212088,0,4.0,0\n";
+ expected =
+ "station,longitude,latitude,depth,time,time_created,time_modified,significant_wave_height,significant_wave_height_qc,dominant_wave_period,dominant_wave_period_qc\n"
+ + ",degrees_east,degrees_north,m,UTC,UTC,UTC,m,1,s,1\n"
+ + "B01,-70.42779,43.18019,0.0,2014-01-26T15:00:00Z,2014-01-26T15:12:04Z,2014-01-26T15:12:04Z,1.3848689,0,4.0,0\n"
+ + "B01,-70.42779,43.18019,0.0,2014-01-26T15:30:00Z,2014-01-26T15:39:05Z,2014-01-26T15:39:05Z,1.3212088,0,4.0,0\n";
Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .csv only outer vars
userDapQuery = "station&distinct()";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- EDStatic.fullTestCacheDirectory, eddTable.className() + "_bridger3", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ EDStatic.fullTestCacheDirectory,
+ eddTable.className() + "_bridger3",
+ ".csv");
results = File2.directReadFrom88591File(EDStatic.fullTestCacheDirectory + tName);
// String2.log(results);
- expected = "station\n" +
- "\n" +
- "B01\n";
+ expected = "station\n" + "\n" + "B01\n";
Test.ensureEqual(results, expected, "\nresults=\n" + results);
String2.log("\n*** EDDTableFromNcCFFiles.testBridger() finished.");
}
/**
- * This tests a Profile Contiguous Ragged Array with 7 sample_dimension
- * variables.
- * !!!Because the file has featureType=Profile, to be a cdm_data_type=Profile
- * in ERDDAP, it must have an altitude/depth variable.
- * That's fine if reading the z-obs variables.
- * But for others (e.g., temperature_obs) I change to cdm_data_type=TimeSeries
- * in datasets.xml.
- * Also, for wod_unique_cast, I changed cf_role=profile_id to
- * cf_role-timeseries_id
- * in datasets.xml.
+ * This tests a Profile Contiguous Ragged Array with 7 sample_dimension variables. !!!Because the
+ * file has featureType=Profile, for ERDDAP to treat it as cdm_data_type=Profile it must have an
+ * altitude/depth variable. That's fine when reading the z-obs variables. But for others (e.g.,
+ * temperature_obs) I changed it to cdm_data_type=TimeSeries in datasets.xml. Also, for
+ * wod_unique_cast, I changed cf_role=profile_id to cf_role=timeseries_id in datasets.xml.
*
- *
- * !!!This tests that ERDDAP can read the variables associated with
- * any one of the sample_dimensions (including the non zobs_dimension, here
- * temperature_obs)
- * and convert the cdm_data_type=Profile into TimeSeries (since no
- * altitude/depth)
- * and make a dataset from it.
+ * <p>!!!This tests that ERDDAP can read the variables associated with any one of the
+ * sample_dimensions (including the non zobs_dimension, here temperature_obs) and convert the
+ * cdm_data_type=Profile into TimeSeries (since no altitude/depth) and make a dataset from it.
*
* @throws Throwable if trouble
*/
@@ -1729,9 +1867,12 @@ void test7SampleDimensions() throws Throwable {
// From Ajay Krishnan, NCEI/NODC, from
// https://data.nodc.noaa.gov/thredds/catalog/testdata/wod_ragged/05052016/catalog.html?dataset=testdata/wod_ragged/05052016/ind199105_ctd.nc
// See low level reading test: Table.testReadNcCF7SampleDims()
- String fileName = Path
- .of(EDDTableFromNcCFFilesTests.class.getResource("/data/nccf/ncei/ind199105_ctd.nc").toURI())
- .toString();
+ String fileName =
+ Path.of(
+ EDDTableFromNcCFFilesTests.class
+ .getResource("/data/nccf/ncei/ind199105_ctd.nc")
+ .toURI())
+ .toString();
// String2.log(NcHelper.ncdump(fileName, "-h"));
String id = "testNcCF7SampleDimensions";
@@ -1739,315 +1880,352 @@ void test7SampleDimensions() throws Throwable {
EDDTable eddTable = (EDDTable) EDDTestDataset.gettestNcCF7SampleDimensions();
// .dds
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "",
- testCacheDir, eddTable.className() + "_7SampleDimensions", ".dds");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "",
+ testCacheDir,
+ eddTable.className() + "_7SampleDimensions",
+ ".dds");
results = File2.directReadFrom88591File(testCacheDir + tName);
// String2.log(results);
- expected = "Dataset {\n" +
- " Sequence {\n" +
- " Int32 wod_unique_cast;\n" +
- " Float32 latitude;\n" +
- " Float32 longitude;\n" +
- " Float64 time;\n" +
- " Int32 Access_no;\n" +
- " String Project;\n" +
- " String Platform;\n" +
- " String Institute;\n" +
- " Int32 Cast_Tow_number;\n" +
- " Int16 Temperature_WODprofileFlag;\n" +
- " String Temperature_Scale;\n" +
- " String Temperature_instrument;\n" +
- " Float32 Temperature;\n" +
- " Int16 Temperature_sigfigs;\n" +
- " Int16 Temperature_WODflag;\n" +
- " Int16 Temperature_origflag;\n" +
- " Int32 crs;\n" +
- " Int16 WODf;\n" +
- " Int16 WODfp;\n" +
- " Int16 WODfd;\n" +
- " } s;\n" +
- "} s;\n";
+ expected =
+ "Dataset {\n"
+ + " Sequence {\n"
+ + " Int32 wod_unique_cast;\n"
+ + " Float32 latitude;\n"
+ + " Float32 longitude;\n"
+ + " Float64 time;\n"
+ + " Int32 Access_no;\n"
+ + " String Project;\n"
+ + " String Platform;\n"
+ + " String Institute;\n"
+ + " Int32 Cast_Tow_number;\n"
+ + " Int16 Temperature_WODprofileFlag;\n"
+ + " String Temperature_Scale;\n"
+ + " String Temperature_instrument;\n"
+ + " Float32 Temperature;\n"
+ + " Int16 Temperature_sigfigs;\n"
+ + " Int16 Temperature_WODflag;\n"
+ + " Int16 Temperature_origflag;\n"
+ + " Int32 crs;\n"
+ + " Int16 WODf;\n"
+ + " Int16 WODfp;\n"
+ + " Int16 WODfd;\n"
+ + " } s;\n"
+ + "} s;\n";
Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .das
- tName = eddTable.makeNewFileForDapQuery(language, null, null, "",
- testCacheDir, eddTable.className() + "_7SampleDimensions", ".das");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ "",
+ testCacheDir,
+ eddTable.className() + "_7SampleDimensions",
+ ".das");
results = File2.directReadFrom88591File(testCacheDir + tName);
// String2.log(results);
- expected = "Attributes {\n" +
- " s {\n" +
- " wod_unique_cast {\n" +
- " Int32 actual_range 3390296, 10587111;\n" +
- " String cf_role \"timeseries_id\";\n" +
- " String ioos_category \"Other\";\n" +
- " }\n" +
- " latitude {\n" +
- " String _CoordinateAxisType \"Lat\";\n" +
- " String axis \"Y\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Latitude\";\n" +
- " String standard_name \"latitude\";\n" +
- " String units \"degrees_north\";\n" +
- " }\n" +
- " longitude {\n" +
- " String _CoordinateAxisType \"Lon\";\n" +
- " String axis \"X\";\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"Longitude\";\n" +
- " String standard_name \"longitude\";\n" +
- " String units \"degrees_east\";\n" +
- " }\n" +
- " time {\n" +
- " String _CoordinateAxisType \"Time\";\n" +
- " Float64 actual_range 6.73082939999e+8, 6.75733075002e+8;\n" +
- " String axis \"T\";\n" +
- " String ioos_category \"Time\";\n" +
- " String long_name \"Time\";\n" +
- " String standard_name \"time\";\n" +
- " String time_origin \"01-JAN-1970 00:00:00\";\n" +
- " String units \"seconds since 1970-01-01T00:00:00Z\";\n" +
- " }\n" +
- " Access_no {\n" +
- " Int32 actual_range 841, 9700263;\n" +
- " String comment \"used to find original data at NODC\";\n" +
- " String ioos_category \"Other\";\n" +
- " String long_name \"NODC_accession_number\";\n" +
- " String units_wod \"NODC_code\";\n" +
- " }\n" +
- " Project {\n" +
- " String comment \"name or acronym of project under which data were measured\";\n" +
- " String ioos_category \"Other\";\n" +
- " String long_name \"Project_name\";\n" +
- " }\n" +
- " Platform {\n" +
- " String comment \"name of platform from which measurements were taken\";\n" +
- " String ioos_category \"Other\";\n" +
- " String long_name \"Platform_name\";\n" +
- " }\n" +
- " Institute {\n" +
- " String comment \"name of institute which collected data\";\n" +
- " String ioos_category \"Other\";\n" +
- " String long_name \"Responsible_institute\";\n" +
- " }\n" +
- " Cast_Tow_number {\n" +
- " Int32 actual_range -2147483647, 1;\n" +
- " String comment \"originator assigned sequential cast or tow_no\";\n" +
- " String ioos_category \"Other\";\n" +
- " String long_name \"Cast_or_Tow_number\";\n" +
- " }\n" +
- " Temperature_WODprofileFlag {\n" +
- " String ioos_category \"Other\";\n" +
- " }\n" +
- " Temperature_Scale {\n" +
- " String ioos_category \"Other\";\n" +
- " String long_name \"Scale upon which values were measured\";\n" +
- " }\n" +
- " Temperature_instrument {\n" +
- " String ioos_category \"Other\";\n" +
- " }\n" +
- " Temperature {\n" +
- " Float32 actual_range 0.425, 31.042;\n" +
- " String coordinates \"time lat lon z\";\n" +
- " String grid_mapping \"crs\";\n" +
- " String ioos_category \"Other\";\n" +
- " String long_name \"Temperature\";\n" +
- " String standard_name \"sea_water_temperature\";\n" +
- " String units \"degree_C\";\n" +
- " }\n" +
- " Temperature_sigfigs {\n" +
- " Int16 actual_range 4, 6;\n" +
- " String ioos_category \"Other\";\n" +
- " }\n" +
- " Temperature_WODflag {\n" +
- " Int16 actual_range 0, 3;\n" +
- " String flag_definitions \"WODf\";\n" +
- " String ioos_category \"Other\";\n" +
- " }\n" +
- " Temperature_origflag {\n" +
- " Int16 actual_range -32767, -32767;\n" +
- " String flag_definitions \"Oflag\";\n" +
- " String ioos_category \"Other\";\n" +
- " }\n" +
- " crs {\n" +
- " Int32 actual_range -2147483647, -2147483647;\n" +
- " String epsg_code \"EPSG:4326\";\n" +
- " String grid_mapping_name \"latitude_longitude\";\n" +
- " Float32 inverse_flattening 298.25723;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"CRS\";\n" +
- " Float32 longitude_of_prime_meridian 0.0;\n" +
- " Float32 semi_major_axis 6378137.0;\n" +
- " }\n" +
- " WODf {\n" +
- " Int16 actual_range -32767, -32767;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String flag_meanings \"accepted range_out inversion gradient anomaly gradient+inversion range+inversion range+gradient range+anomaly range+inversion+gradient\";\n"
- +
- " Int16 flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"WOD_observation_flag\";\n" +
- " }\n" +
- " WODfp {\n" +
- " Int16 actual_range -32767, -32767;\n" +
- " Float64 colorBarMaximum 10.0;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String flag_meanings \"accepted annual_sd_out density_inversion cruise seasonal_sd_out monthly_sd_out annual+seasonal_sd_out anomaly_or_annual+monthly_sd_out seasonal+monthly_sd_out annual+seasonal+monthly_sd_out\";\n"
- +
- " Int16 flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n" +
- " String ioos_category \"Quality\";\n" +
- " String long_name \"WOD_profile_flag\";\n" +
- " }\n" +
- " WODfd {\n" +
- " Int16 actual_range -32767, -32767;\n" +
- " Float64 colorBarMaximum 2.5;\n" +
- " Float64 colorBarMinimum 0.0;\n" +
- " String flag_meanings \"accepted duplicate_or_inversion density_inversion\";\n" +
- " Int16 flag_values 0, 1, 2;\n" +
- " String ioos_category \"Location\";\n" +
- " String long_name \"WOD_depth_level_\";\n" +
- " }\n" +
- " }\n" +
- " NC_GLOBAL {\n" +
- " String cdm_data_type \"TimeSeries\";\n" +
- " String cdm_timeseries_variables \"wod_unique_cast,latitude,longitude,time,Access_no,Project,Platform,Institute,Cast_Tow_number,Temperature_WODprofileFlag,Temperature_Scale,Temperature_instrument\";\n"
- +
- " String Conventions \"CF-1.6, ACDD-1.3, COARDS\";\n" +
- " String creator_email \"OCLhelp@noaa.gov\";\n" +
- " String creator_name \"Ocean Climate Lab/NODC\";\n" +
- " String creator_url \"https://www.nodc.noaa.gov\";\n" +
- " String date_created \"2016-05-02\";\n" +
- " String date_modified \"2016-05-02\";\n" +
- " String featureType \"TimeSeries\";\n" +
- " String geospatial_lat_resolution \"point\";\n" +
- " String geospatial_lat_units \"degrees_north\";\n" +
- " String geospatial_lon_resolution \"point\";\n" +
- " String geospatial_lon_units \"degrees_east\";\n" +
- " String geospatial_vertical_positive \"down\";\n" +
- " String geospatial_vertical_units \"meters\";\n" +
- " String grid_mapping_epsg_code \"EPSG:4326\";\n" +
- " Float32 grid_mapping_inverse_flattening 298.25723;\n" +
- " Float32 grid_mapping_longitude_of_prime_meridian 0.0;\n" +
- " String grid_mapping_name \"latitude_longitude\";\n" +
- " Float32 grid_mapping_semi_major_axis 6378137.0;\n" +
- " String history";
- Test.ensureEqual(results.substring(0, expected.length()), expected,
- "results=\n" + results);
+ expected =
+ "Attributes {\n"
+ + " s {\n"
+ + " wod_unique_cast {\n"
+ + " Int32 actual_range 3390296, 10587111;\n"
+ + " String cf_role \"timeseries_id\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " }\n"
+ + " latitude {\n"
+ + " String _CoordinateAxisType \"Lat\";\n"
+ + " String axis \"Y\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Latitude\";\n"
+ + " String standard_name \"latitude\";\n"
+ + " String units \"degrees_north\";\n"
+ + " }\n"
+ + " longitude {\n"
+ + " String _CoordinateAxisType \"Lon\";\n"
+ + " String axis \"X\";\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"Longitude\";\n"
+ + " String standard_name \"longitude\";\n"
+ + " String units \"degrees_east\";\n"
+ + " }\n"
+ + " time {\n"
+ + " String _CoordinateAxisType \"Time\";\n"
+ + " Float64 actual_range 6.73082939999e+8, 6.75733075002e+8;\n"
+ + " String axis \"T\";\n"
+ + " String ioos_category \"Time\";\n"
+ + " String long_name \"Time\";\n"
+ + " String standard_name \"time\";\n"
+ + " String time_origin \"01-JAN-1970 00:00:00\";\n"
+ + " String units \"seconds since 1970-01-01T00:00:00Z\";\n"
+ + " }\n"
+ + " Access_no {\n"
+ + " Int32 actual_range 841, 9700263;\n"
+ + " String comment \"used to find original data at NODC\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " String long_name \"NODC_accession_number\";\n"
+ + " String units_wod \"NODC_code\";\n"
+ + " }\n"
+ + " Project {\n"
+ + " String comment \"name or acronym of project under which data were measured\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " String long_name \"Project_name\";\n"
+ + " }\n"
+ + " Platform {\n"
+ + " String comment \"name of platform from which measurements were taken\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " String long_name \"Platform_name\";\n"
+ + " }\n"
+ + " Institute {\n"
+ + " String comment \"name of institute which collected data\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " String long_name \"Responsible_institute\";\n"
+ + " }\n"
+ + " Cast_Tow_number {\n"
+ + " Int32 actual_range -2147483647, 1;\n"
+ + " String comment \"originator assigned sequential cast or tow_no\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " String long_name \"Cast_or_Tow_number\";\n"
+ + " }\n"
+ + " Temperature_WODprofileFlag {\n"
+ + " String ioos_category \"Other\";\n"
+ + " }\n"
+ + " Temperature_Scale {\n"
+ + " String ioos_category \"Other\";\n"
+ + " String long_name \"Scale upon which values were measured\";\n"
+ + " }\n"
+ + " Temperature_instrument {\n"
+ + " String ioos_category \"Other\";\n"
+ + " }\n"
+ + " Temperature {\n"
+ + " Float32 actual_range 0.425, 31.042;\n"
+ + " String coordinates \"time lat lon z\";\n"
+ + " String grid_mapping \"crs\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " String long_name \"Temperature\";\n"
+ + " String standard_name \"sea_water_temperature\";\n"
+ + " String units \"degree_C\";\n"
+ + " }\n"
+ + " Temperature_sigfigs {\n"
+ + " Int16 actual_range 4, 6;\n"
+ + " String ioos_category \"Other\";\n"
+ + " }\n"
+ + " Temperature_WODflag {\n"
+ + " Int16 actual_range 0, 3;\n"
+ + " String flag_definitions \"WODf\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " }\n"
+ + " Temperature_origflag {\n"
+ + " Int16 actual_range -32767, -32767;\n"
+ + " String flag_definitions \"Oflag\";\n"
+ + " String ioos_category \"Other\";\n"
+ + " }\n"
+ + " crs {\n"
+ + " Int32 actual_range -2147483647, -2147483647;\n"
+ + " String epsg_code \"EPSG:4326\";\n"
+ + " String grid_mapping_name \"latitude_longitude\";\n"
+ + " Float32 inverse_flattening 298.25723;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"CRS\";\n"
+ + " Float32 longitude_of_prime_meridian 0.0;\n"
+ + " Float32 semi_major_axis 6378137.0;\n"
+ + " }\n"
+ + " WODf {\n"
+ + " Int16 actual_range -32767, -32767;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String flag_meanings \"accepted range_out inversion gradient anomaly gradient+inversion range+inversion range+gradient range+anomaly range+inversion+gradient\";\n"
+ + " Int16 flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"WOD_observation_flag\";\n"
+ + " }\n"
+ + " WODfp {\n"
+ + " Int16 actual_range -32767, -32767;\n"
+ + " Float64 colorBarMaximum 10.0;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String flag_meanings \"accepted annual_sd_out density_inversion cruise seasonal_sd_out monthly_sd_out annual+seasonal_sd_out anomaly_or_annual+monthly_sd_out seasonal+monthly_sd_out annual+seasonal+monthly_sd_out\";\n"
+ + " Int16 flag_values 0, 1, 2, 3, 4, 5, 6, 7, 8, 9;\n"
+ + " String ioos_category \"Quality\";\n"
+ + " String long_name \"WOD_profile_flag\";\n"
+ + " }\n"
+ + " WODfd {\n"
+ + " Int16 actual_range -32767, -32767;\n"
+ + " Float64 colorBarMaximum 2.5;\n"
+ + " Float64 colorBarMinimum 0.0;\n"
+ + " String flag_meanings \"accepted duplicate_or_inversion density_inversion\";\n"
+ + " Int16 flag_values 0, 1, 2;\n"
+ + " String ioos_category \"Location\";\n"
+ + " String long_name \"WOD_depth_level_\";\n"
+ + " }\n"
+ + " }\n"
+ + " NC_GLOBAL {\n"
+ + " String cdm_data_type \"TimeSeries\";\n"
+ + " String cdm_timeseries_variables \"wod_unique_cast,latitude,longitude,time,Access_no,Project,Platform,Institute,Cast_Tow_number,Temperature_WODprofileFlag,Temperature_Scale,Temperature_instrument\";\n"
+ + " String Conventions \"CF-1.6, ACDD-1.3, COARDS\";\n"
+ + " String creator_email \"OCLhelp@noaa.gov\";\n"
+ + " String creator_name \"Ocean Climate Lab/NODC\";\n"
+ + " String creator_url \"https://www.nodc.noaa.gov\";\n"
+ + " String date_created \"2016-05-02\";\n"
+ + " String date_modified \"2016-05-02\";\n"
+ + " String featureType \"TimeSeries\";\n"
+ + " String geospatial_lat_resolution \"point\";\n"
+ + " String geospatial_lat_units \"degrees_north\";\n"
+ + " String geospatial_lon_resolution \"point\";\n"
+ + " String geospatial_lon_units \"degrees_east\";\n"
+ + " String geospatial_vertical_positive \"down\";\n"
+ + " String geospatial_vertical_units \"meters\";\n"
+ + " String grid_mapping_epsg_code \"EPSG:4326\";\n"
+ + " Float32 grid_mapping_inverse_flattening 298.25723;\n"
+ + " Float32 grid_mapping_longitude_of_prime_meridian 0.0;\n"
+ + " String grid_mapping_name \"latitude_longitude\";\n"
+ + " Float32 grid_mapping_semi_major_axis 6378137.0;\n"
+ + " String history";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// "2016-06-10T18:38:03Z (local files)
// 2016-06-10T18:38:03Z
// http://localhost:8080/cwexperimental/tabledap/testNcCF7SampleDimensions.das";
- expected = "String id \"ind199105_ctd.nc\";\n" +
- " String infoUrl \"https://www.nodc.noaa.gov/OC5/WOD/pr_wod.html\";\n" +
- " String institution \"National Oceanographic Data Center(NODC), NOAA\";\n" +
- " String keywords \"temperature\";\n" +
- " String keywords_vocabulary \"GCMD Science Keywords\";\n" +
- " String license \"The data may be used and redistributed for free but is not intended\n" +
- "for legal use, since it may contain inaccuracies. Neither the data\n" +
- "Contributor, ERD, NOAA, nor the United States Government, nor any\n" +
- "of their employees or contractors, makes any warranty, express or\n" +
- "implied, including warranties of merchantability and fitness for a\n" +
- "particular purpose, or assumes any legal liability for the accuracy,\n" +
- "completeness, or usefulness, of this information.\";\n" +
- " String naming_authority \"gov.noaa.nodc\";\n" +
- " String project \"World Ocean Database\";\n" +
- " String publisher_email \"NODC.Services@noaa.gov\";\n" +
- " String publisher_name \"US DOC; NESDIS; NATIONAL OCEANOGRAPHIC DATA CENTER - IN295\";\n" +
- " String publisher_url \"https://www.nodc.noaa.gov\";\n" +
- " String references \"World Ocean Database 2013. URL:https://data.nodc.noaa.gov/woa/WOD/DOC/wod_intro.pdf\";\n"
- +
- " String source \"World Ocean Database\";\n" +
- " String sourceUrl \"(local files)\";\n" +
- " String standard_name_vocabulary \"CF Standard Name Table v70\";\n" +
- " String subsetVariables \"wod_unique_cast,latitude,longitude,time,Access_no,Project,Platform,Institute,Cast_Tow_number,Temperature_WODprofileFlag,Temperature_Scale,Temperature_instrument\";\n"
- +
- " String summary \"Test WOD .ncCF file\";\n" +
- " String time_coverage_end \"1991-05-31T23:37:55Z\";\n" +
- " String time_coverage_start \"1991-05-01T07:28:59Z\";\n" +
- " String title \"Test WOD .ncCF file\";\n" +
- " }\n" +
- "}\n";
+ expected =
+ "String id \"ind199105_ctd.nc\";\n"
+ + " String infoUrl \"https://www.nodc.noaa.gov/OC5/WOD/pr_wod.html\";\n"
+ + " String institution \"National Oceanographic Data Center(NODC), NOAA\";\n"
+ + " String keywords \"temperature\";\n"
+ + " String keywords_vocabulary \"GCMD Science Keywords\";\n"
+ + " String license \"The data may be used and redistributed for free but is not intended\n"
+ + "for legal use, since it may contain inaccuracies. Neither the data\n"
+ + "Contributor, ERD, NOAA, nor the United States Government, nor any\n"
+ + "of their employees or contractors, makes any warranty, express or\n"
+ + "implied, including warranties of merchantability and fitness for a\n"
+ + "particular purpose, or assumes any legal liability for the accuracy,\n"
+ + "completeness, or usefulness, of this information.\";\n"
+ + " String naming_authority \"gov.noaa.nodc\";\n"
+ + " String project \"World Ocean Database\";\n"
+ + " String publisher_email \"NODC.Services@noaa.gov\";\n"
+ + " String publisher_name \"US DOC; NESDIS; NATIONAL OCEANOGRAPHIC DATA CENTER - IN295\";\n"
+ + " String publisher_url \"https://www.nodc.noaa.gov\";\n"
+ + " String references \"World Ocean Database 2013. URL:https://data.nodc.noaa.gov/woa/WOD/DOC/wod_intro.pdf\";\n"
+ + " String source \"World Ocean Database\";\n"
+ + " String sourceUrl \"(local files)\";\n"
+ + " String standard_name_vocabulary \"CF Standard Name Table v70\";\n"
+ + " String subsetVariables \"wod_unique_cast,latitude,longitude,time,Access_no,Project,Platform,Institute,Cast_Tow_number,Temperature_WODprofileFlag,Temperature_Scale,Temperature_instrument\";\n"
+ + " String summary \"Test WOD .ncCF file\";\n"
+ + " String time_coverage_end \"1991-05-31T23:37:55Z\";\n"
+ + " String time_coverage_start \"1991-05-01T07:28:59Z\";\n"
+ + " String title \"Test WOD .ncCF file\";\n"
+ + " }\n"
+ + "}\n";
int po = Math.max(0, results.indexOf(expected.substring(0, 20)));
Test.ensureEqual(results.substring(po), expected, "results=\n" + results);
// .csv all vars
userDapQuery = "&time=1991-05-02T02:08:00Z";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- testCacheDir, eddTable.className() + "_7SampleDimensions_all", ".csv");
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ testCacheDir,
+ eddTable.className() + "_7SampleDimensions_all",
+ ".csv");
results = File2.directReadFrom88591File(testCacheDir + tName);
// String2.log(results);
- expected = "wod_unique_cast,latitude,longitude,time,Access_no,Project,Platform,Institute," +
- "Cast_Tow_number,Temperature_WODprofileFlag,Temperature_Scale,Temperature_instrument," +
- "Temperature,Temperature_sigfigs,Temperature_WODflag,Temperature_origflag,crs,WODf,WODfp,WODfd\n" +
- ",degrees_north,degrees_east,UTC,,,,,,,,,degree_C,,,,,,,\n" +
- "3390310,NaN,NaN,1991-05-02T02:08:00Z,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE)," +
- "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388)," +
- "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,NaN,,,7.738,5,0,-32767,-2147483647,-32767,-32767,-32767\n"
- +
- "3390310,NaN,NaN,1991-05-02T02:08:00Z,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE)," +
- "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388)," +
- "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,NaN,,,7.74,5,0,-32767,-2147483647,-32767,-32767,-32767\n"
- +
- "3390310,NaN,NaN,1991-05-02T02:08:00Z,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE)," +
- "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388)," +
- "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,NaN,,,7.713,5,0,-32767,-2147483647,-32767,-32767,-32767\n";
- Test.ensureEqual(results.substring(0, expected.length()), expected,
- "results=\n" + results);
+ expected =
+ "wod_unique_cast,latitude,longitude,time,Access_no,Project,Platform,Institute,"
+ + "Cast_Tow_number,Temperature_WODprofileFlag,Temperature_Scale,Temperature_instrument,"
+ + "Temperature,Temperature_sigfigs,Temperature_WODflag,Temperature_origflag,crs,WODf,WODfp,WODfd\n"
+ + ",degrees_north,degrees_east,UTC,,,,,,,,,degree_C,,,,,,,\n"
+ + "3390310,NaN,NaN,1991-05-02T02:08:00Z,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE),"
+ + "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388),"
+ + "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,NaN,,,7.738,5,0,-32767,-2147483647,-32767,-32767,-32767\n"
+ + "3390310,NaN,NaN,1991-05-02T02:08:00Z,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE),"
+ + "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388),"
+ + "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,NaN,,,7.74,5,0,-32767,-2147483647,-32767,-32767,-32767\n"
+ + "3390310,NaN,NaN,1991-05-02T02:08:00Z,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE),"
+ + "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388),"
+ + "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,NaN,,,7.713,5,0,-32767,-2147483647,-32767,-32767,-32767\n";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .csv outer and inner vars
- userDapQuery = "wod_unique_cast,latitude,longitude,time,Temperature" + scalarVars +
- "&time=1991-05-02T02:08:00Z";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- testCacheDir, eddTable.className() + "_7SampleDimensions_outerInner", ".csv");
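+ // scalarVars evidently expands to ",crs,WODf,WODfd" (see the expected CSV header below).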
+ userDapQuery =
+ "wod_unique_cast,latitude,longitude,time,Temperature"
+ + scalarVars
+ + "&time=1991-05-02T02:08:00Z";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ testCacheDir,
+ eddTable.className() + "_7SampleDimensions_outerInner",
+ ".csv");
results = File2.directReadFrom88591File(testCacheDir + tName);
// String2.log(results);
- expected = "wod_unique_cast,latitude,longitude,time,Temperature,crs,WODf,WODfd\n" +
- ",degrees_north,degrees_east,UTC,degree_C,,,\n" +
- "3390310,NaN,NaN,1991-05-02T02:08:00Z,7.738,-2147483647,-32767,-32767\n" +
- "3390310,NaN,NaN,1991-05-02T02:08:00Z,7.74,-2147483647,-32767,-32767\n" +
- "3390310,NaN,NaN,1991-05-02T02:08:00Z,7.713,-2147483647,-32767,-32767\n";
- Test.ensureEqual(results.substring(0, expected.length()), expected,
- "results=\n" + results);
+ expected =
+ "wod_unique_cast,latitude,longitude,time,Temperature,crs,WODf,WODfd\n"
+ + ",degrees_north,degrees_east,UTC,degree_C,,,\n"
+ + "3390310,NaN,NaN,1991-05-02T02:08:00Z,7.738,-2147483647,-32767,-32767\n"
+ + "3390310,NaN,NaN,1991-05-02T02:08:00Z,7.74,-2147483647,-32767,-32767\n"
+ + "3390310,NaN,NaN,1991-05-02T02:08:00Z,7.713,-2147483647,-32767,-32767\n";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .csv outer vars only
- userDapQuery = "wod_unique_cast,latitude,longitude,time" + scalarVars +
- "&time=1991-05-02T02:08:00Z";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- testCacheDir, eddTable.className() + "_7SampleDimensions_outer", ".csv");
+ userDapQuery =
+ "wod_unique_cast,latitude,longitude,time" + scalarVars + "&time=1991-05-02T02:08:00Z";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ testCacheDir,
+ eddTable.className() + "_7SampleDimensions_outer",
+ ".csv");
results = File2.directReadFrom88591File(testCacheDir + tName);
// String2.log(results);
- expected = "wod_unique_cast,latitude,longitude,time,crs,WODf,WODfd\n" +
- ",degrees_north,degrees_east,UTC,,,\n" +
- "3390310,NaN,NaN,1991-05-02T02:08:00Z,-2147483647,-32767,-32767\n";
- Test.ensureEqual(results.substring(0, expected.length()), expected,
- "results=\n" + results);
+ expected =
+ "wod_unique_cast,latitude,longitude,time,crs,WODf,WODfd\n"
+ + ",degrees_north,degrees_east,UTC,,,\n"
+ + "3390310,NaN,NaN,1991-05-02T02:08:00Z,-2147483647,-32767,-32767\n";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results);
// .csv scalar vars only
- userDapQuery = "crs,WODf,WODfd" +
- "&time=1991-05-02T02:08:00Z";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- testCacheDir, eddTable.className() + "_7SampleDimensions_scalar", ".csv");
+ userDapQuery = "crs,WODf,WODfd" + "&time=1991-05-02T02:08:00Z";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ testCacheDir,
+ eddTable.className() + "_7SampleDimensions_scalar",
+ ".csv");
results = File2.directReadFrom88591File(testCacheDir + tName);
// String2.log(results);
- expected = "crs,WODf,WODfd\n" +
- ",,\n" +
- "-2147483647,-32767,-32767\n";
+ expected = "crs,WODf,WODfd\n" + ",,\n" + "-2147483647,-32767,-32767\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// .csv inner vars only
- userDapQuery = "Temperature" +
- "&time=1991-05-02T02:08:00Z";
- tName = eddTable.makeNewFileForDapQuery(language, null, null, userDapQuery,
- testCacheDir, eddTable.className() + "_7SampleDimensions_inner", ".csv");
+ userDapQuery = "Temperature" + "&time=1991-05-02T02:08:00Z";
+ tName =
+ eddTable.makeNewFileForDapQuery(
+ language,
+ null,
+ null,
+ userDapQuery,
+ testCacheDir,
+ eddTable.className() + "_7SampleDimensions_inner",
+ ".csv");
results = File2.directReadFrom88591File(testCacheDir + tName);
// String2.log(results);
- expected = "Temperature\n" +
- "degree_C\n" +
- "7.738\n" +
- "7.74\n" +
- "7.713\n";
- Test.ensureEqual(results.substring(0, expected.length()), expected,
- "\nresults=\n" + results);
+ expected = "Temperature\n" + "degree_C\n" + "7.738\n" + "7.74\n" + "7.713\n";
+ Test.ensureEqual(results.substring(0, expected.length()), expected, "\nresults=\n" + results);
String2.log("\n*** EDDTableFromNcCFFiles.test7SampleDimensions() finished.");
}
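(For orientation: every block above follows the same request-and-compare pattern. A minimal sketch assembled only from the calls visible in this test; the query string, file-name prefix, and expected header below are illustrative, not taken from the dataset.)

    String query = "wod_unique_cast,latitude,longitude,time&time=1991-05-02T02:08:00Z";
    String expectedStart = "wod_unique_cast,latitude,longitude,time\n"; // header row only
    // Write the query result to a cache file; makeNewFileForDapQuery returns the file name.
    String name =
        eddTable.makeNewFileForDapQuery(
            language, null, null, query, testCacheDir, "example_query", ".csv");
    // Read it back (these files are ISO-8859-1) and compare just the beginning.
    String csv = File2.directReadFrom88591File(testCacheDir + name);
    Test.ensureEqual(csv.substring(0, expectedStart.length()), expectedStart, "results=\n" + csv);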
@@ -2071,295 +2249,303 @@ void testNcml() throws Throwable {
// ncdump the .nc file
String2.log("Here's the ncdump of " + baseName + ".nc:");
results = NcHelper.ncdump(baseName + ".nc", "-h");
- expected = "netcdf CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc {\n" +
- " dimensions:\n" +
- " time = UNLIMITED; // (101 currently)\n" +
- " one = 1;\n" +
- " ni_Srec = 93;\n" +
- " lat = 1;\n" +
- " lon = 1;\n" +
- " variables:\n" +
- " double Cond(time=101);\n" +
- " :long_name = \"Conductividad\";\n" +
- " :units = \"S/m\";\n" +
- "\n" +
- " double Pres(time=101);\n" +
- " :long_name = \"Presion\";\n" +
- " :units = \"dBar\";\n" +
- "\n" +
- " double ProfDiseno(one=1);\n" +
- " :long_name = \"Profundidad de diseno\";\n" +
- " :units = \"m\";\n" +
- "\n" +
- " double ProfEstimada(one=1);\n" +
- " :long_name = \"Profundidad estimada\";\n" +
- " :units = \"m\";\n" +
- "\n" +
- " double Sal(time=101);\n" +
- " :long_name = \"Salinidad\";\n" +
- " :units = \"PSU\";\n" +
- "\n" +
- " double Temp(time=101);\n" +
- " :long_name = \"Temperatura\";\n" +
- " :units = \"\uFFFDC\";\n" + // 65533 which is "unknown character". Not right!!!???
- "\n" +
- " double TiranteDiseno(one=1);\n" +
- " :long_name = \"Tirante diseno\";\n" +
- " :units = \"m\";\n" +
- "\n" +
- " double TiranteEstimado(one=1);\n" +
- " :long_name = \"Tirante estimado\";\n" +
- " :units = \"m\";\n" +
- "\n" +
- " double i_Salrec(ni_Srec=93);\n" +
- " :long_name = \"Indices salinidad reconstruida\";\n" +
- " :units = \"N/A\";\n" +
- "\n" +
- " double jd(time=101);\n" +
- " :long_name = \"tiempo en dias Julianos\";\n" +
- " :units = \"days since 0000-01-01 00:00:00 \";\n" +
- " :time_origin = \"0000-01-01 00:00:00\";\n" +
- "\n" +
- " double lat(lat=1);\n" +
- " :long_name = \"Latitud\";\n" +
- " :Units = \"degrees_north\";\n" +
- "\n" +
- " double lon(lon=1);\n" +
- " :long_name = \"Longitud\";\n" +
- " :units = \"degrees_east\";\n" +
- "\n" +
- " double var_pres(one=1);\n" +
- " :long_name = \"Bandera presion\";\n" +
- " :units = \"N/A\";\n" +
- "\n" +
- " // global attributes:\n" +
- " :Title = \"Datos MCT para el anclaje CTZ-T500 crucero CANEK 14\";\n" +
- " :Anclaje = \"CTZ-T500\";\n" +
- " :Equipo = \"MCT\";\n" +
- " :Numero_de_serie = \"5649\";\n" +
- " :Source_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.mat\";\n" +
- " :Final_NC_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc\";\n" +
- " :Creation_date = \"06-Aug-2014 12:22:59\";\n" +
- " :NCO = \"\\\"4.5.2\\\"\";\n" +
- "}\n";
+ expected =
+ "netcdf CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc {\n"
+ + " dimensions:\n"
+ + " time = UNLIMITED; // (101 currently)\n"
+ + " one = 1;\n"
+ + " ni_Srec = 93;\n"
+ + " lat = 1;\n"
+ + " lon = 1;\n"
+ + " variables:\n"
+ + " double Cond(time=101);\n"
+ + " :long_name = \"Conductividad\";\n"
+ + " :units = \"S/m\";\n"
+ + "\n"
+ + " double Pres(time=101);\n"
+ + " :long_name = \"Presion\";\n"
+ + " :units = \"dBar\";\n"
+ + "\n"
+ + " double ProfDiseno(one=1);\n"
+ + " :long_name = \"Profundidad de diseno\";\n"
+ + " :units = \"m\";\n"
+ + "\n"
+ + " double ProfEstimada(one=1);\n"
+ + " :long_name = \"Profundidad estimada\";\n"
+ + " :units = \"m\";\n"
+ + "\n"
+ + " double Sal(time=101);\n"
+ + " :long_name = \"Salinidad\";\n"
+ + " :units = \"PSU\";\n"
+ + "\n"
+ + " double Temp(time=101);\n"
+ + " :long_name = \"Temperatura\";\n"
+ + " :units = \"\uFFFDC\";\n"
+ + // 65533 is the Unicode replacement ("unknown") character: the degree sign from the source file was not decoded correctly. Not right!
+ "\n"
+ + " double TiranteDiseno(one=1);\n"
+ + " :long_name = \"Tirante diseno\";\n"
+ + " :units = \"m\";\n"
+ + "\n"
+ + " double TiranteEstimado(one=1);\n"
+ + " :long_name = \"Tirante estimado\";\n"
+ + " :units = \"m\";\n"
+ + "\n"
+ + " double i_Salrec(ni_Srec=93);\n"
+ + " :long_name = \"Indices salinidad reconstruida\";\n"
+ + " :units = \"N/A\";\n"
+ + "\n"
+ + " double jd(time=101);\n"
+ + " :long_name = \"tiempo en dias Julianos\";\n"
+ + " :units = \"days since 0000-01-01 00:00:00 \";\n"
+ + " :time_origin = \"0000-01-01 00:00:00\";\n"
+ + "\n"
+ + " double lat(lat=1);\n"
+ + " :long_name = \"Latitud\";\n"
+ + " :Units = \"degrees_north\";\n"
+ + "\n"
+ + " double lon(lon=1);\n"
+ + " :long_name = \"Longitud\";\n"
+ + " :units = \"degrees_east\";\n"
+ + "\n"
+ + " double var_pres(one=1);\n"
+ + " :long_name = \"Bandera presion\";\n"
+ + " :units = \"N/A\";\n"
+ + "\n"
+ + " // global attributes:\n"
+ + " :Title = \"Datos MCT para el anclaje CTZ-T500 crucero CANEK 14\";\n"
+ + " :Anclaje = \"CTZ-T500\";\n"
+ + " :Equipo = \"MCT\";\n"
+ + " :Numero_de_serie = \"5649\";\n"
+ + " :Source_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.mat\";\n"
+ + " :Final_NC_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc\";\n"
+ + " :Creation_date = \"06-Aug-2014 12:22:59\";\n"
+ + " :NCO = \"\\\"4.5.2\\\"\";\n"
+ + "}\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// ncdump the .ncml file
String2.log("\nHere's the ncdump of " + baseName + ".ncml:");
results = NcHelper.ncdump(baseName + ".ncml", "-h");
- expected = "netcdf CTZ-T500-MCT-NS5649-Z408-INS12-REC14.ncml {\n" +
- " dimensions:\n" +
- " time = 101;\n" +
- " station = 1;\n" +
- " variables:\n" +
- " double Cond(station=1, time=101);\n" +
- " :long_name = \"Conductividad\";\n" +
- " :units = \"S/m\";\n" +
- " :standard_name = \"sea_water_electrical_conductivity\";\n" +
- " :coordinates = \"time latitude longitude z\";\n" +
- "\n" +
- " double Pres(station=1, time=101);\n" +
- " :long_name = \"Presion\";\n" +
- " :units = \"dBar\";\n" +
- " :standard_name = \"sea_water_pressure\";\n" +
- " :coordinates = \"time latitude longitude z\";\n" +
- "\n" +
- " double Temp(station=1, time=101);\n" +
- " :long_name = \"Temperatura\";\n" +
- " :units = \"degree_celsius\";\n" +
- " :standard_name = \"sea_water_temperature\";\n" +
- " :coordinates = \"time latitude longitude z\";\n" +
- "\n" +
- " double Sal(station=1, time=101);\n" +
- " :long_name = \"Salinidad\";\n" +
- " :units = \"PSU\";\n" +
- " :standard_name = \"sea_water_salinity\";\n" +
- " :coordinates = \"time latitude longitude z\";\n" +
- "\n" +
- " double ProfDiseno(station=1);\n" +
- " :long_name = \"Profundidad de diseno\";\n" +
- " :units = \"m\";\n" +
- "\n" +
- " double TiranteDiseno(station=1);\n" +
- " :long_name = \"Tirante diseno\";\n" +
- " :units = \"m\";\n" +
- "\n" +
- " double TiranteEstimado(station=1);\n" +
- " :long_name = \"Tirante estimado\";\n" +
- " :units = \"m\";\n" +
- "\n" +
- " double var_pres(station=1);\n" +
- " :long_name = \"Bandera presion\";\n" +
- " :units = \"N/A\";\n" +
- "\n" +
- " double station(station=1);\n" + // 2020-01-23 this was int before netcdf-java 5.2!
- " :long_name = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\";\n" +
- " :cf_role = \"timeseries_id\";\n" +
- "\n" +
- " double time(station=1, time=101);\n" +
- " :long_name = \"tiempo en dias Julianos\";\n" +
- " :units = \"days since 0000-01-01 00:00:00 \";\n" +
- " :time_origin = \"0000-01-01 00:00:00\";\n" +
- " :standard_name = \"time\";\n" +
- " :axis = \"T\";\n" +
- " :calendar = \"julian\";\n" +
- " :_CoordinateAxisType = \"Time\";\n" +
- "\n" +
- " double latitude(station=1);\n" +
- " :long_name = \"Latitud\";\n" +
- " :standard_name = \"latitude\";\n" +
- " :units = \"degrees_north\";\n" +
- " :axis = \"Y\";\n" +
- " :_CoordinateAxisType = \"Lat\";\n" +
- "\n" +
- " double longitude(station=1);\n" +
- " :long_name = \"Longitud\";\n" +
- " :units = \"degrees_east\";\n" +
- " :standard_name = \"longitude\";\n" +
- " :axis = \"X\";\n" +
- " :_CoordinateAxisType = \"Lon\";\n" +
- "\n" +
- " double z(station=1);\n" +
- " :long_name = \"profundidad\";\n" +
- " :units = \"m\";\n" +
- " :standard_name = \"depth\";\n" +
- " :axis = \"Z\";\n" +
- " :_CoordinateAxisType = \"Height\";\n" +
- "\n" +
- " // global attributes:\n" +
- " :Anclaje = \"CTZ-T500\";\n" +
- " :Equipo = \"MCT\";\n" +
- " :Numero_de_serie = \"5649\";\n" +
- " :Source_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.mat\";\n" +
- " :Final_NC_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc\";\n" +
- " :NCO = \"\\\"4.5.2\\\"\";\n" +
- " :Conventions = \"CF-1.6\";\n" +
- " :featureType = \"timeSeries\";\n" +
- " :standard_name_vocabulary = \"CF-1.6\";\n" +
- " :title = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\";\n" +
- " :cdm_data_type = \"TimeSeries\";\n" +
- " :cdm_timeseries_variables = \"station\";\n" +
- " :date_created = \"06-Aug-2014 12:22:59\";\n" +
- " :_CoordSysBuilder = \"ucar.nc2.dataset.conv.CF1Convention\";\n" +
- "}\n";
+ expected =
+ "netcdf CTZ-T500-MCT-NS5649-Z408-INS12-REC14.ncml {\n"
+ + " dimensions:\n"
+ + " time = 101;\n"
+ + " station = 1;\n"
+ + " variables:\n"
+ + " double Cond(station=1, time=101);\n"
+ + " :long_name = \"Conductividad\";\n"
+ + " :units = \"S/m\";\n"
+ + " :standard_name = \"sea_water_electrical_conductivity\";\n"
+ + " :coordinates = \"time latitude longitude z\";\n"
+ + "\n"
+ + " double Pres(station=1, time=101);\n"
+ + " :long_name = \"Presion\";\n"
+ + " :units = \"dBar\";\n"
+ + " :standard_name = \"sea_water_pressure\";\n"
+ + " :coordinates = \"time latitude longitude z\";\n"
+ + "\n"
+ + " double Temp(station=1, time=101);\n"
+ + " :long_name = \"Temperatura\";\n"
+ + " :units = \"degree_celsius\";\n"
+ + " :standard_name = \"sea_water_temperature\";\n"
+ + " :coordinates = \"time latitude longitude z\";\n"
+ + "\n"
+ + " double Sal(station=1, time=101);\n"
+ + " :long_name = \"Salinidad\";\n"
+ + " :units = \"PSU\";\n"
+ + " :standard_name = \"sea_water_salinity\";\n"
+ + " :coordinates = \"time latitude longitude z\";\n"
+ + "\n"
+ + " double ProfDiseno(station=1);\n"
+ + " :long_name = \"Profundidad de diseno\";\n"
+ + " :units = \"m\";\n"
+ + "\n"
+ + " double TiranteDiseno(station=1);\n"
+ + " :long_name = \"Tirante diseno\";\n"
+ + " :units = \"m\";\n"
+ + "\n"
+ + " double TiranteEstimado(station=1);\n"
+ + " :long_name = \"Tirante estimado\";\n"
+ + " :units = \"m\";\n"
+ + "\n"
+ + " double var_pres(station=1);\n"
+ + " :long_name = \"Bandera presion\";\n"
+ + " :units = \"N/A\";\n"
+ + "\n"
+ + " double station(station=1);\n"
+ + // 2020-01-23 this was int before netcdf-java 5.2!
+ " :long_name = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\";\n"
+ + " :cf_role = \"timeseries_id\";\n"
+ + "\n"
+ + " double time(station=1, time=101);\n"
+ + " :long_name = \"tiempo en dias Julianos\";\n"
+ + " :units = \"days since 0000-01-01 00:00:00 \";\n"
+ + " :time_origin = \"0000-01-01 00:00:00\";\n"
+ + " :standard_name = \"time\";\n"
+ + " :axis = \"T\";\n"
+ + " :calendar = \"julian\";\n"
+ + " :_CoordinateAxisType = \"Time\";\n"
+ + "\n"
+ + " double latitude(station=1);\n"
+ + " :long_name = \"Latitud\";\n"
+ + " :standard_name = \"latitude\";\n"
+ + " :units = \"degrees_north\";\n"
+ + " :axis = \"Y\";\n"
+ + " :_CoordinateAxisType = \"Lat\";\n"
+ + "\n"
+ + " double longitude(station=1);\n"
+ + " :long_name = \"Longitud\";\n"
+ + " :units = \"degrees_east\";\n"
+ + " :standard_name = \"longitude\";\n"
+ + " :axis = \"X\";\n"
+ + " :_CoordinateAxisType = \"Lon\";\n"
+ + "\n"
+ + " double z(station=1);\n"
+ + " :long_name = \"profundidad\";\n"
+ + " :units = \"m\";\n"
+ + " :standard_name = \"depth\";\n"
+ + " :axis = \"Z\";\n"
+ + " :_CoordinateAxisType = \"Height\";\n"
+ + "\n"
+ + " // global attributes:\n"
+ + " :Anclaje = \"CTZ-T500\";\n"
+ + " :Equipo = \"MCT\";\n"
+ + " :Numero_de_serie = \"5649\";\n"
+ + " :Source_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.mat\";\n"
+ + " :Final_NC_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc\";\n"
+ + " :NCO = \"\\\"4.5.2\\\"\";\n"
+ + " :Conventions = \"CF-1.6\";\n"
+ + " :featureType = \"timeSeries\";\n"
+ + " :standard_name_vocabulary = \"CF-1.6\";\n"
+ + " :title = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\";\n"
+ + " :cdm_data_type = \"TimeSeries\";\n"
+ + " :cdm_timeseries_variables = \"station\";\n"
+ + " :date_created = \"06-Aug-2014 12:22:59\";\n"
+ + " :_CoordSysBuilder = \"ucar.nc2.dataset.conv.CF1Convention\";\n"
+ + "}\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// read the .ncml via table.readNcCF
table = new Table();
- table.readNcCF(baseName + ".ncml", null, 0, // standardizeWhat
- null, null, null);
+ table.readNcCF(
+ baseName + ".ncml",
+ null,
+ 0, // standardizeWhat
+ null,
+ null,
+ null);
results = table.toString(5);
results = String2.replaceAll(results, '\t', ' ');
- expected = "{\n" +
- "dimensions:\n" +
- " row = 101 ;\n" +
- "variables:\n" +
- " double Cond(row) ;\n" +
- " Cond:coordinates = \"time latitude longitude z\" ;\n" +
- " Cond:long_name = \"Conductividad\" ;\n" +
- " Cond:standard_name = \"sea_water_electrical_conductivity\" ;\n" +
- " Cond:units = \"S/m\" ;\n" +
- " double Pres(row) ;\n" +
- " Pres:coordinates = \"time latitude longitude z\" ;\n" +
- " Pres:long_name = \"Presion\" ;\n" +
- " Pres:standard_name = \"sea_water_pressure\" ;\n" +
- " Pres:units = \"dBar\" ;\n" +
- " double Temp(row) ;\n" +
- " Temp:coordinates = \"time latitude longitude z\" ;\n" +
- " Temp:long_name = \"Temperatura\" ;\n" +
- " Temp:standard_name = \"sea_water_temperature\" ;\n" +
- " Temp:units = \"degree_celsius\" ;\n" +
- " double Sal(row) ;\n" +
- " Sal:coordinates = \"time latitude longitude z\" ;\n" +
- " Sal:long_name = \"Salinidad\" ;\n" +
- " Sal:standard_name = \"sea_water_salinity\" ;\n" +
- " Sal:units = \"PSU\" ;\n" +
- " double ProfDiseno(row) ;\n" +
- " ProfDiseno:long_name = \"Profundidad de diseno\" ;\n" +
- " ProfDiseno:units = \"m\" ;\n" +
- " double TiranteDiseno(row) ;\n" +
- " TiranteDiseno:long_name = \"Tirante diseno\" ;\n" +
- " TiranteDiseno:units = \"m\" ;\n" +
- " double TiranteEstimado(row) ;\n" +
- " TiranteEstimado:long_name = \"Tirante estimado\" ;\n" +
- " TiranteEstimado:units = \"m\" ;\n" +
- " double var_pres(row) ;\n" +
- " var_pres:long_name = \"Bandera presion\" ;\n" +
- " var_pres:units = \"N/A\" ;\n" +
- " double station(row) ;\n" + // was int!
- " station:cf_role = \"timeseries_id\" ;\n" +
- " station:long_name = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\" ;\n" +
- " double time(row) ;\n" +
- " time:_CoordinateAxisType = \"Time\" ;\n" +
- " time:axis = \"T\" ;\n" +
- " time:calendar = \"julian\" ;\n" +
- " time:long_name = \"tiempo en dias Julianos\" ;\n" +
- " time:standard_name = \"time\" ;\n" +
- " time:time_origin = \"0000-01-01 00:00:00\" ;\n" +
- " time:units = \"days since 0000-01-01 00:00:00 \" ;\n" +
- " double latitude(row) ;\n" +
- " latitude:_CoordinateAxisType = \"Lat\" ;\n" +
- " latitude:axis = \"Y\" ;\n" +
- " latitude:long_name = \"Latitud\" ;\n" +
- " latitude:standard_name = \"latitude\" ;\n" +
- " latitude:units = \"degrees_north\" ;\n" +
- " double longitude(row) ;\n" +
- " longitude:_CoordinateAxisType = \"Lon\" ;\n" +
- " longitude:axis = \"X\" ;\n" +
- " longitude:long_name = \"Longitud\" ;\n" +
- " longitude:standard_name = \"longitude\" ;\n" +
- " longitude:units = \"degrees_east\" ;\n" +
- " double z(row) ;\n" +
- " z:_CoordinateAxisType = \"Height\" ;\n" +
- " z:axis = \"Z\" ;\n" +
- " z:long_name = \"profundidad\" ;\n" +
- " z:standard_name = \"depth\" ;\n" +
- " z:units = \"m\" ;\n" +
- "\n" +
- "// global attributes:\n" +
- " :_CoordSysBuilder = \"ucar.nc2.dataset.conv.CF1Convention\" ;\n" +
- " :Anclaje = \"CTZ-T500\" ;\n" +
- " :cdm_data_type = \"TimeSeries\" ;\n" +
- " :cdm_timeseries_variables = \"ProfDiseno, TiranteDiseno, TiranteEstimado, var_pres, station, latitude, longitude, z\" ;\n"
- +
- " :Conventions = \"CF-1.6\" ;\n" +
- " :date_created = \"06-Aug-2014 12:22:59\" ;\n" +
- " :Equipo = \"MCT\" ;\n" +
- " :featureType = \"timeSeries\" ;\n" +
- " :Final_NC_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc\" ;\n" +
- " :NCO = \"\\\"4.5.2\\\"\" ;\n" +
- " :Numero_de_serie = \"5649\" ;\n" +
- " :Source_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.mat\" ;\n" +
- " :standard_name_vocabulary = \"CF-1.6\" ;\n" +
- " :subsetVariables = \"ProfDiseno, TiranteDiseno, TiranteEstimado, var_pres, station, latitude, longitude, z\" ;\n"
- +
- " :title = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\" ;\n" +
- "}\n" +
- "Cond,Pres,Temp,Sal,ProfDiseno,TiranteDiseno,TiranteEstimado,var_pres,station,time,latitude,longitude,z\n"
- +
- "3.88991,409.629,10.3397,35.310065426337346,408.0,500.0,498.0,1.0,0.0,733358.7847222222,18.843666666666667,-94.81761666666667,406.0\n"
- +
- "3.88691,409.12,10.3353,35.28414747593317,408.0,500.0,498.0,1.0,0.0,733358.786111111,18.843666666666667,-94.81761666666667,406.0\n"
- +
- "3.88678,408.803,10.3418,35.27667928948258,408.0,500.0,498.0,1.0,0.0,733358.7875,18.843666666666667,-94.81761666666667,406.0\n"
- +
- "3.88683,408.623,10.3453,35.273879094537904,408.0,500.0,498.0,1.0,0.0,733358.7888888889,18.843666666666667,-94.81761666666667,406.0\n"
- +
- "3.88808,408.517,10.3687,35.26394801644307,408.0,500.0,498.0,1.0,0.0,733358.7902777778,18.843666666666667,-94.81761666666667,406.0\n"
- +
- "...\n";
+ expected =
+ "{\n"
+ + "dimensions:\n"
+ + " row = 101 ;\n"
+ + "variables:\n"
+ + " double Cond(row) ;\n"
+ + " Cond:coordinates = \"time latitude longitude z\" ;\n"
+ + " Cond:long_name = \"Conductividad\" ;\n"
+ + " Cond:standard_name = \"sea_water_electrical_conductivity\" ;\n"
+ + " Cond:units = \"S/m\" ;\n"
+ + " double Pres(row) ;\n"
+ + " Pres:coordinates = \"time latitude longitude z\" ;\n"
+ + " Pres:long_name = \"Presion\" ;\n"
+ + " Pres:standard_name = \"sea_water_pressure\" ;\n"
+ + " Pres:units = \"dBar\" ;\n"
+ + " double Temp(row) ;\n"
+ + " Temp:coordinates = \"time latitude longitude z\" ;\n"
+ + " Temp:long_name = \"Temperatura\" ;\n"
+ + " Temp:standard_name = \"sea_water_temperature\" ;\n"
+ + " Temp:units = \"degree_celsius\" ;\n"
+ + " double Sal(row) ;\n"
+ + " Sal:coordinates = \"time latitude longitude z\" ;\n"
+ + " Sal:long_name = \"Salinidad\" ;\n"
+ + " Sal:standard_name = \"sea_water_salinity\" ;\n"
+ + " Sal:units = \"PSU\" ;\n"
+ + " double ProfDiseno(row) ;\n"
+ + " ProfDiseno:long_name = \"Profundidad de diseno\" ;\n"
+ + " ProfDiseno:units = \"m\" ;\n"
+ + " double TiranteDiseno(row) ;\n"
+ + " TiranteDiseno:long_name = \"Tirante diseno\" ;\n"
+ + " TiranteDiseno:units = \"m\" ;\n"
+ + " double TiranteEstimado(row) ;\n"
+ + " TiranteEstimado:long_name = \"Tirante estimado\" ;\n"
+ + " TiranteEstimado:units = \"m\" ;\n"
+ + " double var_pres(row) ;\n"
+ + " var_pres:long_name = \"Bandera presion\" ;\n"
+ + " var_pres:units = \"N/A\" ;\n"
+ + " double station(row) ;\n"
+ + // was int before netcdf-java 5.2 (see note above)
+ " station:cf_role = \"timeseries_id\" ;\n"
+ + " station:long_name = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\" ;\n"
+ + " double time(row) ;\n"
+ + " time:_CoordinateAxisType = \"Time\" ;\n"
+ + " time:axis = \"T\" ;\n"
+ + " time:calendar = \"julian\" ;\n"
+ + " time:long_name = \"tiempo en dias Julianos\" ;\n"
+ + " time:standard_name = \"time\" ;\n"
+ + " time:time_origin = \"0000-01-01 00:00:00\" ;\n"
+ + " time:units = \"days since 0000-01-01 00:00:00 \" ;\n"
+ + " double latitude(row) ;\n"
+ + " latitude:_CoordinateAxisType = \"Lat\" ;\n"
+ + " latitude:axis = \"Y\" ;\n"
+ + " latitude:long_name = \"Latitud\" ;\n"
+ + " latitude:standard_name = \"latitude\" ;\n"
+ + " latitude:units = \"degrees_north\" ;\n"
+ + " double longitude(row) ;\n"
+ + " longitude:_CoordinateAxisType = \"Lon\" ;\n"
+ + " longitude:axis = \"X\" ;\n"
+ + " longitude:long_name = \"Longitud\" ;\n"
+ + " longitude:standard_name = \"longitude\" ;\n"
+ + " longitude:units = \"degrees_east\" ;\n"
+ + " double z(row) ;\n"
+ + " z:_CoordinateAxisType = \"Height\" ;\n"
+ + " z:axis = \"Z\" ;\n"
+ + " z:long_name = \"profundidad\" ;\n"
+ + " z:standard_name = \"depth\" ;\n"
+ + " z:units = \"m\" ;\n"
+ + "\n"
+ + "// global attributes:\n"
+ + " :_CoordSysBuilder = \"ucar.nc2.dataset.conv.CF1Convention\" ;\n"
+ + " :Anclaje = \"CTZ-T500\" ;\n"
+ + " :cdm_data_type = \"TimeSeries\" ;\n"
+ + " :cdm_timeseries_variables = \"ProfDiseno, TiranteDiseno, TiranteEstimado, var_pres, station, latitude, longitude, z\" ;\n"
+ + " :Conventions = \"CF-1.6\" ;\n"
+ + " :date_created = \"06-Aug-2014 12:22:59\" ;\n"
+ + " :Equipo = \"MCT\" ;\n"
+ + " :featureType = \"timeSeries\" ;\n"
+ + " :Final_NC_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.nc\" ;\n"
+ + " :NCO = \"\\\"4.5.2\\\"\" ;\n"
+ + " :Numero_de_serie = \"5649\" ;\n"
+ + " :Source_file = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14.mat\" ;\n"
+ + " :standard_name_vocabulary = \"CF-1.6\" ;\n"
+ + " :subsetVariables = \"ProfDiseno, TiranteDiseno, TiranteEstimado, var_pres, station, latitude, longitude, z\" ;\n"
+ + " :title = \"CTZ-T500-MCT-NS5649-Z408-INS12-REC14\" ;\n"
+ + "}\n"
+ + "Cond,Pres,Temp,Sal,ProfDiseno,TiranteDiseno,TiranteEstimado,var_pres,station,time,latitude,longitude,z\n"
+ + "3.88991,409.629,10.3397,35.310065426337346,408.0,500.0,498.0,1.0,0.0,733358.7847222222,18.843666666666667,-94.81761666666667,406.0\n"
+ + "3.88691,409.12,10.3353,35.28414747593317,408.0,500.0,498.0,1.0,0.0,733358.786111111,18.843666666666667,-94.81761666666667,406.0\n"
+ + "3.88678,408.803,10.3418,35.27667928948258,408.0,500.0,498.0,1.0,0.0,733358.7875,18.843666666666667,-94.81761666666667,406.0\n"
+ + "3.88683,408.623,10.3453,35.273879094537904,408.0,500.0,498.0,1.0,0.0,733358.7888888889,18.843666666666667,-94.81761666666667,406.0\n"
+ + "3.88808,408.517,10.3687,35.26394801644307,408.0,500.0,498.0,1.0,0.0,733358.7902777778,18.843666666666667,-94.81761666666667,406.0\n"
+ + "...\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// read the .ncml via table.readNcCF -- just station info
table = new Table();
- table.readNcCF(baseName + ".ncml",
- StringArray.fromCSV("station,latitude,longitude,z,ProfDiseno,TiranteDiseno,TiranteEstimado,var_pres"),
+ table.readNcCF(
+ baseName + ".ncml",
+ StringArray.fromCSV(
+ "station,latitude,longitude,z,ProfDiseno,TiranteDiseno,TiranteEstimado,var_pres"),
0, // standardizeWhat
- null, null, null);
+ null,
+ null,
+ null);
results = table.dataToString();
results = String2.replaceAll(results, '\t', ' ');
- expected = "station,latitude,longitude,z,ProfDiseno,TiranteDiseno,TiranteEstimado,var_pres\n" +
- "0.0,18.843666666666667,-94.81761666666667,406.0,408.0,500.0,498.0,1.0\n";
+ expected =
+ "station,latitude,longitude,z,ProfDiseno,TiranteDiseno,TiranteEstimado,var_pres\n"
+ + "0.0,18.843666666666667,-94.81761666666667,406.0,408.0,500.0,498.0,1.0\n";
Test.ensureEqual(results, expected, "results=\n" + results);
String2.log("\n*** EDDTableFromNcCFFiles.testNcml() finished.");
@@ -2376,7 +2562,9 @@ void testJP14323() throws Throwable {
// String2.log("\n****************** EDDTableFromNcCFFiles.testJP14323()
// *****************\n");
// testVerboseOn();
- String dir = Path.of(EDDTableFromNcCFFilesTests.class.getResource("/data/nccf/ncei/").toURI()).toString();
+ String dir =
+ Path.of(EDDTableFromNcCFFilesTests.class.getResource("/data/nccf/ncei/").toURI())
+ .toString();
String sampleName = "biology_JP14323.nc";
String results, expected;
Table table;
@@ -2385,231 +2573,234 @@ void testJP14323() throws Throwable {
String2.log("Here's the ncdump of " + dir + sampleName);
results = NcHelper.ncdump(dir + sampleName, "-h");
String2.log(results);
- expected = "netcdf biology_JP14323.nc {\n" +
- " dimensions:\n" +
- " casts = 52;\n" +
- " z_obs = 74;\n" +
- " Temperature_obs = 74;\n" +
- " strnlen = 170;\n" +
- " strnlensmall = 35;\n" +
- " biosets = 52;\n" +
- " variables:\n" +
- " char country(casts=52, strnlensmall=35);\n" +
- "\n" +
- " char WOD_cruise_identifier(casts=52, strnlensmall=35);\n" +
- " :comment = \"two byte country code + WOD cruise number (unique to country code)\";\n" +
- " :long_name = \"WOD_cruise_identifier\";\n" +
- "\n" +
- " int wod_unique_cast(casts=52);\n" +
- " :cf_role = \"profile_id\";\n" +
- "\n" +
- " float lat(casts=52);\n" +
- " :standard_name = \"latitude\";\n" +
- " :long_name = \"latitude\";\n" +
- " :units = \"degrees_north\";\n" +
- "\n" +
- " float lon(casts=52);\n" +
- " :standard_name = \"longitude\";\n" +
- " :long_name = \"longitude\";\n" +
- " :units = \"degrees_east\";\n" +
- "\n" +
- " double time(casts=52);\n" +
- " :standard_name = \"time\";\n" +
- " :long_name = \"time\";\n" +
- " :units = \"days since 1770-01-01 00:00:00\";\n" +
- "\n" +
- " int date(casts=52);\n" +
- " :long_name = \"date\";\n" +
- " :comment = \"YYYYMMDD\";\n" +
- "\n" +
- " float GMT_time(casts=52);\n" +
- " :long_name = \"GMT_time\";\n" +
- "\n" +
- " int Access_no(casts=52);\n" +
- " :long_name = \"NODC_accession_number\";\n" +
- " :units_wod = \"NODC_code\";\n" +
- " :comment = \"used to find original data at NODC\";\n" +
- "\n" +
- " char Platform(casts=52, strnlen=170);\n" +
- " :long_name = \"Platform_name\";\n" +
- " :comment = \"name of platform from which measurements were taken\";\n" +
- "\n" +
- " char Institute(casts=52, strnlen=170);\n" +
- " :long_name = \"Responsible_institute\";\n" +
- " :comment = \"name of institute which collected data\";\n" +
- "\n" +
- " char dataset(casts=52, strnlen=170);\n" +
- " :long_name = \"WOD_dataset\";\n" +
- "\n" +
- " float z(z_obs=74);\n" +
- " :standard_name = \"altitude\";\n" +
- " :long_name = \"depth_below_sea_level\";\n" +
- " :units = \"m\";\n" +
- " :positive = \"down\";\n" +
- "\n" +
- " short z_WODflag(z_obs=74);\n" +
- " :flag_definitions = \"WODfd\";\n" +
- "\n" +
- " short z_sigfig(z_obs=74);\n" +
- "\n" +
- " int z_row_size(casts=52);\n" +
- " :long_name = \"number of depth observations for this cast\";\n" +
- " :sample_dimension = \"z_obs\";\n" +
- "\n" +
- " float Temperature(Temperature_obs=74);\n" +
- " :long_name = \"Temperature\";\n" +
- " :standard_name = \"sea_water_temperature\";\n" +
- " :units = \"degree_C\";\n" +
- " :coordinates = \"time lat lon z\";\n" +
- " :grid_mapping = \"crs\";\n" +
- "\n" +
- " short Temperature_sigfigs(Temperature_obs=74);\n" +
- "\n" +
- " short Temperature_row_size(casts=52);\n" +
- " :long_name = \"number of Temperature observations for this cast\";\n" +
- " :sample_dimension = \"Temperature_obs\";\n" +
- "\n" +
- " short Temperature_WODflag(Temperature_obs=74);\n" +
- " :flag_definitions = \"WODf\";\n" +
- "\n" +
- " short Temperature_WODprofileflag(casts=52);\n" +
- " :flag_definitions = \"WODfp\";\n" +
- "\n" +
- " float Mesh_size(casts=52);\n" +
- " :long_name = \"Mesh_size\";\n" +
- " :units = \"microns\";\n" +
- "\n" +
- " char Type_tow(casts=52, strnlen=170);\n" +
- " :long_name = \"Type_of_tow\";\n" +
- " :units = \"WOD_code\";\n" +
- " :comment = \"0\";\n" +
- "\n" +
- " char Gear_code(casts=52, strnlen=170);\n" +
- " :long_name = \"WOD_code\";\n" +
- " :comment = \"Gear_code\";\n" +
- "\n" +
- " float net_mouth_area(casts=52);\n" +
- " :long_name = \"net_mouth_area\";\n" +
- " :units = \"m2\";\n" +
- " :comment = \"sampling input area (net mouth)\";\n" +
- "\n" +
- " float GMT_sample_start_time(casts=52);\n" +
- " :long_name = \"GMT_sample_start_time\";\n" +
- " :units = \"hour\";\n" +
- " :comment = \"Start time (GMT) of the sampling event\";\n" +
- "\n" +
- " int Biology_Accno(casts=52);\n" +
- " :long_name = \"Biology_Accn#\";\n" +
- " :units = \"NODC_code\";\n" +
- " :comment = \"Accession # for the biology component\";\n" +
- "\n" +
- "\n" +
- " Structure {\n" +
- " char taxa_name_bio(100);\n" +
- " float upper_z_bio;\n" +
- " float lower_z_bio;\n" +
- " int measure_abund_bio;\n" +
- " char measure_type_bio(15);\n" +
- " float measure_val_bio;\n" +
- " char measure_units_bio(10);\n" +
- " int measure_flag_bio;\n" +
- " float cbv_value_bio;\n" +
- " int cbv_flag_bio;\n" +
- " char cbv_units_bio(6);\n" +
- " float cbv_method_bio;\n" +
- " int pgc_code_bio;\n" +
- " int taxa_modifier_bio;\n" +
- " int taxa_sex_bio;\n" +
- " int taxa_stage_bio;\n" +
- " int taxa_troph_bio;\n" +
- " int taxa_realm_bio;\n" +
- " int taxa_feature_bio;\n" +
- " int taxa_method_bio;\n" +
- " int taxa_minsize_desc_bio;\n" +
- " float taxa_minsize_val_bio;\n" +
- " int taxa_maxsize_desc_bio;\n" +
- " float taxa_maxsize_val_bio;\n" +
- " float taxa_length_bio;\n" +
- " float taxa_width_bio;\n" +
- " float taxa_radius_bio;\n" +
- " float sample_volume_bio;\n" +
- " } plankton(biosets=52);\n" +
- "\n" +
- "\n" +
- " int plankton_row_size(casts=52);\n" +
- "\n" +
- " int crs;\n" +
- " :grid_mapping_name = \"latitude_longitude\";\n" +
- " :epsg_code = \"EPSG:4326\";\n" +
- " :longitude_of_prime_meridian = 0.0f; // float\n" +
- " :semi_major_axis = 6378137.0f; // float\n" +
- " :inverse_flattening = 298.25723f; // float\n" +
- "\n" +
- " short WODf;\n" +
- " :long_name = \"WOD_observation_flag\";\n" +
- " :flag_values = 0S, 1S, 2S, 3S, 4S, 5S, 6S, 7S, 8S, 9S; // short\n" +
- " :flag_meanings = \"accepted range_out inversion gradient anomaly gradient+inversion range+inversion range+gradient range+anomaly range+inversion+gradient\";\n"
- +
- "\n" +
- " short WODfp;\n" +
- " :long_name = \"WOD_profile_flag\";\n" +
- " :flag_values = 0S, 1S, 2S, 3S, 4S, 5S, 6S, 7S, 8S, 9S; // short\n" +
- " :flag_meanings = \"accepted annual_sd_out density_inversion cruise seasonal_sd_out monthly_sd_out annual+seasonal_sd_out anomaly_or_annual+monthly_sd_out seasonal+monthly_sd_out annual+seasonal+monthly_sd_out\";\n"
- +
- "\n" +
- " short WODfd;\n" +
- " :long_name = \"WOD_depth_level_\";\n" +
- " :flag_values = 0S, 1S, 2S; // short\n" +
- " :flag_meanings = \"accepted duplicate_or_inversion density_inversion\";\n" +
- "\n" +
- " // global attributes:\n" +
- " :institution = \"National Oceanographic Data Center(NODC), NOAA\";\n" +
- " :source = \"World Ocean Database\";\n" +
- " :references = \"World Ocean Database 2013. URL:https://data.nodc.noaa.gov/woa/WOD/DOC/wod_intro.pdf\";\n"
- +
- " :title = \"World Ocean Database - Multi-cast file\";\n" +
- " :summary = \"Data for multiple casts from the World Ocean Database\";\n" +
- " :id = \"biology_JP14323.nc\";\n" +
- " :naming_authority = \"gov.noaa.nodc\";\n" +
- " :geospatial_lat_min = 36.3f; // float\n" +
- " :geospatial_lat_max = 42.866665f; // float\n" +
- " :geospatial_lat_resolution = \"point\";\n" +
- " :geospatial_lon_min = 140.61667f; // float\n" +
- " :geospatial_lon_max = 147.0f; // float\n" +
- " :geospatial_lon_resolution = \"point\";\n" +
- " :time_coverage_start = \"1970-10-08\";\n" +
- " :time_coverage_end = \"1971-01-08\";\n" +
- " :geospatial_vertical_min = 0.0f; // float\n" +
- " :geospatial_vertical_max = 100.0f; // float\n" +
- " :geospatial_vertical_positive = \"down\";\n" +
- " :geospatial_vertical_units = \"meters\";\n" +
- " :creator_name = \"Ocean Climate Lab/NODC\";\n" +
- " :creator_email = \"OCLhelp@noaa.gov\";\n" +
- " :creator_url = \"https://www.nodc.noaa.gov\";\n" +
- " :project = \"World Ocean Database\";\n" +
- " :acknowledgements = \"\";\n" +
- " :processing_level = \"\";\n" +
- " :keywords = \"\";\n" +
- " :keywords_vocabulary = \"\";\n" +
- " :date_created = \"2016-05-20\";\n" +
- " :date_modified = \"2016-05-20\";\n" +
- " :publisher_name = \"US DOC; NESDIS; NATIONAL OCEANOGRAPHIC DATA CENTER - IN295\";\n" +
- " :publisher_url = \"https://www.nodc.noaa.gov\";\n" +
- " :publisher_email = \"NODC.Services@noaa.gov\";\n" +
- " :history = \"\";\n" +
- " :license = \"\";\n" +
- " :standard_name_vocabulary = \"CF-1.6\";\n" +
- " :featureType = \"Profile\";\n" +
- " :cdm_data_type = \"Profile\";\n" +
- " :Conventions = \"CF-1.6\";\n" +
- " data:\n" +
- "}\n";
+ expected =
+ "netcdf biology_JP14323.nc {\n"
+ + " dimensions:\n"
+ + " casts = 52;\n"
+ + " z_obs = 74;\n"
+ + " Temperature_obs = 74;\n"
+ + " strnlen = 170;\n"
+ + " strnlensmall = 35;\n"
+ + " biosets = 52;\n"
+ + " variables:\n"
+ + " char country(casts=52, strnlensmall=35);\n"
+ + "\n"
+ + " char WOD_cruise_identifier(casts=52, strnlensmall=35);\n"
+ + " :comment = \"two byte country code + WOD cruise number (unique to country code)\";\n"
+ + " :long_name = \"WOD_cruise_identifier\";\n"
+ + "\n"
+ + " int wod_unique_cast(casts=52);\n"
+ + " :cf_role = \"profile_id\";\n"
+ + "\n"
+ + " float lat(casts=52);\n"
+ + " :standard_name = \"latitude\";\n"
+ + " :long_name = \"latitude\";\n"
+ + " :units = \"degrees_north\";\n"
+ + "\n"
+ + " float lon(casts=52);\n"
+ + " :standard_name = \"longitude\";\n"
+ + " :long_name = \"longitude\";\n"
+ + " :units = \"degrees_east\";\n"
+ + "\n"
+ + " double time(casts=52);\n"
+ + " :standard_name = \"time\";\n"
+ + " :long_name = \"time\";\n"
+ + " :units = \"days since 1770-01-01 00:00:00\";\n"
+ + "\n"
+ + " int date(casts=52);\n"
+ + " :long_name = \"date\";\n"
+ + " :comment = \"YYYYMMDD\";\n"
+ + "\n"
+ + " float GMT_time(casts=52);\n"
+ + " :long_name = \"GMT_time\";\n"
+ + "\n"
+ + " int Access_no(casts=52);\n"
+ + " :long_name = \"NODC_accession_number\";\n"
+ + " :units_wod = \"NODC_code\";\n"
+ + " :comment = \"used to find original data at NODC\";\n"
+ + "\n"
+ + " char Platform(casts=52, strnlen=170);\n"
+ + " :long_name = \"Platform_name\";\n"
+ + " :comment = \"name of platform from which measurements were taken\";\n"
+ + "\n"
+ + " char Institute(casts=52, strnlen=170);\n"
+ + " :long_name = \"Responsible_institute\";\n"
+ + " :comment = \"name of institute which collected data\";\n"
+ + "\n"
+ + " char dataset(casts=52, strnlen=170);\n"
+ + " :long_name = \"WOD_dataset\";\n"
+ + "\n"
+ + " float z(z_obs=74);\n"
+ + " :standard_name = \"altitude\";\n"
+ + " :long_name = \"depth_below_sea_level\";\n"
+ + " :units = \"m\";\n"
+ + " :positive = \"down\";\n"
+ + "\n"
+ + " short z_WODflag(z_obs=74);\n"
+ + " :flag_definitions = \"WODfd\";\n"
+ + "\n"
+ + " short z_sigfig(z_obs=74);\n"
+ + "\n"
+ + " int z_row_size(casts=52);\n"
+ + " :long_name = \"number of depth observations for this cast\";\n"
+ + " :sample_dimension = \"z_obs\";\n"
+ + "\n"
+ + " float Temperature(Temperature_obs=74);\n"
+ + " :long_name = \"Temperature\";\n"
+ + " :standard_name = \"sea_water_temperature\";\n"
+ + " :units = \"degree_C\";\n"
+ + " :coordinates = \"time lat lon z\";\n"
+ + " :grid_mapping = \"crs\";\n"
+ + "\n"
+ + " short Temperature_sigfigs(Temperature_obs=74);\n"
+ + "\n"
+ + " short Temperature_row_size(casts=52);\n"
+ + " :long_name = \"number of Temperature observations for this cast\";\n"
+ + " :sample_dimension = \"Temperature_obs\";\n"
+ + "\n"
+ + " short Temperature_WODflag(Temperature_obs=74);\n"
+ + " :flag_definitions = \"WODf\";\n"
+ + "\n"
+ + " short Temperature_WODprofileflag(casts=52);\n"
+ + " :flag_definitions = \"WODfp\";\n"
+ + "\n"
+ + " float Mesh_size(casts=52);\n"
+ + " :long_name = \"Mesh_size\";\n"
+ + " :units = \"microns\";\n"
+ + "\n"
+ + " char Type_tow(casts=52, strnlen=170);\n"
+ + " :long_name = \"Type_of_tow\";\n"
+ + " :units = \"WOD_code\";\n"
+ + " :comment = \"0\";\n"
+ + "\n"
+ + " char Gear_code(casts=52, strnlen=170);\n"
+ + " :long_name = \"WOD_code\";\n"
+ + " :comment = \"Gear_code\";\n"
+ + "\n"
+ + " float net_mouth_area(casts=52);\n"
+ + " :long_name = \"net_mouth_area\";\n"
+ + " :units = \"m2\";\n"
+ + " :comment = \"sampling input area (net mouth)\";\n"
+ + "\n"
+ + " float GMT_sample_start_time(casts=52);\n"
+ + " :long_name = \"GMT_sample_start_time\";\n"
+ + " :units = \"hour\";\n"
+ + " :comment = \"Start time (GMT) of the sampling event\";\n"
+ + "\n"
+ + " int Biology_Accno(casts=52);\n"
+ + " :long_name = \"Biology_Accn#\";\n"
+ + " :units = \"NODC_code\";\n"
+ + " :comment = \"Accession # for the biology component\";\n"
+ + "\n"
+ + "\n"
+ + " Structure {\n"
+ + " char taxa_name_bio(100);\n"
+ + " float upper_z_bio;\n"
+ + " float lower_z_bio;\n"
+ + " int measure_abund_bio;\n"
+ + " char measure_type_bio(15);\n"
+ + " float measure_val_bio;\n"
+ + " char measure_units_bio(10);\n"
+ + " int measure_flag_bio;\n"
+ + " float cbv_value_bio;\n"
+ + " int cbv_flag_bio;\n"
+ + " char cbv_units_bio(6);\n"
+ + " float cbv_method_bio;\n"
+ + " int pgc_code_bio;\n"
+ + " int taxa_modifier_bio;\n"
+ + " int taxa_sex_bio;\n"
+ + " int taxa_stage_bio;\n"
+ + " int taxa_troph_bio;\n"
+ + " int taxa_realm_bio;\n"
+ + " int taxa_feature_bio;\n"
+ + " int taxa_method_bio;\n"
+ + " int taxa_minsize_desc_bio;\n"
+ + " float taxa_minsize_val_bio;\n"
+ + " int taxa_maxsize_desc_bio;\n"
+ + " float taxa_maxsize_val_bio;\n"
+ + " float taxa_length_bio;\n"
+ + " float taxa_width_bio;\n"
+ + " float taxa_radius_bio;\n"
+ + " float sample_volume_bio;\n"
+ + " } plankton(biosets=52);\n"
+ + "\n"
+ + "\n"
+ + " int plankton_row_size(casts=52);\n"
+ + "\n"
+ + " int crs;\n"
+ + " :grid_mapping_name = \"latitude_longitude\";\n"
+ + " :epsg_code = \"EPSG:4326\";\n"
+ + " :longitude_of_prime_meridian = 0.0f; // float\n"
+ + " :semi_major_axis = 6378137.0f; // float\n"
+ + " :inverse_flattening = 298.25723f; // float\n"
+ + "\n"
+ + " short WODf;\n"
+ + " :long_name = \"WOD_observation_flag\";\n"
+ + " :flag_values = 0S, 1S, 2S, 3S, 4S, 5S, 6S, 7S, 8S, 9S; // short\n"
+ + " :flag_meanings = \"accepted range_out inversion gradient anomaly gradient+inversion range+inversion range+gradient range+anomaly range+inversion+gradient\";\n"
+ + "\n"
+ + " short WODfp;\n"
+ + " :long_name = \"WOD_profile_flag\";\n"
+ + " :flag_values = 0S, 1S, 2S, 3S, 4S, 5S, 6S, 7S, 8S, 9S; // short\n"
+ + " :flag_meanings = \"accepted annual_sd_out density_inversion cruise seasonal_sd_out monthly_sd_out annual+seasonal_sd_out anomaly_or_annual+monthly_sd_out seasonal+monthly_sd_out annual+seasonal+monthly_sd_out\";\n"
+ + "\n"
+ + " short WODfd;\n"
+ + " :long_name = \"WOD_depth_level_\";\n"
+ + " :flag_values = 0S, 1S, 2S; // short\n"
+ + " :flag_meanings = \"accepted duplicate_or_inversion density_inversion\";\n"
+ + "\n"
+ + " // global attributes:\n"
+ + " :institution = \"National Oceanographic Data Center(NODC), NOAA\";\n"
+ + " :source = \"World Ocean Database\";\n"
+ + " :references = \"World Ocean Database 2013. URL:https://data.nodc.noaa.gov/woa/WOD/DOC/wod_intro.pdf\";\n"
+ + " :title = \"World Ocean Database - Multi-cast file\";\n"
+ + " :summary = \"Data for multiple casts from the World Ocean Database\";\n"
+ + " :id = \"biology_JP14323.nc\";\n"
+ + " :naming_authority = \"gov.noaa.nodc\";\n"
+ + " :geospatial_lat_min = 36.3f; // float\n"
+ + " :geospatial_lat_max = 42.866665f; // float\n"
+ + " :geospatial_lat_resolution = \"point\";\n"
+ + " :geospatial_lon_min = 140.61667f; // float\n"
+ + " :geospatial_lon_max = 147.0f; // float\n"
+ + " :geospatial_lon_resolution = \"point\";\n"
+ + " :time_coverage_start = \"1970-10-08\";\n"
+ + " :time_coverage_end = \"1971-01-08\";\n"
+ + " :geospatial_vertical_min = 0.0f; // float\n"
+ + " :geospatial_vertical_max = 100.0f; // float\n"
+ + " :geospatial_vertical_positive = \"down\";\n"
+ + " :geospatial_vertical_units = \"meters\";\n"
+ + " :creator_name = \"Ocean Climate Lab/NODC\";\n"
+ + " :creator_email = \"OCLhelp@noaa.gov\";\n"
+ + " :creator_url = \"https://www.nodc.noaa.gov\";\n"
+ + " :project = \"World Ocean Database\";\n"
+ + " :acknowledgements = \"\";\n"
+ + " :processing_level = \"\";\n"
+ + " :keywords = \"\";\n"
+ + " :keywords_vocabulary = \"\";\n"
+ + " :date_created = \"2016-05-20\";\n"
+ + " :date_modified = \"2016-05-20\";\n"
+ + " :publisher_name = \"US DOC; NESDIS; NATIONAL OCEANOGRAPHIC DATA CENTER - IN295\";\n"
+ + " :publisher_url = \"https://www.nodc.noaa.gov\";\n"
+ + " :publisher_email = \"NODC.Services@noaa.gov\";\n"
+ + " :history = \"\";\n"
+ + " :license = \"\";\n"
+ + " :standard_name_vocabulary = \"CF-1.6\";\n"
+ + " :featureType = \"Profile\";\n"
+ + " :cdm_data_type = \"Profile\";\n"
+ + " :Conventions = \"CF-1.6\";\n"
+ + " data:\n"
+ + "}\n";
Test.ensureEqual(results, expected, "results=\n" + results);
// read the table
table = new Table();
- table.readNcCF(dir + sampleName, null, 0, // standardizeWhat
- null, null, null);
+ table.readNcCF(
+ dir + sampleName,
+ null,
+ 0, // standardizeWhat
+ null,
+ null,
+ null);
results = table.dataToString(5);
expected = "zztop\n";
@@ -2721,7 +2912,7 @@ void testJP14323() throws Throwable {
* +
* "...\n";
* Test.ensureEqual(results, expected, "results=\n" + results);
- *
+ *
* //read the .ncml via table.readNcCF -- just station info
* table = new Table();
* table.readNcCF(baseName + ".ncml",
diff --git a/src/test/java/jetty/JettyTests.java b/src/test/java/jetty/JettyTests.java
index c3eafd18..7ef87989 100644
--- a/src/test/java/jetty/JettyTests.java
+++ b/src/test/java/jetty/JettyTests.java
@@ -236,7 +236,7 @@ void testMetadataIso19115() throws Exception {
" NUMBER\n" + //
" \n" + //
" \n" + //
- " 2.176416E8\n" + //
+ " VALUE\n" + //
" \n" + //
" \n" + //
" \n" + //
@@ -1232,6 +1232,8 @@ void testMetadataIso19115() throws Exception {
results = results.replaceAll("....-..-..", "YYYY-MM-DD");
results = results.replaceAll("[0-9]+.[0-9]+",
"VALUE");
+ results = results.replaceAll(".*",
+ "VALUE");
results = results.replaceAll("....-..-..T..:00:00Z",
"YYYY-MM-DDThh:00:00Z");
results = results.replaceAll("[0-9]+", "NUMBER");
@@ -2889,7 +2891,7 @@ void testErddap1FilesWAF2() throws Throwable {
// String2.log("\n*** FileVisitorDNLS.testErddapFilesWAF2()\n");
// *** test localhost
- String2.log("\nThis test requires erdMH1chla1day in localhost erddap.");
+ // String2.log("\nThis test requires erdMH1chla1day in localhost erddap.");
String url = "http://localhost:" + PORT + "/erddap/files/erdMH1chla1day/";
String tFileNameRegex = "A20.*\\.nc(|\\.gz)";
boolean tRecursive = true;
@@ -4704,7 +4706,7 @@ void testJsonld() throws Throwable {
" \"name\": \"Chlorophyll-a, Aqua MODIS, NPP, L3SMI, Global, 4km, Science Quality, 2003-present (1 Day Composite)\",\n" + //
" \"headline\": \"erdMH1chla1day\",\n" + //
" \"description\": \"This dataset has Level 3, Standard Mapped Image, 4km, chlorophyll-a concentration data from NASA's Aqua Spacecraft. Measurements are gathered by the Moderate Resolution Imaging Spectroradiometer (MODIS) carried aboard the spacecraft. This is Science Quality data. This is the August 2015 version of this dataset.\\n" + //
- "_lastModified=2016-10-18T06:45:00.000Z\\n" + //
+ "_lastModified=YYYY-MM-DDThh:mm:ss.000Z\\n" + //
"cdm_data_type=Grid\\n" + //
"Conventions=CF-1.6, COARDS, ACDD-1.3\\n" + //
"Easternmost_Easting=179.9792\\n" + //
@@ -4731,7 +4733,7 @@ void testJsonld() throws Throwable {
"Northernmost_Northing=89.97916\\n" + //
"platform=Aqua\\n" + //
"processing_level=L3 Mapped\\n" + //
- "processing_version=2014.0.1QL\\n" + //
+ "processing_version=VERSION\\n" + //
"product_name=A2016291.L3m_DAY_CHL_chlor_a_4km.nc\\n" + //
"project=Ocean Biology Processing Group (NASA/GSFC/OBPG)\\n" + //
"sourceUrl=(local files)\\n" + //
@@ -5050,6 +5052,8 @@ void testJsonld() throws Throwable {
results = results.replaceAll("dateCreated\\\": \\\"....-..-..T..:..:..Z",
"dateCreated\\\": \\\"yyyy-mm-ddThh:mm:ssZ");
results = results.replaceAll("100_multi_........1200", "100_multi_yyyymmdd1200");
+ results = results.replaceAll("_lastModified=....-..-..T..:..:...000Z", "_lastModified=YYYY-MM-DDThh:mm:ss.000Z");
+ results = results.replaceAll("processing_version=[0-9]+.[0-9].?.?.?.?\\\\n", "processing_version=VERSION\\\\n");
po = Math.max(0, results.indexOf(expected.substring(0, 80)));
Test.ensureEqual(results.substring(po, Math.min(results.length(), po + expected.length())),
expected, "results=\n" + results);
diff --git a/src/test/java/testDataset/EDDTestDataset.java b/src/test/java/testDataset/EDDTestDataset.java
index 5ba55957..331440e5 100644
--- a/src/test/java/testDataset/EDDTestDataset.java
+++ b/src/test/java/testDataset/EDDTestDataset.java
@@ -236,6 +236,7 @@ public static void generateDatasetsXml() throws URISyntaxException, FileNotFound
// get the data
datasetsXml.append(xmlFragment_testGridWav());
// datasetsXml.append(xmlFragment_TS_SLEV_TAD()); // dataset not available
+ datasetsXml.append(xmlFragment_TS_ATMP_AAD());
datasetsXml.append(
"\n" + //
@@ -7747,7 +7748,7 @@ private static String xmlFragment_testPrecision() throws URISyntaxException {
" false\n" + //
" .*\n" + //
" last\n" + //
- " true\n" + //
+ " false\n" + //
" time\n" + //
" time,temperature&time>=max(time)-3days&.draw=lines\n"
+ //
@@ -38903,7 +38904,7 @@ private static String xmlFragment_testNcCF7SampleDimensions() throws URISyntaxEx
" \n" + //
" \n" + //
" \n" + //
- " true\n" + //
+ " false\n" + //
" time\n" + //
" false\n" + //
" \n" + //
@@ -39141,7 +39142,7 @@ private static String xmlFragment_ArgoFloats() throws URISyntaxException {
" \n" + //
" false\n" + //
" \n" + //
- " true\n" + //
+ " false\n" + //
" \n" +
+ " TimeSeries\n" + //
+ " PLATFORMCODE,SOURCE,latitude,longitude\n" + //
+
+ " (local files)\n" +
+ " http://www.emodnet-physics.eu\n" + //
+ " AAD\n" + //
+ " AAD - Collection of atmospheric pressure at altitude (ATMP) TimeSeries - IN SITU MultiPointTimeSeriesObservation\n" + //
+ " AAD - Collection of atmospheric pressure at altitude (ATMP) TimeSeries - IN SITU MultiPointTimeSeriesObservation\n" + //
+ "\t \n" + //
+ "\t\n" + //
+ "\t\tglobal:platformCode\n" + //
+ "\t\tPLATFORMCODE\n" + //
+ "\t\tString\n" + //
+ "\t\t\n" + //
+ "\t\t\tEMODnet Platform Code\n" + //
+ "\t\t\ttimeseries_id\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t \n" + //
+ "\t\n" + //
+ "\t\t=\"aad\"\n" + //
+ "\t\tSOURCE\n" + //
+ "\t\tString\n" + //
+ "\t\t\n" + //
+ "\t\t\tsource\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t \n" + //
+ "\t\n" + //
+ "\t\tvariable:ATMP:sensor\n" + //
+ "\t\tSENSOR\n" + //
+ "\t\tString\n" + //
+ "\t\t\n" + //
+ "\t\t\tPlatform Sensor\n" +
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t \n" + //
+ "\t\n" + //
+ "\t\tTIME\n" + //
+ "\t\ttime\n" + //
+ "\t\tdouble\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tTIME_QC\n" + //
+ "\t\tTIME_QC\n" + //
+ "\t\tshort\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tDEPTH\n" + //
+ "\t\tdepth\n" + //
+ "\t\tdouble\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tDEPTH_QC\n" + //
+ "\t\tDEPTH_QC\n" + //
+ "\t\tshort\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ "\t\t\tdepth\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tLATITUDE\n" + //
+ "\t\tlatitude\n" + //
+ "\t\tdouble\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ "\t\t\tnull\n" + //
+ "\t\t\tgeographical coordinates, WGS84 projection\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t \n" + //
+ "\t\n" + //
+ "\t\tLONGITUDE\n" + //
+ "\t\tlongitude\n" + //
+ "\t\tdouble\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\tnull\n" + //
+ "\t\t\tgeographical coordinates, WGS84 projection\n" + //
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tPOSITION_QC\n" + //
+ "\t\tPOSITION_QC\n" + //
+ "\t\tshort\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tATMP\n" + //
+ "\t\tATMP\n" + //
+ "\t\tdouble\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tATMP_QC\n" + //
+ "\t\tATMP_QC\n" + //
+ "\t\tshort\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\tATMP_DM\n" + //
+ "\t\tATMP_DM\n" + //
+ "\t\tchar\n" + //
+ "\t\t\n" + //
+ "\t\t\tnull\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\t=var p=row.columnString(\"global:platformCode\");var s=\"aad\";var x=\"https://data-erddap.emodnet-physics.eu/erddap/tabledap/EP_PLATFORMS_METADATA.htmlTable?&PLATFORMCODE=%22\"+p+\"%22&integrator_id=%22\"+s+\"%22&distinct()\";return x\n" + //
+ "\t\turl_metadata\n" + //
+ "\t\tString\n" + //
+ "\t\t\n" + //
+ "\t\t\tMetadata Link\n" + //
+ " Unknown\n" +
+ "\t\t\n" + //
+ "\t\n" + //
+ "\t\n" + //
+ "\t\t=25\n" + //
+ "\t\tqc_entity\n" + //
+ "\t\tint\n" + //
+ "\t\t\n" + //
+ " Unknown\n" +
+ "\t\t\t\"https://er2webapps.emodnet-physics.eu/erddap/tabledap/qc_reference_table.htmlTable\n" + //
+ "\t\t\n" + //
+ "\t\n" + //
+ "
\n";
+ }
+
public static EDD gettestTreatDimensionsAs() throws Throwable {
return EDD.oneFromXmlFragment(null,
xmlFragment_testTreatDimensionsAs());
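(Every fragment follows this pattern: xmlFragment_* returns a datasets.xml chunk and a gettest* wrapper instantiates it via EDD.oneFromXmlFragment. A sketch of the wrapper the TS_ATMP_AAD fragment added above would presumably get; the method name is assumed, not part of this patch.)

    public static EDD gettestTS_ATMP_AAD() throws Throwable {
      return EDD.oneFromXmlFragment(null, xmlFragment_TS_ATMP_AAD());
    }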
@@ -40931,7 +41105,7 @@ private static String xmlFragment_testLong() throws URISyntaxException {
" \n" + //
" \n" + //
" \n" + //
- " true\n" + //
+ " false\n" + //
" \n" + //
" false\n" + //
" \n" + //
" false\n" + //
" \n" + //
- " true\n" + //
+ " false\n" + //
" \n" + //
" false\n" + //
" \n" + //
- " true\n" + //
+ " false\n" + //
"