From cf41ac007ae569d573c8a1799c32fb2c6c63837f Mon Sep 17 00:00:00 2001 From: Bob Simons Date: Thu, 18 May 2017 15:11:22 -0700 Subject: [PATCH] v1.76 --- WEB-INF/ArchiveADataset.bat | 2 +- WEB-INF/ArchiveADataset.sh | 2 +- WEB-INF/DasDds.bat | 2 +- WEB-INF/DasDds.sh | 2 +- WEB-INF/FileVisitorDNLS.sh | 2 +- WEB-INF/GenerateDatasetsXml.bat | 2 +- WEB-INF/GenerateDatasetsXml.sh | 2 +- .../classes/com/cohort/array/Attributes.java | 103 +- .../classes/com/cohort/array/ByteArray.java | 23 +- .../classes/com/cohort/array/CharArray.java | 378 +- .../classes/com/cohort/array/DoubleArray.java | 17 +- .../classes/com/cohort/array/FloatArray.java | 16 +- .../classes/com/cohort/array/IntArray.java | 15 +- .../classes/com/cohort/array/LongArray.java | 35 +- .../com/cohort/array/PrimitiveArray.java | 561 ++- .../classes/com/cohort/array/ShortArray.java | 41 +- .../classes/com/cohort/array/StringArray.java | 349 +- WEB-INF/classes/com/cohort/ema/EmaClass.java | 4 +- .../classes/com/cohort/util/Calendar2.java | 328 +- WEB-INF/classes/com/cohort/util/Math2.java | 24 +- WEB-INF/classes/com/cohort/util/String2.java | 538 +- .../com/cohort/util/String2LogFactory.java | 2 +- WEB-INF/classes/com/cohort/util/Test.java | 7 + WEB-INF/classes/com/cohort/util/TestUtil.java | 417 +- WEB-INF/classes/com/cohort/util/XML.java | 12 +- WEB-INF/classes/dods/dap/DFloat64.java | 2 + WEB-INF/classes/dods/dap/DInt16.java | 1 + WEB-INF/classes/dods/dap/DString.java | 11 +- .../classes/dods/dap/parser/DASParser.java | 6 +- .../gov/noaa/pfel/coastwatch/Browser.java | 8 +- .../gov/noaa/pfel/coastwatch/HelloWorld.java | 4 +- .../gov/noaa/pfel/coastwatch/Projects.java | 418 +- .../gov/noaa/pfel/coastwatch/TestAll.java | 250 +- .../gov/noaa/pfel/coastwatch/TimePeriods.java | 2 +- .../pfel/coastwatch/TrajectoryScreen.java | 4 +- .../pfel/coastwatch/griddata/DataHelper.java | 9 +- .../coastwatch/griddata/FileNameUtility.java | 2 +- .../griddata/GenerateThreddsXml.java | 18 +- .../noaa/pfel/coastwatch/griddata/Grid.java | 36 +- .../pfel/coastwatch/griddata/GridDataSet.java | 2 +- .../griddata/GridDataSetAnomaly.java | 2 +- .../griddata/GridDataSetCWLocal.java | 9 +- .../GridDataSetCWLocalClimatology.java | 9 +- .../griddata/GridDataSetOpendap.java | 15 +- .../griddata/GridDataSetThredds.java | 37 +- .../pfel/coastwatch/griddata/NcHelper.java | 172 +- ...Nux10S1day_20050712_x-135_X-105_y22_Y50.nc | Bin 62168 -> 62168 bytes .../pfel/coastwatch/griddata/Opendap.java | 8 +- .../pfel/coastwatch/griddata/OpendapDump.java | 4 +- .../coastwatch/griddata/OpendapHelper.java | 125 +- .../pfel/coastwatch/griddata/SaveOpendap.java | 13 +- .../pfel/coastwatch/griddata/TwoGrids.java | 20 +- .../pfel/coastwatch/netcheck/HttpTest.java | 2 +- .../pfel/coastwatch/netcheck/NetCheck.java | 11 +- .../pfel/coastwatch/netcheck/OpendapTest.java | 10 +- .../noaa/pfel/coastwatch/netcheck/Touch.java | 4 +- .../pointdata/CacheOpendapStation.java | 21 +- .../coastwatch/pointdata/DigirHelper.java | 16 +- .../coastwatch/pointdata/NdbcMetStation.java | 121 +- .../PointDataSetFromStationVariables.java | 23 +- .../pointdata/StationVariableNc4D.java | 9 +- .../noaa/pfel/coastwatch/pointdata/Table.java | 4427 +++++++++++------ .../pointdata/TableDataSet4DNc.java | 8 +- .../pfel/coastwatch/pointdata/TwoTable.java | 8 +- .../pfel/coastwatch/sgt/CompoundColorMap.java | 2 +- .../pfel/coastwatch/util/FileVisitorDNLS.java | 471 +- .../gov/noaa/pfel/coastwatch/util/SSR.java | 149 +- .../pfel/coastwatch/util/SimpleXMLReader.java | 21 +- .../noaa/pfel/coastwatch/util/TestSSR.java | 2 
+- .../gov/noaa/pfel/erddap/ArchiveADataset.java | 1121 ++++- .../classes/gov/noaa/pfel/erddap/DasDds.java | 14 +- .../classes/gov/noaa/pfel/erddap/Erddap.java | 476 +- .../noaa/pfel/erddap/GenerateDatasetsXml.java | 69 +- .../gov/noaa/pfel/erddap/dataset/EDD.java | 1008 +++- .../gov/noaa/pfel/erddap/dataset/EDDGrid.java | 612 ++- .../EDDGridAggregateExistingDimension.java | 23 +- .../noaa/pfel/erddap/dataset/EDDGridCopy.java | 12 +- .../pfel/erddap/dataset/EDDGridFromDap.java | 765 ++- .../erddap/dataset/EDDGridFromEDDTable.java | 13 +- .../erddap/dataset/EDDGridFromErddap.java | 53 +- .../pfel/erddap/dataset/EDDGridFromFiles.java | 144 +- .../erddap/dataset/EDDGridFromMatFiles.java | 11 +- .../dataset/EDDGridFromMergeIRFiles.java | 29 +- .../erddap/dataset/EDDGridFromNcFiles.java | 1122 ++++- .../dataset/EDDGridFromNcFilesUnpacked.java | 36 +- .../pfel/erddap/dataset/EDDGridFromNcLow.java | 28 +- .../pfel/erddap/dataset/EDDGridLonPM180.java | 169 +- .../noaa/pfel/erddap/dataset/EDDTable.java | 1117 +++-- .../pfel/erddap/dataset/EDDTableCopy.java | 15 +- .../pfel/erddap/dataset/EDDTableCopyPost.java | 14 +- .../dataset/EDDTableFromAllDatasets.java | 12 +- .../dataset/EDDTableFromAsciiFiles.java | 107 +- .../dataset/EDDTableFromAsciiService.java | 8 +- .../dataset/EDDTableFromAsciiServiceNOS.java | 276 +- .../dataset/EDDTableFromAwsXmlFiles.java | 11 +- .../pfel/erddap/dataset/EDDTableFromBMDE.java | 6 +- .../erddap/dataset/EDDTableFromCassandra.java | 177 +- .../EDDTableFromColumnarAsciiFiles.java | 110 +- .../dataset/EDDTableFromDapSequence.java | 227 +- .../erddap/dataset/EDDTableFromDatabase.java | 22 +- .../erddap/dataset/EDDTableFromEDDGrid.java | 29 +- .../erddap/dataset/EDDTableFromErddap.java | 1472 ++---- .../erddap/dataset/EDDTableFromFileNames.java | 8 +- .../erddap/dataset/EDDTableFromFiles.java | 361 +- .../erddap/dataset/EDDTableFromHttpGet.java | 29 +- .../dataset/EDDTableFromHyraxFiles.java | 60 +- .../pfel/erddap/dataset/EDDTableFromMWFS.java | 2 +- .../dataset/EDDTableFromMultidimNcFiles.java | 1148 ++++- .../pfel/erddap/dataset/EDDTableFromNOS.java | 40 +- .../erddap/dataset/EDDTableFromNWISDV.java | 48 +- .../erddap/dataset/EDDTableFromNcCFFiles.java | 77 +- .../erddap/dataset/EDDTableFromNcFiles.java | 1566 ++++-- .../dataset/EDDTableFromNccsvFiles.java | 2244 +++++++++ .../pfel/erddap/dataset/EDDTableFromOBIS.java | 13 +- .../dataset/EDDTableFromPostDatabase.java | 6 +- .../dataset/EDDTableFromPostNcFiles.java | 8 +- .../pfel/erddap/dataset/EDDTableFromSOS.java | 71 +- .../dataset/EDDTableFromThreddsFiles.java | 93 +- .../erddap/dataset/EDDTableFromWFSFiles.java | 17 +- .../noaa/pfel/erddap/dataset/FromErddap.java | 7 +- .../pfel/erddap/dataset/GridDataAccessor.java | 33 +- .../dataset/OutputStreamFromHttpResponse.java | 6 +- .../erddap/dataset/OutputStreamSource.java | 2 +- .../dataset/OutputStreamSourceSimple.java | 2 +- .../erddap/dataset/TableWriterDistinct.java | 2 +- .../erddap/dataset/TableWriterDodsAscii.java | 28 +- .../erddap/dataset/TableWriterEsriCsv.java | 41 +- .../erddap/dataset/TableWriterGeoJson.java | 10 +- .../erddap/dataset/TableWriterHtmlTable.java | 39 +- .../pfel/erddap/dataset/TableWriterJson.java | 30 +- .../pfel/erddap/dataset/TableWriterJsonl.java | 220 + .../pfel/erddap/dataset/TableWriterNccsv.java | 212 + .../erddap/dataset/TableWriterOrderBy.java | 2 +- .../dataset/TableWriterOrderByClosest.java | 150 + .../dataset/TableWriterOrderByLimit.java | 150 + .../erddap/dataset/TableWriterOrderByMax.java | 2 +- 
.../erddap/dataset/TableWriterOrderByMin.java | 2 +- .../dataset/TableWriterOrderByMinMax.java | 2 +- .../dataset/TableWriterSeparatedValue.java | 37 +- .../noaa/pfel/erddap/util/CfToFromGcmd.java | 6 +- .../gov/noaa/pfel/erddap/util/EDStatic.java | 123 +- .../gov/noaa/pfel/erddap/util/EDUnits.java | 12 +- .../gov/noaa/pfel/erddap/util/FishBase.java | 8 +- .../pfel/erddap/util/PersistentTable.java | 2 +- .../gov/noaa/pfel/erddap/util/Projects2.java | 12 +- .../gov/noaa/pfel/erddap/util/messages.xml | 218 +- .../gov/noaa/pfel/erddap/variable/EDV.java | 27 +- .../pfel/erddap/variable/EDVTimeStamp.java | 41 +- .../erddap/variable/EDVTimeStampGridAxis.java | 15 +- download/AccessToPrivateDatasets.html | 206 + download/EDDTableFromEML.html | 14 +- download/NCCSV.html | 857 ++++ download/changes.html | 322 +- download/grids.html | 101 +- download/sampleExcel_1.png | Bin 0 -> 81801 bytes download/sampleExcel_2.png | Bin 0 -> 20489 bytes download/setup.html | 565 ++- download/setupDatasetsXml.html | 1725 +++++-- images/favicon.ico | Bin 894 -> 4286 bytes 159 files changed, 21879 insertions(+), 7827 deletions(-) create mode 100644 WEB-INF/classes/gov/noaa/pfel/erddap/dataset/EDDTableFromNccsvFiles.java create mode 100644 WEB-INF/classes/gov/noaa/pfel/erddap/dataset/TableWriterJsonl.java create mode 100644 WEB-INF/classes/gov/noaa/pfel/erddap/dataset/TableWriterNccsv.java create mode 100644 WEB-INF/classes/gov/noaa/pfel/erddap/dataset/TableWriterOrderByClosest.java create mode 100644 WEB-INF/classes/gov/noaa/pfel/erddap/dataset/TableWriterOrderByLimit.java create mode 100644 download/AccessToPrivateDatasets.html create mode 100644 download/NCCSV.html create mode 100644 download/sampleExcel_1.png create mode 100644 download/sampleExcel_2.png diff --git a/WEB-INF/ArchiveADataset.bat b/WEB-INF/ArchiveADataset.bat index 3db17c9e3..fd730c0b5 100644 --- a/WEB-INF/ArchiveADataset.bat +++ b/WEB-INF/ArchiveADataset.bat @@ -1,4 +1,4 @@ rem This is the Windows batch file to run ArchiveADataset. 
rem See http://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#Tools -java -cp ./classes;../../../lib/servlet-api.jar;lib/activation.jar;lib/axis.jar;lib/cassandra-driver-core.jar;lib/netty-all.jar;lib/guava.jar;lib/metrics-core.jar;lib/lz4.jar;lib/snappy-java.jar;lib/commons-compress.jar;lib/commons-discovery.jar;lib/itext-1.3.1.jar;lib/jaxrpc.jar;lib/joda-time.jar;lib/lucene-core.jar;lib/mail.jar;lib/netcdfAll-latest.jar;lib/slf4j.jar;lib/postgresql.jdbc.jar;lib/saaj.jar;lib/wsdl4j.jar;lib/aws-java-sdk.jar;lib/commons-codec.jar;lib/commons-logging.jar;lib/fluent-hc.jar;lib/httpclient.jar;lib/httpclient-cache.jar;lib/httpcore.jar;lib/httpmime.jar;lib/jna.jar;lib/jna-platform.jar;lib/jackson-annotations.jar;lib/jackson-core.jar;lib/jackson-databind.jar -Xms1500M -Xmx1500M gov.noaa.pfel.erddap.ArchiveADataset %* +java -cp ./classes;../../../lib/servlet-api.jar;lib/activation.jar;lib/axis.jar;lib/cassandra-driver-core.jar;lib/netty-all.jar;lib/guava.jar;lib/metrics-core.jar;lib/lz4.jar;lib/snappy-java.jar;lib/commons-compress.jar;lib/commons-discovery.jar;lib/itext-1.3.1.jar;lib/jaxrpc.jar;lib/lucene-core.jar;lib/mail.jar;lib/netcdfAll-latest.jar;lib/slf4j.jar;lib/postgresql.jdbc.jar;lib/saaj.jar;lib/wsdl4j.jar;lib/aws-java-sdk.jar;lib/commons-codec.jar;lib/commons-logging.jar;lib/fluent-hc.jar;lib/httpclient.jar;lib/httpclient-cache.jar;lib/httpcore.jar;lib/httpmime.jar;lib/jna.jar;lib/jna-platform.jar;lib/jackson-annotations.jar;lib/jackson-core.jar;lib/jackson-databind.jar -Xms1500M -Xmx1500M gov.noaa.pfel.erddap.ArchiveADataset %* diff --git a/WEB-INF/ArchiveADataset.sh b/WEB-INF/ArchiveADataset.sh index 190659b93..a6a370209 100644 --- a/WEB-INF/ArchiveADataset.sh +++ b/WEB-INF/ArchiveADataset.sh @@ -5,7 +5,7 @@ cp1="./classes:../../../lib/servlet-api.jar:lib/activation.jar:lib/axis.jar" cp2=":lib/cassandra-driver-core.jar:lib/netty-all.jar:lib/guava.jar:lib/metrics-core.jar:lib/lz4.jar:lib/snappy-java.jar" cp3=":lib/commons-compress.jar:lib/commons-discovery.jar:lib/itext-1.3.1.jar" -cp4=":lib/jaxrpc.jar:lib/joda-time.jar:lib/lucene-core.jar" +cp4=":lib/jaxrpc.jar:lib/lucene-core.jar" cp5=":lib/mail.jar:lib/netcdfAll-latest.jar:lib/slf4j.jar:lib/postgresql.jdbc.jar" cp6=":lib/saaj.jar:lib/wsdl4j.jar" cp7=":lib/aws-java-sdk.jar:lib/commons-codec.jar:lib/commons-logging.jar" diff --git a/WEB-INF/DasDds.bat b/WEB-INF/DasDds.bat index a5209a613..841b92f3d 100644 --- a/WEB-INF/DasDds.bat +++ b/WEB-INF/DasDds.bat @@ -1,4 +1,4 @@ rem This is the Windows batch file to run DasDds. 
rem See http://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#Tools -java -cp ./classes;../../../lib/servlet-api.jar;lib/activation.jar;lib/axis.jar;lib/cassandra-driver-core.jar;lib/netty-all.jar;lib/guava.jar;lib/metrics-core.jar;lib/lz4.jar;lib/snappy-java.jar;lib/commons-compress.jar;lib/commons-discovery.jar;lib/itext-1.3.1.jar;lib/jaxrpc.jar;lib/joda-time.jar;lib/lucene-core.jar;lib/mail.jar;lib/netcdfAll-latest.jar;lib/slf4j.jar;lib/postgresql.jdbc.jar;lib/saaj.jar;lib/wsdl4j.jar;lib/aws-java-sdk.jar;lib/commons-codec.jar;lib/commons-logging.jar;lib/fluent-hc.jar;lib/httpclient.jar;lib/httpclient-cache.jar;lib/httpcore.jar;lib/httpmime.jar;lib/jna.jar;lib/jna-platform.jar;lib/jackson-annotations.jar;lib/jackson-core.jar;lib/jackson-databind.jar -Xms1000M -Xmx1000M gov.noaa.pfel.erddap.DasDds %* +java -cp ./classes;../../../lib/servlet-api.jar;lib/activation.jar;lib/axis.jar;lib/cassandra-driver-core.jar;lib/netty-all.jar;lib/guava.jar;lib/metrics-core.jar;lib/lz4.jar;lib/snappy-java.jar;lib/commons-compress.jar;lib/commons-discovery.jar;lib/itext-1.3.1.jar;lib/jaxrpc.jar;lib/lucene-core.jar;lib/mail.jar;lib/netcdfAll-latest.jar;lib/slf4j.jar;lib/postgresql.jdbc.jar;lib/saaj.jar;lib/wsdl4j.jar;lib/aws-java-sdk.jar;lib/commons-codec.jar;lib/commons-logging.jar;lib/fluent-hc.jar;lib/httpclient.jar;lib/httpclient-cache.jar;lib/httpcore.jar;lib/httpmime.jar;lib/jna.jar;lib/jna-platform.jar;lib/jackson-annotations.jar;lib/jackson-core.jar;lib/jackson-databind.jar -Xms1000M -Xmx1000M gov.noaa.pfel.erddap.DasDds %* diff --git a/WEB-INF/DasDds.sh b/WEB-INF/DasDds.sh index 7f3b131cd..c1c976ff0 100644 --- a/WEB-INF/DasDds.sh +++ b/WEB-INF/DasDds.sh @@ -5,7 +5,7 @@ cp1="./classes:../../../lib/servlet-api.jar:lib/activation.jar:lib/axis.jar" cp2=":lib/cassandra-driver-core.jar:lib/netty-all.jar:lib/guava.jar:lib/metrics-core.jar:lib/lz4.jar:lib/snappy-java.jar" cp3=":lib/commons-compress.jar:lib/commons-discovery.jar:lib/itext-1.3.1.jar" -cp4=":lib/jaxrpc.jar:lib/joda-time.jar:lib/lucene-core.jar" +cp4=":lib/jaxrpc.jar:lib/lucene-core.jar" cp5=":lib/mail.jar:lib/netcdfAll-latest.jar:lib/slf4j.jar:lib/postgresql.jdbc.jar" cp6=":lib/saaj.jar:lib/wsdl4j.jar" cp7=":lib/aws-java-sdk.jar:lib/commons-codec.jar:lib/commons-logging.jar" diff --git a/WEB-INF/FileVisitorDNLS.sh b/WEB-INF/FileVisitorDNLS.sh index 91f0f71e8..6dd60cdaf 100644 --- a/WEB-INF/FileVisitorDNLS.sh +++ b/WEB-INF/FileVisitorDNLS.sh @@ -4,7 +4,7 @@ cp1="./classes:../../../lib/servlet-api.jar:lib/activation.jar:lib/axis.jar" cp2=":lib/cassandra-driver-core.jar:lib/netty-all.jar:lib/guava.jar:lib/metrics-core.jar:lib/lz4.jar:lib/snappy-java.jar" cp3=":lib/commons-compress.jar:lib/commons-discovery.jar:lib/itext-1.3.1.jar" -cp4=":lib/jaxrpc.jar:lib/joda-time.jar:lib/lucene-core.jar" +cp4=":lib/jaxrpc.jar:lib/lucene-core.jar" cp5=":lib/mail.jar:lib/netcdfAll-latest.jar:lib/slf4j.jar:lib/postgresql.jdbc.jar" cp6=":lib/saaj.jar:lib/wsdl4j.jar" cp7=":lib/aws-java-sdk.jar:lib/commons-codec.jar:lib/commons-logging.jar" diff --git a/WEB-INF/GenerateDatasetsXml.bat b/WEB-INF/GenerateDatasetsXml.bat index 82ac08c53..3ec24cde8 100644 --- a/WEB-INF/GenerateDatasetsXml.bat +++ b/WEB-INF/GenerateDatasetsXml.bat @@ -1,4 +1,4 @@ rem This is the Windows batch file to run GenerateDatasetsXml. 
rem See http://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#Tools -java -cp ./classes;../../../lib/servlet-api.jar;lib/activation.jar;lib/axis.jar;lib/cassandra-driver-core.jar;lib/netty-all.jar;lib/guava.jar;lib/metrics-core.jar;lib/lz4.jar;lib/snappy-java.jar;lib/commons-compress.jar;lib/commons-discovery.jar;lib/itext-1.3.1.jar;lib/jaxrpc.jar;lib/joda-time.jar;lib/lucene-core.jar;lib/mail.jar;lib/netcdfAll-latest.jar;lib/slf4j.jar;lib/postgresql.jdbc.jar;lib/saaj.jar;lib/wsdl4j.jar;lib/aws-java-sdk.jar;lib/commons-codec.jar;lib/commons-logging.jar;lib/fluent-hc.jar;lib/httpclient.jar;lib/httpclient-cache.jar;lib/httpcore.jar;lib/httpmime.jar;lib/jna.jar;lib/jna-platform.jar;lib/jackson-annotations.jar;lib/jackson-core.jar;lib/jackson-databind.jar -Xms1000M -Xmx1000M gov.noaa.pfel.erddap.GenerateDatasetsXml %* +java -cp ./classes;../../../lib/servlet-api.jar;lib/activation.jar;lib/axis.jar;lib/cassandra-driver-core.jar;lib/netty-all.jar;lib/guava.jar;lib/metrics-core.jar;lib/lz4.jar;lib/snappy-java.jar;lib/commons-compress.jar;lib/commons-discovery.jar;lib/itext-1.3.1.jar;lib/jaxrpc.jar;lib/lucene-core.jar;lib/mail.jar;lib/netcdfAll-latest.jar;lib/slf4j.jar;lib/postgresql.jdbc.jar;lib/saaj.jar;lib/wsdl4j.jar;lib/aws-java-sdk.jar;lib/commons-codec.jar;lib/commons-logging.jar;lib/fluent-hc.jar;lib/httpclient.jar;lib/httpclient-cache.jar;lib/httpcore.jar;lib/httpmime.jar;lib/jna.jar;lib/jna-platform.jar;lib/jackson-annotations.jar;lib/jackson-core.jar;lib/jackson-databind.jar -Xms1000M -Xmx1000M gov.noaa.pfel.erddap.GenerateDatasetsXml %* diff --git a/WEB-INF/GenerateDatasetsXml.sh b/WEB-INF/GenerateDatasetsXml.sh index ba73a6742..d2b8cca7f 100644 --- a/WEB-INF/GenerateDatasetsXml.sh +++ b/WEB-INF/GenerateDatasetsXml.sh @@ -5,7 +5,7 @@ cp1="./classes:../../../lib/servlet-api.jar:lib/activation.jar:lib/axis.jar" cp2=":lib/cassandra-driver-core.jar:lib/netty-all.jar:lib/guava.jar:lib/metrics-core.jar:lib/lz4.jar:lib/snappy-java.jar" cp3=":lib/commons-compress.jar:lib/commons-discovery.jar:lib/itext-1.3.1.jar" -cp4=":lib/jaxrpc.jar:lib/joda-time.jar:lib/lucene-core.jar" +cp4=":lib/jaxrpc.jar:lib/lucene-core.jar" cp5=":lib/mail.jar:lib/netcdfAll-latest.jar:lib/slf4j.jar:lib/postgresql.jdbc.jar" cp6=":lib/saaj.jar:lib/wsdl4j.jar" cp7=":lib/aws-java-sdk.jar:lib/commons-codec.jar:lib/commons-logging.jar" diff --git a/WEB-INF/classes/com/cohort/array/Attributes.java b/WEB-INF/classes/com/cohort/array/Attributes.java index a1832db5f..2accae1cd 100644 --- a/WEB-INF/classes/com/cohort/array/Attributes.java +++ b/WEB-INF/classes/com/cohort/array/Attributes.java @@ -10,6 +10,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.Enumeration; import java.util.Iterator; +import java.util.regex.Pattern; /** * This class holds a list of attributes (name=value, where name is a String @@ -28,7 +29,7 @@ public class Attributes { public static boolean verbose = false; /** The backing data structure. It is thread-safe. */ - private ConcurrentHashMap hashmap = new ConcurrentHashMap(16, 0.75f, 4); + private ConcurrentHashMap<String,PrimitiveArray> hashmap = new ConcurrentHashMap(16, 0.75f, 4); /** * This constructs a new, empty Attributes object. @@ -100,7 +101,7 @@ public Object clone() { * @return the attribute's value (a PrimitiveArray).
*/ public PrimitiveArray get(String name) { - return (PrimitiveArray)hashmap.get(name); + return hashmap.get(name); } /** @@ -314,7 +315,7 @@ public int getRawInt(String name) { * @return the previous value stored for attributeName, or null if none */ public PrimitiveArray remove(String name) { - return (PrimitiveArray)hashmap.remove(name); + return hashmap.remove(name); } /** @@ -330,8 +331,8 @@ public PrimitiveArray remove(String name) { public PrimitiveArray set(String name, PrimitiveArray value) { if (value == null || value.size() == 0 || (value.size() == 1 && value instanceof StringArray && value.getString(0).trim().length() == 0)) - return (PrimitiveArray)hashmap.remove(name); - return (PrimitiveArray)hashmap.put(String2.canonical(name), value); + return hashmap.remove(name); + return hashmap.put(String2.canonical(name), value); } /** @@ -647,21 +648,18 @@ public String toNcString(String prefix, String suffix) { String names[] = getNames(); for (int index = 0; index < names.length; index++) { sb.append(prefix + names[index] + " = "); - Object o = hashmap.get(names[index]); + PrimitiveArray pa = hashmap.get(names[index]); String connect = ""; - if (o instanceof StringArray) { - StringArray sa = (StringArray)o; - int n = sa.size(); + boolean isCharArray = pa instanceof CharArray; + if (pa instanceof StringArray || isCharArray) { + int n = pa.size(); for (int i = 0; i < n; i++) { sb.append(connect); connect = ", "; - //we don't need/want full String2.toJson encoding, just encode \ and ". - String s = String2.replaceAll(sa.get(i), "\\", "\\\\"); // \ becomes \\ - s = String2.replaceAll(s, "\"", "\\\""); // " becomes \" - sb.append("\"" + s + "\""); + sb.append(String2.toJson(pa.getString(i), 65536, isCharArray)); //encodeNewline? } - } else if (o instanceof FloatArray) { - FloatArray fa = (FloatArray)o; + } else if (pa instanceof FloatArray) { + FloatArray fa = (FloatArray)pa; int n = fa.size(); for (int i = 0; i < n; i++) { sb.append(connect); @@ -670,7 +668,7 @@ public String toNcString(String prefix, String suffix) { sb.append("f"); } } else { - sb.append(o.toString()); + sb.append(pa.toString()); } sb.append(suffix + "\n"); } @@ -708,6 +706,8 @@ public boolean equals(Object o) { * This doesn't throw an Exception if a difference is found. * * @param o an object, presumably an Attributes + * @return a string indicating the differences between this Attributes and o, + * or "" if no difference. */ public String testEquals(Object o) { if (o == null) @@ -806,6 +806,20 @@ public void trimAndMakeValidUnicode() { } + /** + * This uses StringArray.fromNccsv() on all StringArray values + * to de-JSON and convert "" to ". + */ + public void fromNccsvStrings() { + Iterator it = hashmap.keySet().iterator(); + while (it.hasNext()) { + String name = (String)it.next(); + PrimitiveArray pa = get(name); + if (pa.elementClass() == String.class) + ((StringArray)pa).fromNccsv(); + } + } + /** * This makes a set of addAttributes which are needed to change a into b. * If an attribute in 'a' needs to be set to null, this sets it to the String @@ -843,6 +857,56 @@ else if (!aPA.equals(bPA)) return addAtts; } + /** + * This writes the attributes for a variable (or *GLOBAL*) to an NCCSV String. + * This doesn't write *SCALAR* or dataType attributes. + * This doesn't change any of the attributes. + * + * @param varName + * @return a string with all of the attributes for a variable (or *GLOBAL*) + * formatted for NCCSV.
+ */ + public String toNccsvString(String varName) { + String nccsvVarName = String2.toNccsvDataString(varName); + StringBuilder sb = new StringBuilder(); + String tName; + + //special case: *GLOBAL* Conventions + if (varName.equals(String2.NCCSV_GLOBAL)) { + tName = "Conventions"; + String val = getString(tName); + if (String2.isSomething(val)) { + if (val.indexOf("NCCSV") < 0) + val += ", " + String2.NCCSV_VERSION; + } else { + val = "COARDS, CF-1.6, ACDD-1.3, " + String2.NCCSV_VERSION; + } + sb.append( + String2.toNccsvDataString(varName) + "," + + String2.toNccsvDataString(tName) + "," + + String2.toNccsvAttString(val) + "\n"); + } + + //each of the attributes + String names[] = getNames(); + for (int ni = 0; ni < names.length; ni++) { + tName = names[ni]; + if (varName.equals(String2.NCCSV_GLOBAL) && tName.equals("Conventions")) + continue; + if (!String2.isSomething(tName) || + tName.equals("_NCProperties")) + continue; + PrimitiveArray tValue = get(tName); + if (tValue == null || tValue.size() == 0 || tValue.toString().length() == 0) + continue; //do nothing + sb.append( + String2.toNccsvDataString(nccsvVarName) + "," + + String2.toNccsvDataString(tName) + "," + + tValue.toNccsvAttString() + "\n"); + } + return sb.toString(); + } + /** * This tests the methods in this class. * @@ -880,7 +944,7 @@ public static void test() throws Exception { Test.ensureEqual(atts.remove("zz"), new IntArray(new int[]{2}), ""); Test.ensureEqual(atts.size(), 9, ""); - ////empty string same as null; attribute removed + //empty string same as null; attribute removed atts.set("zz", "a"); Test.ensureEqual(atts.size(), 10, ""); atts.set("zz", ""); @@ -926,7 +990,7 @@ public static void test() throws Exception { //toString Test.ensureEqual(atts.toString(), " byte=1\n" + - " char=97\n" + + " char=a\n" + " double=3.141592653589793\n" + " float=2.5\n" + " int=1000000\n" + @@ -985,7 +1049,7 @@ public static void test() throws Exception { atts3.set(atts4); Test.ensureEqual(atts3.toString(), " byte=1\n" + - " char=100\n" + + " char=d\n" + " double=3.141592653589793\n" + " float=2.5\n" + " int=1000000\n" + @@ -1055,6 +1119,7 @@ public static void test() throws Exception { Test.ensureEqual(a, b, ""); Test.ensureEqual(a.toString(), b.toString(), ""); + String2.log("*** test Attributes finished successfully."); } diff --git a/WEB-INF/classes/com/cohort/array/ByteArray.java b/WEB-INF/classes/com/cohort/array/ByteArray.java index bb54bba44..0cd2e04ea 100644 --- a/WEB-INF/classes/com/cohort/array/ByteArray.java +++ b/WEB-INF/classes/com/cohort/array/ByteArray.java @@ -138,6 +138,7 @@ public ByteArray(String fileName) throws Exception { * * @param pa the values of pa are interpreted as boolean, which are then * converted to bytes. + * @return a ByteArray */ public static ByteArray toBooleanToByte(PrimitiveArray pa) { int size = pa.size(); @@ -763,7 +764,7 @@ public double getDouble(int index) { * with String2.parseDouble and so may return Double.NaN. 
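A minimal usage sketch of the new NCCSV attribute serialization above (the variable name and attribute values are invented; String-value quoting follows the NCCSV spec in download/NCCSV.html):

    // Hypothetical sketch: serialize one variable's attributes as NCCSV lines.
    Attributes atts = new Attributes();
    atts.set("units", "degree_C");
    atts.set("actual_range", new DoubleArray(new double[]{0.17, 23.58}));
    String lines = atts.toNccsvString("sst");
    // Each attribute becomes one "varName,attName,value" line, with the value
    // rendered by that PrimitiveArray subclass's toNccsvAttString(), e.g.:
    //   sst,actual_range,0.17d,23.58d
    //   sst,units,degree_C
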
*/ public double getUnsignedDouble(int index) { - //or see http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/faq.html#Unsigned + //or see https://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/faq.html#Unsigned return Byte.toUnsignedInt(get(index)); } @@ -952,6 +953,18 @@ public String toString() { return String2.toCSSVString(toArray()); //toArray() get just 'size' elements } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * + * @return an NCCSV attribute String + */ + public String toNccsvAttString() { + StringBuilder sb = new StringBuilder(size * 6); + for (int i = 0; i < size; i++) + sb.append((i == 0? "" : ",") + array[i] + "b"); + return sb.toString(); + } + /** * This sorts the elements in ascending order. * To get the elements in reverse order, just read from the end of the list @@ -1059,11 +1072,11 @@ public void readDis(DataInputStream dis, int n) throws Exception { * @throws Exception if trouble */ public void externalizeForDODS(DataOutputStream dos) throws Exception { - super.externalizeForDODS(dos); + super.externalizeForDODS(dos); //writes as bytes //pad to 4 bytes boundary at end int tSize = size; - while (tSize++ % 4 != 0) + while (tSize++ % 4 != 0) dos.writeByte(0); } @@ -1358,7 +1371,8 @@ public int firstTie() { /** This returns the minimum value that can be held by this class. */ public String minValue() {return "" + Byte.MIN_VALUE;} - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). */ public String maxValue() {return "" + (Byte.MAX_VALUE - 1);} /** @@ -1764,6 +1778,7 @@ public static void test() throws Throwable { Test.ensureEqual(anArray.getString(0), anArray.minValue(), ""); Test.ensureEqual(anArray.getString(0), "-128", ""); Test.ensureEqual(anArray.getString(1), anArray.maxValue(), ""); + } } diff --git a/WEB-INF/classes/com/cohort/array/CharArray.java b/WEB-INF/classes/com/cohort/array/CharArray.java index 31ac757cf..d3a766ddd 100644 --- a/WEB-INF/classes/com/cohort/array/CharArray.java +++ b/WEB-INF/classes/com/cohort/array/CharArray.java @@ -42,13 +42,6 @@ public CharArray() { array = new char[8]; } - /** This indicates if this class' type (e.g., short.class) can be contained in a long. - * The integer type classes overwrite this. - */ - public boolean isIntegerType() { - return true; - } - /** * This returns for cohort missing value for this class (e.g., Integer.MAX_VALUE), * expressed as a double. FloatArray and StringArray return Double.NaN. @@ -93,9 +86,21 @@ public CharArray(char[] anArray) { size = anArray.length; } + /** + * A special method which encodes all the Unicode chars in this to ISO_8859_1. + * + * @return this for convenience + */ + public CharArray toIso88591() { + for (int i = 0; i < size; i++) + array[i] = String2.toIso88591Char(array[i]); + return this; + } + /** * A special constructor which encodes all short values as char values via * ch[i] = (char)sh[i]. + * Thus negative short values become large positive char values. * Note that the cohort 'missingValue' of a CharArray is different from the * missingValue of a ShortArray. * 'size' will equal anArray.length. @@ -109,6 +114,28 @@ public CharArray(short[] shortArray) { array[i] = (char)shortArray[i]; } + /** + * A special method which decodes all short values as char values via + * ch[i] = (char)sa.array[i]. + * Thus negative short values become large positive char values. 
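A small sketch of the unsigned and bit-preserving conversions used here (values invented): getUnsignedDouble() above relies on Byte.toUnsignedInt(), and the short-to-char decoding described in this javadoc is a plain cast:

    byte b = -1;                     // bits 0xFF
    int u = Byte.toUnsignedInt(b);   // 255, so getUnsignedDouble() yields 255.0
    char c = (char)(short)-1;        // '\uffff' (65535): a negative short becomes
                                     // a large positive char code
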
+ * Note that the cohort 'missingValue' of a CharArray is different from the + * missingValue of a ShortArray, and this method does nothing special + * for those values. + * 'capacity' and 'size' will equal sa.size. + * See ShortArray.decodeFromCharArray(). + * + * @param sa ShortArray + */ + public static CharArray fromShortArrayBytes(ShortArray sa) { + int size = sa.size(); + CharArray ca = new CharArray(size, true); //active + char carray[] = ca.array; + short sarray[] = sa.array; + for (int i = 0; i < size; i++) + carray[i] = (char)sarray[i]; + return ca; + } + /** * This is an alternative way to convert a String to a char: * by getting the first char (else Character.MAX_VALUE) @@ -264,7 +291,7 @@ public void atInsert(int index, char value) { * @param value the value, as a String. */ public void atInsertString(int index, String value) { - atInsert(index, Math2.narrowToChar(String2.parseInt(value))); + atInsert(index, firstChar(value)); } /** @@ -274,7 +301,7 @@ public void atInsertString(int index, String value) { * @param value the value, as a String. */ public void addNStrings(int n, String value) { - addN(n, Math2.narrowToChar(String2.parseInt(value))); + addN(n, firstChar(value)); } /** @@ -283,7 +310,7 @@ public void addNStrings(int n, String value) { * @param value the value, as a String. */ public void addString(String value) { - add(Math2.narrowToChar(String2.parseInt(value))); + add(firstChar(value)); } /** @@ -362,12 +389,16 @@ public PrimitiveArray addFromPA(PrimitiveArray otherPA, int otherIndex, int nVal ensureCapacity(size + nValues); System.arraycopy(((CharArray)otherPA).array, otherIndex, array, size, nValues); size += nValues; - return this; - } //add from different type - for (int i = 0; i < nValues; i++) - addInt(otherPA.getInt(otherIndex++)); //add and get do checking + } else if (otherPA.elementClass() == String.class) { + for (int i = 0; i < nValues; i++) + addString(otherPA.getString(otherIndex++)); //add and get do checking + + } else { + for (int i = 0; i < nValues; i++) + addInt(otherPA.getInt(otherIndex++)); //add and get do checking + } return this; } @@ -379,7 +410,9 @@ public PrimitiveArray addFromPA(PrimitiveArray otherPA, int otherIndex, int nVal * @param otherIndex */ public void setFromPA(int index, PrimitiveArray otherPA, int otherIndex) { - setInt(index, otherPA.getInt(otherIndex)); + if (otherPA.elementClass() == String.class) + set(index, firstChar(otherPA.getString(otherIndex))); //add and get do checking + else setInt(index, otherPA.getInt(otherIndex)); } /** @@ -555,16 +588,14 @@ public double[] toDoubleArray() { * This returns a String[] which has 'size' elements. * * @return a String[] which has 'size' elements. - * This treats chars as unsigned shorts. + * This treats chars as length=1 strings. * Character.MAX_VALUE appears as "". */ public String[] toStringArray() { - Math2.ensureMemoryAvailable(8L * size, "CharArray.toStringArray"); //8L is feeble minimal estimate + Math2.ensureMemoryAvailable(6L * size, "CharArray.toStringArray"); String sar[] = new String[size]; - for (int i = 0; i < size; i++) { - char c = array[i]; - sar[i] = c == Character.MAX_VALUE? "" : String.valueOf((int)c); //safe char to int type conversion - } + for (int i = 0; i < size; i++) + sar[i] = getString(i); return sar; } @@ -737,8 +768,38 @@ public void setDouble(int index, double d) { * @return This returns the char as a length-1 String, or "" if it is Character.MAX_VALUE (the cohort missing value).
*/ public String getString(int index) { - char b = get(index); - return b == Character.MAX_VALUE? "" : "" + (int)b; //safe char to int type conversion + char ch = get(index); + return ch == Character.MAX_VALUE? "" : "" + ch; + } + + /** + * Return a value from the array as a String suitable for the data section + * of an NCCSV file, e.g., z \t \u0000 , \", but perhaps (e.g., for chars in + * ",\" ") surrounded by "'[char]'". + * + * @param index the index number 0 ... size-1 + * @return For numeric types, this returns ("" + ar[index]), or "" if NaN or infinity. + * CharArray and StringArray override this. + */ + public String getNccsvDataString(int index) { + char ch = get(index); + return ch == '\uFFFF'? "" : String2.toNccsvDataString("" + ch); + } + + /** + * Return a value from the array as a String suitable for the data section + * of a tsv file, e.g., z \t \u0000 , \". + * + * @param index the index number 0 ... size-1 + * @return For numeric types, this returns ("" + ar[index]), or "" if NaN or infinity. + * CharArray and StringArray override this. + */ + public String getTsvString(int index) { + char ch = get(index); + if (ch == '\uFFFF') + return ""; + String s = String2.toJson("" + ch); + return s.substring(1, s.length() - 1); //remove enclosing quotes } /** @@ -754,18 +815,18 @@ public String getString(int index) { * with String2.parseDouble and so may return Double.NaN. */ public String getRawString(int index) { - return "" + (int)get(index); + return "" + get(index); } /** - * Set a value in the array as a String. + * Set a value in the array from a String. * * @param index the index number 0 .. * @param s the value. The first char of s is used (or Character.MAX_VALUE * if s is empty). */ public void setString(int index, String s) { - set(index, Math2.narrowToChar(String2.parseInt(s))); + set(index, firstChar(s)); } /** @@ -802,7 +863,7 @@ public int indexOf(char lookFor, int startIndex) { * @return the index where 'lookFor' is found, or -1 if not found. */ public int indexOf(String lookFor, int startIndex) { - return indexOf(Math2.roundToChar(String2.parseInt(lookFor)), startIndex); + return indexOf(firstChar(lookFor), startIndex); } /** @@ -831,7 +892,7 @@ public int lastIndexOf(char lookFor, int startIndex) { * @return the index where 'lookFor' is found, or -1 if not found. */ public int lastIndexOf(String lookFor, int startIndex) { - return lastIndexOf(Math2.roundToChar(String2.parseInt(lookFor)), startIndex); + return lastIndexOf(firstChar(lookFor), startIndex); } /** @@ -868,17 +929,21 @@ public String testEquals(Object o) { if (other.size() != size) return "The two CharArrays aren't equal: one has " + size + " value" + (size == 0? "s" : - size == 1? " (#" + (int)array[0] + ")" : //safe char to int type conversion - "s (from #" + (int)array[0] + " to #" + (int)array[size - 1] + ")") + //safe char to int type conversion + size == 1? " (" + getNccsvDataString(0) + ")" : + "s (from " + getNccsvDataString(0) + " to " + + getNccsvDataString(size - 1) + ")") + "; the other has " + other.size() + " value" + (other.size == 0? "s" : - other.size == 1? " (#" + (int)other.array[0] + ")" : //safe char to int type conversion - "s (from #" + (int)other.array[0] + " to #" + (int)other.array[other.size - 1] + ")") + //safe char to int type conversion + other.size == 1?
" (" + other.getNccsvDataString(0) + ")" : //safe char to int type conversion + "s (from " + other.getNccsvDataString(0) + " to " + + other.getNccsvDataString(other.size - 1) + ")") + //safe char to int type conversion "."; for (int i = 0; i < size; i++) if (array[i] != other.array[i]) - return "The two CharArrays aren't equal: this[" + i + "]=#" + (int)array[i] + //safe char to int type conversion - "; other[" + i + "]=#" + (int)other.array[i] + "."; //safe char to int type conversion + return "The two CharArrays aren't equal: this[" + i + "]=" + + getNccsvDataString(i) + //safe char to int type conversion + "; other[" + i + "]=" + + other.getNccsvDataString(i) + "."; //safe char to int type conversion return ""; } @@ -892,6 +957,18 @@ public String toString() { return String2.toCSSVString(toArray()); //toArray() get just 'size' elements } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * + * @return an NCCSV attribute String + */ + public String toNccsvAttString() { + StringBuilder sb = new StringBuilder(size * 6); + for (int i = 0; i < size; i++) + sb.append((i == 0? "\"'" : ",\"'") + String2.toNccsvChar(array[i]) + "'\""); + return sb.toString(); + } + /** * This sorts the elements in ascending order. * To get the elements in reverse order, just read from the end of the list @@ -979,21 +1056,80 @@ public void readDis(DataInputStream dis, int n) throws Exception { array[size++] = dis.readChar(); } + /** - * This reads/appends char values to this PrimitiveArray from a DODS DataInputStream, + * This writes one String to a DataOutputStream in the format DODS + * wants (see www.opendap.org DAP 2.0 standard, section 7.3.2.1). + * See also the XDR standard (http://tools.ietf.org/html/rfc4506#section-4.11). + * Just 8 bits are stored: there is no utf or other unicode support. + * See DAP 2.0 section 3.2.3 says US-ASCII (7bit), so might as well go for compatible common 8bit. + * Ah: dods.dap.DString reader assumes ISO-8859-1, which is first page of unicode. + * + * @param dos + * @param c + * @throws Exception if trouble + */ + public static void externalizeForDODS(DataOutputStream dos, char c) throws Exception { + dos.writeInt(1); //for Strings, just write size once + dos.writeByte(c < 256? c : '?'); //dods.dap.DString reader assumes ISO-8859-1, which is first page of unicode + + //pad to 4 bytes boundary at end + for (int i = 0; i < 3; i++) + dos.writeByte(0); + } + + /** + * This writes all the data to a DataOutputStream in the + * DODS Array format (see www.opendap.org DAP 2.0 standard, section 7.3.2.1). + * See also the XDR standard (http://tools.ietf.org/html/rfc4506#section-4.11). + * + * @param dos + * @throws Exception if trouble + */ + public void externalizeForDODS(DataOutputStream dos) throws Exception { + dos.writeInt(size); + dos.writeInt(size); //yes, a second time + for (int i = 0; i < size; i++) + externalizeForDODS(dos, array[i]); + } + + /** + * This writes one element to a DataOutputStream in the + * DODS Atomic-type format (see www.opendap.org DAP 2.0 standard, section 7.3.2). + * See also the XDR standard (http://tools.ietf.org/html/rfc4506#section-4.11). + * + * @param dos + * @param i the index of the element to be written + * @throws Exception if trouble + */ + public void externalizeForDODS(DataOutputStream dos, int i) throws Exception { + externalizeForDODS(dos, array[i]); + } + + /** + * This reads/appends String values from a StringArray from a DODS DataInputStream, * and is thus the complement of externalizeForDODS. 
* * @param dis * @throws IOException if trouble */ public void internalizeFromDODS(DataInputStream dis) throws java.io.IOException { - int nValues = dis.readInt(); - dis.readInt(); //skip duplicate of nValues - ensureCapacity(size + (long)nValues); - for (int i = 0; i < nValues; i++) - array[size++] = dis.readChar(); + int nStrings = dis.readInt(); + ensureCapacity(size + (long)nStrings); + dis.readInt(); //skip duplicate of nStrings + byte buffer[] = new byte[80]; + for (int i = 0; i < nStrings; i++) { + int nChar = dis.readInt(); //always 1 + dis.readFully(buffer, 0, nChar); + add((char)buffer[0]); + + //pad to 4 bytes boundary at end + while (nChar++ % 4 != 0) + dis.readByte(); + } } + /** * This reads one value from a randomAccessFile. * @@ -1054,6 +1190,9 @@ public void append(PrimitiveArray pa) { ensureCapacity(size + (long)otherSize); if (pa instanceof CharArray) { System.arraycopy(((CharArray)pa).array, 0, array, size, otherSize); + } else if (pa instanceof StringArray) { + for (int i = 0; i < otherSize; i++) + array[size + i] = firstChar(pa.getString(i)); } else { for (int i = 0; i < otherSize; i++) array[size + i] = Math2.narrowToChar(pa.getInt(i)); //this converts mv's @@ -1075,6 +1214,9 @@ public void rawAppend(PrimitiveArray pa) { ensureCapacity(size + (long)otherSize); if (pa instanceof CharArray) { System.arraycopy(((CharArray)pa).array, 0, array, size, otherSize); + } else if (pa instanceof StringArray) { + for (int i = 0; i < otherSize; i++) + array[size + i] = firstChar(pa.getString(i)); } else { for (int i = 0; i < otherSize; i++) array[size + i] = Math2.narrowToChar(pa.getRawInt(i)); //this DOESN'T convert mv's @@ -1174,8 +1316,8 @@ public PrimitiveArray makeIndices(IntArray indices) { * @return the number of values switched */ public int switchFromTo(String tFrom, String tTo) { - char from = Math2.roundToChar(String2.parseDouble(tFrom)); - char to = Math2.roundToChar(String2.parseDouble(tTo)); + char from = firstChar(tFrom); + char to = firstChar(tTo); if (from == to) return 0; int count = 0; @@ -1255,10 +1397,11 @@ public int firstTie() { } /** This returns the minimum value that can be held by this class. */ - public String minValue() {return "0";} + public String minValue() {return "\u0000";} - /** This returns the maximum value that can be held by this class. */ - public String maxValue() {return "" + (int)(Character.MAX_VALUE - 1);} //safe char to int type conversion + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). 
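Two sketches for the CharArray changes above (values invented). First, the String-based adders and setters now keep the first char of the String, via firstChar(), instead of parsing the String as an integer code point:

    CharArray ca = new CharArray();
    ca.addString("zebra");   // stores 'z' (the first char), not parseInt("zebra")
    ca.addString("");        // stores Character.MAX_VALUE, the cohort missing value
    // ca.getString(0) -> "z"  (earlier releases returned "122")
    // ca.getString(1) -> ""   (the missing value appears as "")

Second, externalizeForDODS(dos, 'A') writes each char as a DODS String, so the bytes on the wire are:

    // 00 00 00 01   the String length, 1 (for Strings, written just once)
    // 41            'A' as one ISO-8859-1 byte (chars >= 256 are written as '?')
    // 00 00 00      padding to the next 4-byte boundary
    // internalizeFromDODS() above reverses this: read the length (always 1),
    // read that byte, then skip the padding.
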
*/ + public String maxValue() {return "\uFFFE";} /** * This finds the number of non-missing values, and the index of the min and @@ -1293,14 +1436,14 @@ public static void test() throws Throwable{ //** test default constructor and many of the methods CharArray anArray = new CharArray(); - Test.ensureEqual(anArray.isIntegerType(), true, ""); + Test.ensureEqual(anArray.isIntegerType(), false, ""); Test.ensureEqual(anArray.missingValue(), 65535, ""); anArray.addString(""); Test.ensureEqual(anArray.get(0), (char)65535, ""); Test.ensureEqual(anArray.getRawInt(0), 65535, ""); Test.ensureEqual(anArray.getRawDouble(0), 65535, ""); Test.ensureEqual(anArray.getUnsignedDouble(0), 65535, ""); - Test.ensureEqual(anArray.getRawString(0), "" + 65535, ""); + Test.ensureEqual(anArray.getRawString(0), "\uFFFF", ""); Test.ensureEqual(anArray.getRawNiceDouble(0), 65535, ""); Test.ensureEqual(anArray.getInt(0), Integer.MAX_VALUE, ""); Test.ensureEqual(anArray.getDouble(0), Double.NaN, ""); @@ -1309,23 +1452,27 @@ public static void test() throws Throwable{ //unsignedFactory, which uses unsignedAppend anArray = (CharArray)unsignedFactory(char.class, - new CharArray(new char[] {0, 1, Character.MAX_VALUE, Character.MIN_VALUE})); - Test.ensureEqual(anArray.toString(), "0, 1, 65535, 0", ""); // -> mv + new CharArray(new char[] {0, 1, 65, 252, Character.MAX_VALUE, Character.MIN_VALUE})); + Test.ensureEqual(anArray.toString(), + "\\u0000, \\u0001, A, \\u00fc, \\uffff, \\u0000", ""); // -> mv anArray.clear(); anArray = (CharArray)unsignedFactory(char.class, - new ByteArray(new byte[] {0, 1, Byte.MAX_VALUE, Byte.MIN_VALUE, -1})); - Test.ensureEqual(anArray.toString(), "0, 1, 127, 128, 255", ""); + new ByteArray(new byte[] {0, 1, 65, (byte)252, Byte.MAX_VALUE, Byte.MIN_VALUE, -1})); + Test.ensureEqual(anArray.toString(), + "\\u0000, \\u0001, A, \\u00fc, \\u007f, \\u0080, \\u00ff", ""); anArray.clear(); anArray = (CharArray)unsignedFactory(char.class, - new ShortArray(new short[] {0, 1, Short.MAX_VALUE, Short.MIN_VALUE, -1})); - Test.ensureEqual(anArray.toString(), "0, 1, 32767, 32768, 65535", ""); + new ShortArray(new short[] {0, 1, 65, 252, Short.MAX_VALUE, Short.MIN_VALUE, -1})); + Test.ensureEqual(anArray.toString(), + "\\u0000, \\u0001, A, \\u00fc, \\u7fff, \\u8000, \\uffff", ""); anArray.clear(); anArray = (CharArray)unsignedFactory(char.class, - new IntArray(new int[] {0, 1, Integer.MAX_VALUE, Integer.MIN_VALUE, -1})); - Test.ensureEqual(anArray.toString(), "0, 1, 65535, 65535, 65535", ""); // ->mv + new IntArray(new int[] {0, 1, 65, 252, Integer.MAX_VALUE, Integer.MIN_VALUE, -1})); + Test.ensureEqual(anArray.toString(), + "\\u0000, \\u0001, A, \\u00fc, \\uffff, \\uffff, \\uffff", ""); // ->mv anArray.clear(); Test.ensureEqual(anArray.size(), 0, ""); @@ -1335,7 +1482,7 @@ public static void test() throws Throwable{ Test.ensureEqual(anArray.getInt(0), 122, ""); Test.ensureEqual(anArray.getFloat(0), 122, ""); Test.ensureEqual(anArray.getDouble(0), 122, ""); - Test.ensureEqual(anArray.getString(0), "122", ""); + Test.ensureEqual(anArray.getString(0), "z", ""); Test.ensureEqual(anArray.elementClass(), char.class, ""); char tArray[] = anArray.toArray(); Test.ensureEqual(tArray, new char[]{'z'}, ""); @@ -1436,69 +1583,69 @@ public static void test() throws Throwable{ //** test array constructor - anArray = new CharArray(new char[]{0,2,4,6,8}); + anArray = new CharArray(new char[]{'a','e','i','o','u'}); Test.ensureEqual(anArray.size(), 5, ""); - Test.ensureEqual(anArray.get(0), 0, ""); - Test.ensureEqual(anArray.get(1), 
2, ""); - Test.ensureEqual(anArray.get(2), 4, ""); - Test.ensureEqual(anArray.get(3), 6, ""); - Test.ensureEqual(anArray.get(4), 8, ""); + Test.ensureEqual(anArray.get(0), 'a', ""); + Test.ensureEqual(anArray.get(1), 'e', ""); + Test.ensureEqual(anArray.get(2), 'i', ""); + Test.ensureEqual(anArray.get(3), 'o', ""); + Test.ensureEqual(anArray.get(4), 'u', ""); //test compare - Test.ensureEqual(anArray.compare(1, 3), -4, ""); - Test.ensureEqual(anArray.compare(1, 1), 0, ""); - Test.ensureEqual(anArray.compare(3, 1), 4, ""); + Test.ensureEqual(anArray.compare(1, 3), -10, ""); + Test.ensureEqual(anArray.compare(1, 1), 0, ""); + Test.ensureEqual(anArray.compare(3, 1), 10, ""); //test toString - Test.ensureEqual(anArray.toString(), "0, 2, 4, 6, 8", ""); + Test.ensureEqual(anArray.toString(), "a, e, i, o, u", ""); //test calculateStats anArray.addString(""); double stats[] = anArray.calculateStats(); anArray.remove(5); Test.ensureEqual(stats[STATS_N], 5, ""); - Test.ensureEqual(stats[STATS_MIN], 0, ""); - Test.ensureEqual(stats[STATS_MAX], 8, ""); - Test.ensureEqual(stats[STATS_SUM], 20, ""); + Test.ensureEqual(stats[STATS_MIN], 97, ""); + Test.ensureEqual(stats[STATS_MAX], 117, ""); + Test.ensureEqual(stats[STATS_SUM], 531, ""); //test indexOf(int) indexOf(String) - Test.ensureEqual(anArray.indexOf((char)0, 0), 0, ""); - Test.ensureEqual(anArray.indexOf((char)0, 1), -1, ""); - Test.ensureEqual(anArray.indexOf((char)8, 0), 4, ""); - Test.ensureEqual(anArray.indexOf((char)9, 0), -1, ""); + Test.ensureEqual(anArray.indexOf('a', 0), 0, ""); + Test.ensureEqual(anArray.indexOf('a', 1), -1, ""); + Test.ensureEqual(anArray.indexOf('u', 0), 4, ""); + Test.ensureEqual(anArray.indexOf('t', 0), -1, ""); - Test.ensureEqual(anArray.indexOf("0", 0), 0, ""); - Test.ensureEqual(anArray.indexOf("0", 1), -1, ""); - Test.ensureEqual(anArray.indexOf("8", 0), 4, ""); - Test.ensureEqual(anArray.indexOf("9", 0), -1, ""); + Test.ensureEqual(anArray.indexOf("a", 0), 0, ""); + Test.ensureEqual(anArray.indexOf("a", 1), -1, ""); + Test.ensureEqual(anArray.indexOf("u", 0), 4, ""); + Test.ensureEqual(anArray.indexOf("t", 0), -1, ""); //test remove anArray.remove(1); - Test.ensureEqual(anArray.size(), 4, ""); - Test.ensureEqual(anArray.get(0), 0, ""); - Test.ensureEqual(anArray.get(1), 4, ""); - Test.ensureEqual(anArray.get(3), 8, ""); + Test.ensureEqual(anArray.size(), 4, ""); + Test.ensureEqual(anArray.get(0), 'a', ""); + Test.ensureEqual(anArray.get(1), 'i', ""); + Test.ensureEqual(anArray.get(3), 'u', ""); //test atInsert(index, value) anArray.atInsert(1, (char)22); - Test.ensureEqual(anArray.size(), 5, ""); - Test.ensureEqual(anArray.get(0), 0, ""); - Test.ensureEqual(anArray.get(1),22, ""); - Test.ensureEqual(anArray.get(2), 4, ""); - Test.ensureEqual(anArray.get(4), 8, ""); + Test.ensureEqual(anArray.size(), 5, ""); + Test.ensureEqual(anArray.get(0), 'a', ""); + Test.ensureEqual(anArray.get(1), 22, ""); + Test.ensureEqual(anArray.get(2), 'i', ""); + Test.ensureEqual(anArray.get(4), 'u', ""); anArray.remove(1); //test removeRange anArray.removeRange(4, 4); //make sure it is allowed anArray.removeRange(1, 3); - Test.ensureEqual(anArray.size(), 2, ""); - Test.ensureEqual(anArray.get(0), 0, ""); - Test.ensureEqual(anArray.get(1), 8, ""); + Test.ensureEqual(anArray.size(), 2, ""); + Test.ensureEqual(anArray.get(0), 'a', ""); + Test.ensureEqual(anArray.get(1), 'u', ""); //test (before trimToSize) that toString, toDoubleArray, and toStringArray use 'size' - Test.ensureEqual(anArray.toString(), "0, 8", ""); - 
Test.ensureEqual(anArray.toDoubleArray(), new double[]{0, 8}, ""); - Test.ensureEqual(anArray.toStringArray(), new String[]{"0", "8"}, ""); + Test.ensureEqual(anArray.toString(), "a, u", ""); + Test.ensureEqual(anArray.toDoubleArray(), new double[]{97, 117}, ""); + Test.ensureEqual(anArray.toStringArray(), new String[]{"a", "u"}, ""); //test trimToSize anArray.trimToSize(); @@ -1506,17 +1653,17 @@ public static void test() throws Throwable{ //test equals CharArray anArray2 = new CharArray(); - anArray2.add((char)0); + anArray2.add('a'); Test.ensureEqual(anArray.testEquals("A String"), "The two objects aren't equal: this object is a CharArray; the other is a java.lang.String.", ""); Test.ensureEqual(anArray.testEquals(anArray2), - "The two CharArrays aren't equal: one has 2 values (from #0 to #8); the other has 1 value (#0).", ""); + "The two CharArrays aren't equal: one has 2 values (from a to u); the other has 1 value (a).", ""); Test.ensureTrue(!anArray.equals(anArray2), ""); anArray2.addString("7"); Test.ensureEqual(anArray.testEquals(anArray2), - "The two CharArrays aren't equal: this[1]=#8; other[1]=#7.", ""); + "The two CharArrays aren't equal: this[1]=u; other[1]=7.", ""); Test.ensureTrue(!anArray.equals(anArray2), ""); - anArray2.setString(1, "8"); + anArray2.setString(1, "u"); Test.ensureEqual(anArray.testEquals(anArray2), "", ""); Test.ensureTrue(anArray.equals(anArray2), ""); @@ -1524,22 +1671,22 @@ public static void test() throws Throwable{ Test.ensureEqual(anArray.toArray(), anArray.toObjectArray(), ""); //test toDoubleArray - Test.ensureEqual(anArray.toDoubleArray(), new double[]{0, 8}, ""); + Test.ensureEqual(anArray.toDoubleArray(), new double[]{97, 117}, ""); //test reorder int rank[] = {1, 0}; anArray.reorder(rank); - Test.ensureEqual(anArray.toDoubleArray(), new double[]{8, 0}, ""); + Test.ensureEqual(anArray.toDoubleArray(), new double[]{117, 97}, ""); //** test append and clone anArray = new CharArray(new char[]{(char)1}); anArray.append(new ByteArray(new byte[]{5, 2})); Test.ensureEqual(anArray.toDoubleArray(), new double[]{1, 5, 2}, ""); - anArray.append(new StringArray(new String[]{"a", "9"})); - Test.ensureEqual(anArray.toDoubleArray(), new double[]{1, 5, 2, Double.NaN, 9}, ""); + anArray.append(new StringArray(new String[]{"", "9"})); + Test.ensureEqual(anArray.toDoubleArray(), new double[]{1, 5, 2, Double.NaN, 57}, ""); anArray2 = (CharArray)anArray.clone(); - Test.ensureEqual(anArray2.toDoubleArray(), new double[]{1, 5, 2, Double.NaN, 9}, ""); + Test.ensureEqual(anArray2.toDoubleArray(), new double[]{1, 5, 2, Double.NaN, 57}, ""); //test move anArray = new CharArray(new char[]{0,1,2,3,4}); @@ -1566,40 +1713,40 @@ public static void test() throws Throwable{ //makeIndices anArray = new CharArray(new char[] {25,1,1,10}); IntArray indices = new IntArray(); - Test.ensureEqual(anArray.makeIndices(indices).toString(), "1, 10, 25", ""); + Test.ensureEqual(anArray.makeIndices(indices).toString(), "\\u0001, \\n, \\u0019", ""); Test.ensureEqual(indices.toString(), "2, 0, 0, 1", ""); anArray = new CharArray(new char[] {35,35,Character.MAX_VALUE,1,2}); - Test.ensureEqual(anArray.makeIndices(indices).toString(), "1, 2, 35, 65535", ""); + Test.ensureEqual(anArray.makeIndices(indices).toString(), "\\u0001, \\u0002, #, \\uffff", ""); Test.ensureEqual(indices.toString(), "2, 2, 3, 0, 1", ""); anArray = new CharArray(new char[] {10,20,30,40}); - Test.ensureEqual(anArray.makeIndices(indices).toString(), "10, 20, 30, 40", ""); + 
Test.ensureEqual(anArray.makeIndices(indices).toString(), "\\n, \\u0014, \\u001e, (", ""); Test.ensureEqual(indices.toString(), "0, 1, 2, 3", ""); //switchToFakeMissingValue anArray = new CharArray(new char[] {Character.MAX_VALUE,1,2,Character.MAX_VALUE,3,Character.MAX_VALUE}); Test.ensureEqual(anArray.switchFromTo("", "75"), 3, ""); - Test.ensureEqual(anArray.toString(), "75, 1, 2, 75, 3, 75", ""); + Test.ensureEqual(anArray.toString(), "7, \\u0001, \\u0002, 7, \\u0003, 7", ""); anArray.switchFromTo("75", ""); - Test.ensureEqual(anArray.toString(), "65535, 1, 2, 65535, 3, 65535", ""); + Test.ensureEqual(anArray.toString(), "\\uffff, \\u0001, \\u0002, \\uffff, \\u0003, \\uffff", ""); Test.ensureEqual(anArray.getNMinMaxIndex(), new int[]{3, 1, 4}, ""); //addN anArray = new CharArray(new char[] {25}); anArray.addN(2, (char)5); - Test.ensureEqual(anArray.toString(), "25, 5, 5", ""); + Test.ensureEqual(anArray.toString(), "\\u0019, \\u0005, \\u0005", ""); Test.ensureEqual(anArray.getNMinMaxIndex(), new int[]{3, 2, 0}, ""); //add array anArray.add(new char[]{17, 19}); - Test.ensureEqual(anArray.toString(), "25, 5, 5, 17, 19", ""); + Test.ensureEqual(anArray.toString(), "\\u0019, \\u0005, \\u0005, \\u0011, \\u0013", ""); //subset PrimitiveArray ss = anArray.subset(1, 3, 4); - Test.ensureEqual(ss.toString(), "5, 19", ""); + Test.ensureEqual(ss.toString(), "\\u0005, \\u0013", ""); ss = anArray.subset(0, 1, 0); - Test.ensureEqual(ss.toString(), "25", ""); + Test.ensureEqual(ss.toString(), "\\u0019", ""); ss = anArray.subset(0, 1, -1); Test.ensureEqual(ss.toString(), "", ""); ss = anArray.subset(1, 1, 0); @@ -1657,15 +1804,14 @@ public static void test() throws Throwable{ bitset.set(1); bitset.set(4); anArray.justKeep(bitset); - Test.ensureEqual(anArray.toString(), "11, 44", ""); + Test.ensureEqual(anArray.toString(), "\\u000b, \",\"", ""); //min max anArray = new CharArray(); - anArray.addString(anArray.minValue()); - anArray.addString(anArray.maxValue()); - Test.ensureEqual(anArray.getString(0), anArray.minValue(), ""); - Test.ensureEqual(anArray.getString(0), "0", ""); - Test.ensureEqual(anArray.getString(1), anArray.maxValue(), ""); + anArray.addString("\u0000"); + anArray.addString("\uffff"); + Test.ensureEqual(anArray.getString(0), "\u0000", ""); + Test.ensureEqual(anArray.getString(1), "", ""); } } diff --git a/WEB-INF/classes/com/cohort/array/DoubleArray.java b/WEB-INF/classes/com/cohort/array/DoubleArray.java index becb5223f..7645f8e09 100644 --- a/WEB-INF/classes/com/cohort/array/DoubleArray.java +++ b/WEB-INF/classes/com/cohort/array/DoubleArray.java @@ -589,7 +589,7 @@ public long getLong(int index) { * to Double.NaN. */ public void setLong(int index, long i) { - set(index, i == Long.MAX_VALUE? Double.NaN : i); + set(index, Math2.longToDoubleNaN(i)); } /** @@ -790,6 +790,18 @@ public String toString() { return String2.toCSSVString(toArray()); //toArray() gets just 'size' elements } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: 1.5d, 2.5d + * + * @return an NCCSV attribute String + */ + public String toNccsvAttString() { + StringBuilder sb = new StringBuilder(size * 15); + for (int i = 0; i < size; i++) + sb.append((i == 0? "" : ",") + array[i] + "d"); + return sb.toString(); + } + /** * This sorts the elements in ascending order. * To get the elements in reverse order, just read from the end of the list @@ -1221,7 +1233,8 @@ public String isEvenlySpaced() { /** This returns the minimum value that can be held by this class.
*/ public String minValue() {return "" + -Double.MAX_VALUE;} - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). */ public String maxValue() {return "" + Double.MAX_VALUE;} /** diff --git a/WEB-INF/classes/com/cohort/array/FloatArray.java b/WEB-INF/classes/com/cohort/array/FloatArray.java index 45ce03be6..2ab56c59b 100644 --- a/WEB-INF/classes/com/cohort/array/FloatArray.java +++ b/WEB-INF/classes/com/cohort/array/FloatArray.java @@ -819,6 +819,18 @@ public String toString() { return String2.toCSSVString(toArray()); //toArray() get just 'size' elements } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * + * @return an NCCSV attribute String + */ + public String toNccsvAttString() { + StringBuilder sb = new StringBuilder(size * 11); + for (int i = 0; i < size; i++) + sb.append((i == 0? "" : ",") + array[i] + "f"); + return sb.toString(); + } + /** * This sorts the elements in ascending order. * To get the elements in reverse order, just read from the end of the list @@ -1114,6 +1126,7 @@ public int switchFromTo(String tFrom, String tTo) { } } else { for (int i = 0; i < size; i++) { + //String2.log(">> float.switchFromTo from=" + from + " to=" + to + " i=" + i + " a[i]=" + array[i] + " eq5=" + Math2.almostEqual(5, array[i], from)); if (Math2.almostEqual(5, array[i], from)) { array[i] = to; count++; @@ -1276,7 +1289,8 @@ public String isCrudelyEvenlySpaced() { /** This returns the minimum value that can be held by this class. */ public String minValue() {return "" + -Float.MAX_VALUE;} - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). */ public String maxValue() {return "" + Float.MAX_VALUE;} /** diff --git a/WEB-INF/classes/com/cohort/array/IntArray.java b/WEB-INF/classes/com/cohort/array/IntArray.java index 7f3e59dd7..ee52d43ce 100644 --- a/WEB-INF/classes/com/cohort/array/IntArray.java +++ b/WEB-INF/classes/com/cohort/array/IntArray.java @@ -884,6 +884,18 @@ public String toString() { return String2.toCSSVString(toArray()); //toArray() get just 'size' elements } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * + * @return an NCCSV attribute String + */ + public String toNccsvAttString() { + StringBuilder sb = new StringBuilder(size * 10); + for (int i = 0; i < size; i++) + sb.append((i == 0? "" : ",") + array[i] + "i"); + return sb.toString(); + } + /** * This sorts the elements in ascending order. * To get the elements in reverse order, just read from the end of the list @@ -1271,7 +1283,8 @@ public int firstTie() { /** This returns the minimum value that can be held by this class. */ public String minValue() {return "" + Integer.MIN_VALUE;} - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). 
*/ public String maxValue() {return "" + (Integer.MAX_VALUE - 1);} /** diff --git a/WEB-INF/classes/com/cohort/array/LongArray.java b/WEB-INF/classes/com/cohort/array/LongArray.java index 24b59dfab..fbe153e26 100644 --- a/WEB-INF/classes/com/cohort/array/LongArray.java +++ b/WEB-INF/classes/com/cohort/array/LongArray.java @@ -520,10 +520,8 @@ public Object toObjectArray() { public double[] toDoubleArray() { Math2.ensureMemoryAvailable(8L * size, "LongArray.toDoubleArray"); double dar[] = new double[size]; - for (int i = 0; i < size; i++) { - long tl = array[i]; - dar[i] = tl == Long.MAX_VALUE? Double.NaN : tl; - } + for (int i = 0; i < size; i++) + dar[i] = Math2.longToDoubleNaN(array[i]); return dar; } @@ -646,8 +644,7 @@ public void setFloat(int index, float d) { * Long.MAX_VALUE is returned as Double.NaN. */ public double getDouble(int index) { - long tl = get(index); - return tl == Long.MAX_VALUE? Double.NaN : tl; + return Math2.longToDoubleNaN(get(index)); } /** @@ -660,7 +657,7 @@ public double getDouble(int index) { * with String2.parseDouble and so may return Double.NaN. */ public double getUnsignedDouble(int index) { - //http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/faq.html#Unsigned + //https://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/faq.html#Unsigned // 9,223,372,036,854,775,808 // +9,223,372,036,854,775,808 //=18 446 744 073 709 551 616 @@ -854,6 +851,18 @@ public String toString() { return String2.toCSSVString(toArray()); //toArray() get just 'size' elements } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * + * @return an NCCSV attribute String + */ + public String toNccsvAttString() { + StringBuilder sb = new StringBuilder(size * 16); + for (int i = 0; i < size; i++) + sb.append((i == 0? "" : ",") + array[i] + "L"); + return sb.toString(); + } + /** * This sorts the elements in ascending order. * To get the elements in reverse order, just read from the end of the list @@ -971,8 +980,7 @@ public static double rafReadDouble(RandomAccessFile raf, long start, long index) throws Exception { raf.seek(start + 8*index); - long tl = raf.readLong(); - return tl == Long.MAX_VALUE? Double.NaN : tl; + return Math2.longToDoubleNaN(raf.readLong()); } /** @@ -1020,7 +1028,7 @@ public void append(PrimitiveArray pa) { System.arraycopy(((LongArray)pa).array, 0, array, size, otherSize); } else { for (int i = 0; i < otherSize; i++) - array[size + i] = Math2.roundToLong(pa.getDouble(i)); //this converts mv's + array[size + i] = pa.getLong(i); //this converts mv's } size += otherSize; //do last to minimize concurrency problems } @@ -1040,6 +1048,9 @@ public void rawAppend(PrimitiveArray pa) { ensureCapacity(size + (long)otherSize); if (pa instanceof LongArray) { System.arraycopy(((LongArray)pa).array, 0, array, size, otherSize); + } else if (pa instanceof StringArray) { + for (int i = 0; i < otherSize; i++) + array[size + i] = pa.getLong(i); //just parses the string } else { for (int i = 0; i < otherSize; i++) array[size + i] = Math2.roundToLong(pa.getRawDouble(i)); //this DOESN'T convert mv's @@ -1222,7 +1233,8 @@ public int firstTie() { /** This returns the minimum value that can be held by this class. */ public String minValue() {return "" + Long.MIN_VALUE;} - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). 
*/ public String maxValue() {return "" + (Long.MAX_VALUE - 1);} /** @@ -1245,6 +1257,7 @@ public int[] getNMinMaxIndex() { if (v >= tmax) {tmaxi = i; tmax = v; } } } + //String2.log(">> LongArray.getNMinMaxIndex size=" + size + " n=" + n + " min=" + tmin + " max=" + tmax); return new int[]{n, tmini, tmaxi}; } diff --git a/WEB-INF/classes/com/cohort/array/PrimitiveArray.java b/WEB-INF/classes/com/cohort/array/PrimitiveArray.java index be4d9cf52..e92f81b40 100644 --- a/WEB-INF/classes/com/cohort/array/PrimitiveArray.java +++ b/WEB-INF/classes/com/cohort/array/PrimitiveArray.java @@ -29,7 +29,7 @@ * by designating their MAX_VALUE as the missing value. This is consistent with JGOFS * ("the server will set the missing value field to the largest value * possible for specified type.", - * http://www.opendap.org/server/install-html/install_22.html). + * https://www.opendap.org/server/install-html/install_22.html). * This has the convenient side effect that missing values sort high * (as do NaNs for floats and doubles). * @@ -61,6 +61,8 @@ public abstract class PrimitiveArray { * See also Table.REGEX_OP. */ public final static String REGEX_OP = "=~"; + + /** * These are *not* final so EDStatic can replace them with translated Strings. * These are MessageFormat-style strings, so any single quote ' must be escaped as ''. @@ -184,7 +186,7 @@ public static PrimitiveArray factory(Class elementClass, int capacity, boolean a * or a new pa of a specified type. * * @param elementClass e.g., float.class - * @return a PrimitiveArray + * @return a PrimitiveArray */ public static PrimitiveArray factory(Class elementClass, PrimitiveArray pa) { if (pa.elementClass() == elementClass) @@ -329,20 +331,23 @@ public static PrimitiveArray csvFactory(Class elementClass, String csv) { * This returns a PrimitiveArray of the specified type from the space or comma-separated values. * * @param elementClass e.g., float.class or String.class - * @param ssv For elementClass=String.class, individual values with interior spaces or commas - * must be completely enclosed in double quotes with interior double - * quotes converted to 2 double quotes. For String values without interior spaces or commas, - * you don't have to double quote the whole value. + * @param ssv For elementClass=char.class, encode any special characters (e.g., space, + double quotes, backslash, <#32, or >#127) via their + JSON or NCCSV encodings (e.g., " ", "\"", "\\" or """", "\n", "\u20ac"). * @return a PrimitiveArray */ public static PrimitiveArray ssvFactory(Class elementClass, String ssv) { StringArray sa = StringArray.wordsAndQuotedPhrases(ssv); - if (elementClass == String.class) - return sa; int n = sa.size(); PrimitiveArray pa = factory(elementClass, n, false); - for (int i = 0; i < n; i++) - pa.addString(sa.get(i)); + if (elementClass == char.class) { + CharArray ca = (CharArray)pa; + for (int i = 0; i < n; i++) + ca.add(String2.fromNccsvChar(sa.get(i))); + } else { + for (int i = 0; i < n; i++) + pa.addString(sa.get(i)); + } return pa; } @@ -422,6 +427,26 @@ public static Class elementStringToClass(String type) { throw new IllegalArgumentException("PrimitiveArray.elementStringToClass unsupported type: " + type); } + /** + * This converts an element type String (e.g., "float", "Float", or "FLOAT") to an element type (e.g., float.class), ignoring case.
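+ * <p>A hedged usage sketch (behavior follows the code below):
+ * <pre>
+ *   caseInsensitiveElementStringToClass("Float")   //returns float.class
+ *   caseInsensitiveElementStringToClass("BOOLEAN") //returns byte.class (erddap stores booleans as bytes)
+ * </pre>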
+ * + * @param type an element type string (e.g., "float") + * @return the corresponding element type (e.g., float.class) + */ + public static Class caseInsensitiveElementStringToClass(String type) { + type = type.toLowerCase(); + if (type.equals("double")) return double.class; + if (type.equals("float")) return float.class; + if (type.equals("long")) return long.class; + if (type.equals("int")) return int.class; + if (type.equals("short")) return short.class; + if (type.equals("byte") || + type.equals("boolean"))return byte.class; //erddap stores booleans as bytes + if (type.equals("char")) return char.class; + if (type.equals("string")) return String.class; + throw new IllegalArgumentException("PrimitiveArray.caseInsensitiveElementStringToClass unsupported type: " + type); + } + /** * This indicates the number of bytes per element of the given type. * The value for "String" isn't a constant, so this returns 20. @@ -541,7 +566,7 @@ public String getSqlTypeString(double stringLengthFactor) { * But choices below are fairly safe. * I can't find a table to link java.sql.Types constants to Postgres types. * See postgresql types at - * http://www.postgresql.org/docs/8.2/static/datatype-numeric.html + * https://www.postgresql.org/docs/8.2/static/datatype-numeric.html * * @param sqlType a java.sql.Types constant * @return a PrimitiveArray of the suggested type. @@ -598,8 +623,7 @@ public static boolean isIntegerType(Class type) { type == long.class || type == int.class || type == short.class || - type == byte.class || - type == char.class; + type == byte.class; } /** @@ -953,6 +977,33 @@ public double getRawNiceDouble(int index) { */ abstract public String getString(int index); + /** + * Return a value from the array as a String suitable for the data section + * of an NCCSV file. This is close to a json string. + * StringArray and CharArray overwrite this. + * Note that LongArray doesn't append L -- that is done separately + * by file writers. + * + * @param index the index number 0 ... size-1 + * @return For numeric types, this returns ("" + ar[index]), or "" if NaN or infinity. + * CharArray and StringArray overwrite this. + */ + public String getNccsvDataString(int index) { + return getString(index); + } + + /** + * Return a value from the array as a String suitable for the data section + * of a tsv file. This is close to a json string. + * + * @param index the index number 0 ... size-1 + * @return For numeric types, this returns ("" + ar[index]), or "" if NaN or infinity. + * CharArray and StringArray overwrite this. + */ + public String getTsvString(int index) { + return getString(index); + } + /** * Return a value from the array as a String. * This "raw" variant leaves missingValue from integer data types @@ -1032,15 +1083,14 @@ public String toCSVString() { for (int i = 0; i < size; i++) { if (i > 0) sb.append(","); - sb.append(getString(i)); + sb.append(getNccsvDataString(i)); } return sb.toString(); } /** * This converts the elements into a Comma-Space-Separated-Value (CSSV) String. - * Chars acting like unsigned shorts. - * StringArray overwrites this to specially encode the strings. + * CharArray and StringArray overwrite this to specially encode the strings. 
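+ * <p>E.g. (per the CharArray tests above), a CharArray holding char #11 and ','
+ * renders as: \u000b, ","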
* * @return a Comma-Space-Separated-Value (CSSV) String representation */ @@ -1049,11 +1099,18 @@ public String toString() { for (int i = 0; i < size; i++) { if (i > 0) sb.append(", "); - sb.append(getString(i)); + sb.append(getNccsvDataString(i)); } return sb.toString(); } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * + * @return an NCCSV attribute String + */ + abstract public String toNccsvAttString(); + /** * This returns a JSON-style comma-separated-value list of the elements. * StringArray overwrites this to make a JSON-style representation. @@ -2231,6 +2288,7 @@ public PrimitiveArray simplify() { * IntArray -1 is interpreted as 4294967295). * WARNING: information may be lost from the incoming pa if this * primitiveArray is of a simpler type. + * Also, MAX_VALUE isn't converted to NaN. * * @param pa pa must be the same or a narrower * data type, or the data will be narrowed with Math2.narrowToByte. @@ -2238,7 +2296,7 @@ public PrimitiveArray simplify() { public void unsignedAppend(PrimitiveArray pa) { //this code is used by all subclasses; it isn't over-ridden - if (pa.isIntegerType()) { + if (pa.isIntegerType() || pa.elementClass() == char.class) { int otherSize = pa.size(); ensureCapacity(size + (long)otherSize); if (pa.elementClass() == elementClass()) { //both are the same integer type @@ -2749,7 +2807,7 @@ public PrimitiveArray addOffsetScale(Class destElementClass, double addOffset, d public abstract int switchFromTo(String from, String to); /** - * For non-StringArray, + * For non-StringArray and non-CharArray, * if the primitiveArray has fake _FillValue and/or missing_values (e.g., -9999999), * those values are converted to PrimitiveArray-style missing values * (NaN, or MAX_VALUE for integer types). @@ -2759,18 +2817,19 @@ public PrimitiveArray addOffsetScale(Class destElementClass, double addOffset, d * @return the number of missing values converted */ public int convertToStandardMissingValues(double fakeFillValue, double fakeMissingValue) { - //do nothing to String columns - if (elementClass() == String.class) + //do nothing to String or char columns + if (elementClass() == String.class || + elementClass() == char.class) return 0; //is _FillValue used? switch data to standard mv - //String2.log("Table.convertToStandardMissingValues col=" + column + " fillValue=" + fillValue); + //String2.log(">> Table.convertToStandardMissingValues fillValue=" + fakeFillValue + " missingValue=" + fakeMissingValue); int nSwitched = 0; if (!Double.isNaN(fakeFillValue)) nSwitched += switchFromTo("" + fakeFillValue, ""); //is missing_value used? switch data to standard mv - //String2.log("Table.convertToStandardMissingValues col=" + column + " missingValue=" + missingValue); + //String2.log ... if (!Double.isNaN(fakeMissingValue) && fakeMissingValue != fakeFillValue) //if fakeFillValue==NaN 2nd clause always true (good) nSwitched += switchFromTo("" + fakeMissingValue, ""); @@ -2778,14 +2837,16 @@ public int convertToStandardMissingValues(double fakeFillValue, double fakeMissi } /** - * For any non-StringArray, this changes all standard + * For any non-StringArray and non-CharArray, this changes all standard * missing values (MAX_VALUE or NaN's) to fakeMissingValues. 
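+ * <p>A sketch of the intent: for a DoubleArray holding [1.0, NaN],
+ * switchNaNToFakeMissingValue(-9999) yields [1.0, -9999.0] and returns 1.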
* * @param fakeMissingValue the fake missing value (e.g., -9999999) * @return the number of values switched */ public int switchNaNToFakeMissingValue(double fakeMissingValue) { - if (Math2.isFinite(fakeMissingValue) && elementClass() != String.class) + if (Math2.isFinite(fakeMissingValue) && + elementClass() != String.class && + elementClass() != char.class) return switchFromTo("", "" + fakeMissingValue); return 0; } @@ -2834,6 +2895,19 @@ public int switchFakeMissingValueToNaN(double fakeMissingValue) { */ public abstract int firstTie(); + /** + * This tests if all of the values are identical. + * + * @return true if size == 0 or all of the values are identical. + */ + public boolean allSame() { + for (int i = 1; i < size; i++) { + if (compare(i - 1, i) != 0) + return false; + } + return true; + } + /** * This compares this PrimitiveArray's values to another's, string representation by string representation, * and returns the first index where different. @@ -2960,7 +3034,8 @@ public String smallestBiggestSpacing() { /** This returns the minimum value that can be held by this class. */ public abstract String minValue(); - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). */ public abstract String maxValue(); /** @@ -2973,6 +3048,19 @@ public String smallestBiggestSpacing() { */ public abstract int[] getNMinMaxIndex(); + /** + * This returns the min and max of the non-null, non-"" values (by simple comparison). + * + * @return String[3], 0=""+n (the number of non-null, non-"" values), + * 1=min (as a string), 2=max (as a string). min and max are "" if n=0. + */ + public String[] getNMinMax() { + int nmm[] = getNMinMaxIndex(); + if (nmm[0] == 0) + return new String[]{"0", "", ""}; + return new String[]{"" + nmm[0], getString(nmm[1]), getString(nmm[2])}; + } + /** * Given nHave values and stride, this returns the actual number of points that will be found. * * @param startIndex must be a valid index * @param stride must be at least 1 * @param stopIndex (inclusive) If >= size, it will be changed to size-1. - * @return a new PrimitiveArray with the desired subset. + * @return a new PrimitiveArray of the same type with the desired subset. * It will have a new backing array with a capacity equal to its size. * If stopIndex < startIndex, this returns a PrimitiveArray with size=0; */ @@ -3208,7 +3296,8 @@ public int applyConstraint(boolean morePrecise, BitSet keep, String op, String v } //string - if (elementClass() == String.class) { + if (elementClass() == String.class || + elementClass() == char.class) { //String2.log("applyConstraint(String)"); int nStillGood = 0; for (int row = keep.nextSetBit(0); row >= 0; row = keep.nextSetBit(row + 1)) { @@ -3272,6 +3361,407 @@ public int applyConstraint(boolean morePrecise, BitSet keep, String op, String v return nStillGood; } + /** + * This converts a StringArray with NCCSV attribute values into + * a typed PrimitiveArray. + * + * @param sa Almost always from StringArray.fromNccsv(). E.g., ["7b", "-12b"] + * @return a typed PrimitiveArray, e.g., a ByteArray with [7, -12]. + * If sa is null or sa.size() == 0, this returns sa. + * If sa is interpreted as a StringArray, sa will be returned with + * canonical (perhaps modified) values. + * @throws SimpleException if trouble.
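+ * <p>Example (mirrored by testNccsv() below):
+ * <pre>
+ *   PrimitiveArray pa = parseNccsvAttributes(StringArray.simpleFromNccsv("7b,-12b"));
+ *   //pa is a ByteArray holding [7, -12]
+ * </pre>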
+ */ + public static PrimitiveArray parseNccsvAttributes(StringArray sa) { + if (sa == null || sa.size() == 0) + return sa; + //String2.log("nccsv(sa)=" + sa.toNccsvAttString()); + + //are first and lastChar all the same? e.g., 7b, -12b + int saSize = sa.size(); + boolean firstCharSame = sa.get(0).length() > 0; //initially just first value + boolean lastCharSame = sa.get(0).length() > 0; //initially just first value + char firstChar = ' '; //junk for now + char lastChar = ' '; //junk for now + if (lastCharSame) { + String s = sa.get(0); //it will be length() > 0 (tested above) + firstChar = s.charAt(0); + lastChar = s.charAt(s.length() - 1); + for (int i = 1; i < saSize; i++) { + s = sa.get(i); + if (s.length() == 0 || s.charAt(0) != firstChar) + firstCharSame = false; + if (s.length() == 0 || s.charAt(s.length() - 1) != lastChar) { + lastCharSame = false; + break; + } + } + } + + //what type is it? + if (lastCharSame) { + if (lastChar == 'b') { + if (sa.firstNonMatch(String2.NCCSV_BYTE_ATT_PATTERN) < 0) { + ByteArray ba = new ByteArray(saSize, false); + for (int i = 0; i < saSize; i++) { + String s = sa.get(i); + ba.addInt(String2.parseInt(s.substring(0, s.length() - 1))); + if (ba.get(i) == Byte.MAX_VALUE && !"127b".equals(s)) + throw new SimpleException("Invalid byte value: " + s); + } + return ba; + } //all of these: else fall through to StringArray + } else if (lastChar == 's') { + if (sa.firstNonMatch(String2.NCCSV_SHORT_ATT_PATTERN) < 0) { + ShortArray ba = new ShortArray(saSize, false); + for (int i = 0; i < saSize; i++) { + String s = sa.get(i); + ba.addInt(String2.parseInt(s.substring(0, s.length() - 1))); + if (ba.get(i) == Short.MAX_VALUE && !"32767s".equals(s)) + throw new SimpleException("Invalid short value: " + s); + } + return ba; + } + } else if (lastChar == 'L') { + if (sa.firstNonMatch(String2.NCCSV_LONG_ATT_PATTERN) < 0) { + LongArray la = new LongArray(saSize, false); + for (int i = 0; i < saSize; i++) { + String s = sa.get(i); + la.add(String2.parseLong(s.substring(0, s.length() - 1))); + if (la.get(i) == Long.MAX_VALUE && !"9223372036854775807L".equals(s)) + throw new SimpleException("Invalid long value: " + s); + } + return la; + } + } else if (lastChar == 'f') { + if (sa.firstNonMatch(String2.NCCSV_FLOAT_ATT_PATTERN) < 0) { + FloatArray fa = new FloatArray(saSize, false); + for (int i = 0; i < saSize; i++) { + String s = sa.get(i); + float f = String2.parseFloat(s.substring(0, s.length() - 1)); + fa.add(Math2.isFinite(f)? f : Float.NaN); //Infinity -> NaN + if (Float.isNaN(fa.get(i)) && !"NaNf".equals(s)) + throw new SimpleException("Invalid float value: " + s); + } + return fa; + } + } else if (lastChar == 'd') { + if (sa.firstNonMatch(String2.NCCSV_DOUBLE_ATT_PATTERN) < 0) { + //String2.log(">> doubles? " + sa.firstNonMatch(String2.NCCSV_DOUBLE_ATT_PATTERN) + ": " + sa.toString()); + DoubleArray da = new DoubleArray(saSize, false); + for (int i = 0; i < saSize; i++) { + String s = sa.get(i); + double d = String2.parseDouble(s); + da.add(Math2.isFinite(d)? d : Double.NaN); //Infinity -> NaN + if (Double.isNaN(da.get(i)) && !"NaNd".equals(s)) + throw new SimpleException("Invalid double value: " + s); + } + return da; + } + } + } + + //ints? 
+ if (sa.firstNonMatch(String2.NCCSV_INT_ATT_PATTERN) < 0) { + IntArray ia = new IntArray(saSize, false); + for (int i = 0; i < saSize; i++) { + String s = sa.get(i); + ia.add(String2.parseInt(s.substring(0, s.length() - 1))); + if (ia.get(i) == Integer.MAX_VALUE && !"2147483647i".equals(s)) + throw new SimpleException("Invalid int value: " + s); + } + return ia; + } + + + //char? + if (sa.firstNonMatch(String2.NCCSV_CHAR_ATT_PATTERN) < 0) { + CharArray ca = new CharArray(saSize, false); + for (int i = 0; i < saSize; i++) + ca.add(String2.fromNccsvChar(sa.get(i))); + return ca; + } + + //if nothing else matched - > StringArray + //convert nccsv strings to true strings (and canonical) and return StringArray + sa.fromNccsv(); + return sa; + } + + /** + * This tests the methods of this class. + * + * @throws Exception if trouble. + */ + public static void testNccsv() throws Throwable { + String2.log("*** PrimitiveArray.testNccsv"); + String s; + StringArray sa; + PrimitiveArray pa; + String msg; + + //String2.toNccsvChar + Test.ensureEqual(String2.toNccsvChar(' '), " ", ""); + Test.ensureEqual(String2.toNccsvChar('\u20AC'), "\\u20ac", ""); + + //String2.toNccsvDataString won't be quoted + Test.ensureEqual(String2.toNccsvDataString(""), "", ""); + Test.ensureEqual(String2.toNccsvDataString("a"), "a", ""); + Test.ensureEqual(String2.toNccsvDataString("a ~"), "a ~", ""); + s = String2.toNccsvDataString("a\n\f\t\r"); + Test.ensureEqual(s, "a\\n\\f\\t\\r", s); + Test.ensureEqual(String2.toNccsvDataString("a"), "a", ""); + Test.ensureEqual(String2.toNccsvDataString("5"), "5", ""); //number + Test.ensureEqual(String2.toNccsvDataString("'c'"), "'c'", ""); //char + + //String2.toNccsvDataString will be quoted + Test.ensureEqual(String2.toNccsvDataString(" "), "\" \"", ""); //start/end ' ' + Test.ensureEqual(String2.toNccsvDataString("a "), "\"a \"", ""); //start/end ' ' + Test.ensureEqual(String2.toNccsvDataString(" b"), "\" b\"", ""); //start/end ' ' + Test.ensureEqual(String2.toNccsvDataString("a,"), "\"a,\"", ""); // , + Test.ensureEqual(String2.toNccsvDataString("b\""), "\"b\"\"\"", ""); // " + + + //String2.toNccsvAttString won't be quoted + Test.ensureEqual(String2.toNccsvAttString(""), "", ""); + Test.ensureEqual(String2.toNccsvAttString("a"), "a", ""); + Test.ensureEqual(String2.toNccsvAttString("a ~"), "a ~", ""); + s = String2.toNccsvAttString("a\n\f\t\r"); + Test.ensureEqual(s, "a\\n\\f\\t\\r", s); + Test.ensureEqual(String2.toNccsvAttString("a"), "a", ""); + + + //String2.toNccsvAttString will be quoted + Test.ensureEqual(String2.toNccsvAttString(" "), "\" \"", ""); //start/end ' ' + Test.ensureEqual(String2.toNccsvAttString("a "), "\"a \"", ""); //start/end ' ' + Test.ensureEqual(String2.toNccsvAttString(" b"), "\" b\"", ""); //start/end ' ' + Test.ensureEqual(String2.toNccsvAttString("a,"), "\"a,\"", ""); // , + Test.ensureEqual(String2.toNccsvAttString("b\""), "\"b\"\"\"", ""); // " + Test.ensureEqual(String2.toNccsvAttString("\'c\'"), "\"'c'\"", ""); //char + Test.ensureEqual(String2.toNccsvAttString("5"), "\"5\"", ""); //number + + //ByteArray + s = "1b"; + pa = parseNccsvAttributes(StringArray.simpleFromNccsv(s)); + Test.ensureEqual(pa.elementClassString(), "byte", ""); + Test.ensureEqual(pa.toString(), "1", ""); + Test.ensureEqual(pa.toNccsvAttString(), s, ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("-128b,-0b,0b,127b")); + Test.ensureEqual(pa.elementClassString(), "byte", ""); + Test.ensureEqual(pa.toString(), "-128, 0, 0, 127", ""); + 
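+ //note: "-0b" parses to 0, so it round-trips as "0b" in the next assertion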
Test.ensureEqual(pa.toNccsvAttString(), "-128b,0b,0b,127b", ""); + + try { + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("128b")); + msg = "shouldn't get here"; + } catch (Throwable t) { + msg = t.toString(); + } + Test.ensureEqual(msg, "com.cohort.util.SimpleException: Invalid byte value: 128b", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1b,3")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1b, 3", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1b,1234b")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1b, 1234b", ""); + + //ShortArray + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1s")); + Test.ensureEqual(pa.elementClassString(), "short", ""); + Test.ensureEqual(pa.toString(), "1", ""); + Test.ensureEqual(pa.toNccsvAttString(), "1s", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("-32768s,-0s,0s,32767s")); + Test.ensureEqual(pa.elementClassString(), "short", ""); + Test.ensureEqual(pa.toString(), "-32768, 0, 0, 32767", ""); + Test.ensureEqual(pa.toNccsvAttString(), "-32768s,0s,0s,32767s", ""); + + try { + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("32768s")); + msg = "shouldn't get here"; + } catch (Throwable t) { + msg = t.toString(); + } + Test.ensureEqual(msg, "com.cohort.util.SimpleException: Invalid short value: 32768s", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1s,3")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1s, 3", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1s,123456s")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1s, 123456s", ""); + + //IntArray + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1i")); + Test.ensureEqual(pa.elementClassString(), "int", ""); + Test.ensureEqual(pa.toString(), "1", ""); + Test.ensureEqual(pa.toNccsvAttString(), "1i", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("-2147483648i,-0i,0i,2147483647i")); + Test.ensureEqual(pa.elementClassString(), "int", ""); + Test.ensureEqual(pa.toString(), "-2147483648, 0, 0, 2147483647", ""); + Test.ensureEqual(pa.toNccsvAttString(), "-2147483648i,0i,0i,2147483647i", ""); + + try { + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("2147483648i")); + msg = "shouldn't get here"; + } catch (Throwable t) { + msg = t.toString(); + } + Test.ensureEqual(msg, "com.cohort.util.SimpleException: Invalid int value: 2147483648i", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1i,123456789091i")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1i, 123456789091i", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1i,3.00i")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1i, 3.00i", ""); + + //LongArray + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1L")); + Test.ensureEqual(pa.elementClassString(), "long", ""); + Test.ensureEqual(pa.toString(), "1", ""); + Test.ensureEqual(pa.toNccsvAttString(), "1L", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("-9223372036854775808L,-0L,0L,9223372036854775807L")); + Test.ensureEqual(pa.elementClassString(), "long", ""); + Test.ensureEqual(pa.toString(), 
"-9223372036854775808, 0, 0, 9223372036854775807", ""); + Test.ensureEqual(pa.toNccsvAttString(), "-9223372036854775808L,0L,0L,9223372036854775807L", ""); + + try { + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("9223372036854775808L")); + msg = "shouldn't get here"; + } catch (Throwable t) { + msg = t.toString(); + } + Test.ensureEqual(msg, "com.cohort.util.SimpleException: Invalid long value: 9223372036854775808L", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1L,12345678901234567890L")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1L, 12345678901234567890L", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1L,123456")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1L, 123456", ""); + + //FloatArray + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1f")); + Test.ensureEqual(pa.elementClassString(), "float", ""); + Test.ensureEqual(pa.toString(), "1.0", ""); + Test.ensureEqual(pa.toNccsvAttString(), "1.0f", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv( + "-3e-38f,-.12e3f,0f,3e0f,3.e4f,.12e+3f,1.2E38f,NaNf")); + Test.ensureEqual(pa.elementClassString(), "float", ""); + Test.ensureEqual(pa.toString(), + "-3.0E-38, -120.0, 0.0, 3.0, 30000.0, 120.0, 1.2E38, NaN", ""); + Test.ensureEqual(pa.toNccsvAttString(), + "-3.0E-38f,-120.0f,0.0f,3.0f,30000.0f,120.0f,1.2E38f,NaNf", ""); + + try { + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1.2E39f")); + msg = "shouldn't get here"; + } catch (Throwable t) { + msg = t.toString(); + } + Test.ensureEqual(msg, "com.cohort.util.SimpleException: Invalid float value: 1.2E39f", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1f,3..0e23f")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1f, 3..0e23f", ""); + + //DoubleArray + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1d")); + Test.ensureEqual(pa.elementClassString(), "double", ""); + Test.ensureEqual(pa.toString(), "1.0", ""); + Test.ensureEqual(pa.toNccsvAttString(), "1.0d", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv( + "-3.0e-300d,-.12e3d,1.d,.1d,3.e4d,.12e3d,1.2E+300d,NaNd")); + Test.ensureEqual(pa.elementClassString(), "double", ""); + Test.ensureEqual(pa.toString(), + "-3.0E-300, -120.0, 1.0, 0.1, 30000.0, 120.0, 1.2E300, NaN", ""); + Test.ensureEqual(pa.toNccsvAttString(), + "-3.0E-300d,-120.0d,1.0d,0.1d,30000.0d,120.0d,1.2E300d,NaNd", ""); + + try { + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1.e310d")); + msg = "shouldn't get here"; + } catch (Throwable t) { + msg = t.toString(); + } + Test.ensureEqual(msg, "com.cohort.util.SimpleException: Invalid double value: 1.e310d", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("3.0d,3..0d")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "3.0d, 3..0d", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("1.0d,3")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "1.0d, 3", ""); + + //StringArray + pa = parseNccsvAttributes(StringArray.simpleFromNccsv( + //in the nccsv file, it's a string with characters like \ + "\"a~ \\f \\n \\r \\t \\\\ \\/ \\u00C0 \\u0000 \\uffFf\"")); + Test.ensureEqual(pa.elementClassString(), "String", ""); + //now it's a string with 
control chars and unicode chars + Test.ensureEqual(String2.annotatedString(pa.getString(0)), + "a~ [12] [10]\n" + + " [13] [9] \\ / [192] [0] [65535][end]", ""); + Test.ensureEqual(pa.toNccsvAttString(), + "a~ \\f \\n \\r \\t \\\\ / \\u00c0 \\u0000 \\uffff", ""); + + + //CharArray + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("\"'a'\"")); + Test.ensureEqual(pa.elementClassString(), "char", ""); + Test.ensureEqual(pa.toString(), "a", ""); + Test.ensureEqual(pa.toNccsvAttString(), "\"'a'\"", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv( // \\b is not supported + "\"'\\f'\", \"'\\n'\", \"'\\r'\", \"'\\t'\", \"'\\\\'\"")); + Test.ensureEqual(pa.elementClassString(), "char", ""); + Test.ensureEqual(pa.toString(), + "\\f, \\n, \\r, \\t, \\\\", ""); + Test.ensureEqual(pa.toNccsvAttString(), + "\"'\\f'\",\"'\\n'\",\"'\\r'\",\"'\\t'\",\"'\\\\'\"", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv( + "\"'\\/'\", \"'/'\", \"'\"\"'\", \"' '\", \"'''\", \"'a'\"")); + Test.ensureEqual(pa.elementClassString(), "char", ""); + Test.ensureEqual(pa.toString(), + "/, /, \"\"\"\", \" \", ', a", ""); + Test.ensureEqual(pa.toNccsvAttString(), + "\"'/'\",\"'/'\",\"'\"\"'\",\"' '\",\"'''\",\"'a'\"", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv( + "\"'~'\", '\\u00C0', \"'\\u0000'\", \"'\\uffFf'\"")); + Test.ensureEqual(pa.elementClassString(), "char", ""); + Test.ensureEqual(pa.toString(), + "~, \\u00c0, \\u0000, \\uffff", ""); + Test.ensureEqual(pa.toNccsvAttString(), + "\"'~'\",\"'\\u00c0'\",\"'\\u0000'\",\"'\\uffff'\"", ""); + + try { + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("'\\b'")); + msg = "shouldn't get here"; + } catch (Throwable t) { + msg = t.toString(); + } + //Test.ensureEqual(msg, "zztop", ""); + + pa = parseNccsvAttributes(StringArray.simpleFromNccsv("'a', ''")); //doesn't match regex + Test.ensureEqual(pa.elementClassString(), "String", ""); + Test.ensureEqual(pa.toString(), "'a', ''", ""); + + } + /** @@ -3308,7 +3798,7 @@ public static void testBasic() throws Throwable { Test.ensureEqual(pa.getString(0), "", ""); Test.ensureEqual(factory(byte.class, 1, "10").toString(), "10", ""); - Test.ensureEqual(factory(char.class, 2, "abc").toString(), "97, 97", ""); + Test.ensureEqual(factory(char.class, 2, "abc").toString(),"a, a", ""); Test.ensureEqual(factory(short.class, 3, "30").toString(), "30, 30, 30", ""); Test.ensureEqual(factory(int.class, 4, "40").toString(), "40, 40, 40, 40", ""); Test.ensureEqual(factory(long.class, 5, "50").toString(), "50, 50, 50, 50, 50", ""); @@ -3538,7 +4028,7 @@ public static void testBasic() throws Throwable { StringArray Sar = new StringArray(new String[]{"22","4444","666666","666666","666666","88888888"}); Test.ensureEqual(bar.indexOf("6"), 2, ""); - Test.ensureEqual(car.indexOf("6"), 2, ""); + Test.ensureEqual(car.indexOf("\u0006"), 2, ""); Test.ensureEqual(dar.indexOf("6"), 2, ""); Test.ensureEqual(far.indexOf("6"), 2, ""); Test.ensureEqual(Iar.indexOf("6"), 2, ""); @@ -3556,7 +4046,7 @@ public static void testBasic() throws Throwable { Test.ensureEqual(Sar.indexOf("a"), -1, ""); Test.ensureEqual(bar.indexOf("6", 3), 3, ""); - Test.ensureEqual(car.indexOf("6", 3), 3, ""); + Test.ensureEqual(car.indexOf("\u0006", 3), 3, ""); Test.ensureEqual(dar.indexOf("6", 3), 3, ""); Test.ensureEqual(far.indexOf("6", 3), 3, ""); Test.ensureEqual(Iar.indexOf("6", 3), 3, ""); @@ -3565,7 +4055,7 @@ public static void testBasic() throws Throwable { Test.ensureEqual(Sar.indexOf("666666", 3), 3, ""); 
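//(The car changes here and below look up "\u0006": after this patch, CharArray
// is matched by the literal character, not by the numeric string "6" that the
// old chars-as-unsigned-shorts behavior used.)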
Test.ensureEqual(bar.lastIndexOf("6"), 4, ""); - Test.ensureEqual(car.lastIndexOf("6"), 4, ""); + Test.ensureEqual(car.lastIndexOf("\u0006"), 4, ""); Test.ensureEqual(dar.lastIndexOf("6"), 4, ""); Test.ensureEqual(far.lastIndexOf("6"), 4, ""); Test.ensureEqual(Iar.lastIndexOf("6"), 4, ""); @@ -3583,7 +4073,7 @@ Test.ensureEqual(Sar.lastIndexOf("a"), -1, ""); Test.ensureEqual(bar.lastIndexOf("6", 3), 3, ""); - Test.ensureEqual(car.lastIndexOf("6", 3), 3, ""); + Test.ensureEqual(car.lastIndexOf("\u0006", 3), 3, ""); Test.ensureEqual(dar.lastIndexOf("6", 3), 3, ""); Test.ensureEqual(far.lastIndexOf("6", 3), 3, ""); Test.ensureEqual(Iar.lastIndexOf("6", 3), 3, ""); @@ -3886,7 +4376,7 @@ public static void testBasic() throws Throwable { //addFromPA( DoubleArray other = (DoubleArray)csvFactory(double.class, "11.1, 22.2, 33.3"); Test.ensureEqual(csvFactory(byte.class, "1.1, 2.2").addFromPA(other, 1, 2).toString(), "1, 2, 22, 33", ""); - Test.ensureEqual(csvFactory(char.class, "1.1, 2.2").addFromPA(other, 1, 2).toString(), "1, 2, 22, 33", ""); + Test.ensureEqual(csvFactory(char.class, "1.1, 2.2").addFromPA(other, 1, 2).toString(), "1, 2, \\u0016, !", ""); Test.ensureEqual(csvFactory(double.class, "1.1, 2.2").addFromPA(other, 1, 2).toString(), "1.1, 2.2, 22.2, 33.3", ""); Test.ensureEqual(csvFactory(float.class, "1.1, 2.2").addFromPA(other, 1, 2).toString(), "1.1, 2.2, 22.2, 33.3", ""); Test.ensureEqual(csvFactory(int.class, "1.1, 2.2").addFromPA(other, 1, 2).toString(), "1, 2, 22, 33", ""); @@ -3897,7 +4387,7 @@ Test.ensureEqual(ia.addFromPA(other, 2).toString(), "10, 12, 13, 15, 2147483647, 33", ""); Test.ensureEqual(csvFactory(byte.class, "1.1, 2.2").addFromPA(csvFactory(byte.class, "11.1, 22.2, 33.3"), 1, 2).toString(), "1, 2, 22, 33", ""); - Test.ensureEqual(csvFactory(char.class, "1.1, 2.2").addFromPA(csvFactory(char.class, "11.1, 22.2, 33.3"), 1, 2).toString(), "1, 2, 22, 33", ""); + Test.ensureEqual(csvFactory(char.class, "1.1, 2.2").addFromPA(csvFactory(char.class, "11.1, 22.2, 33.3"), 1, 2).toString(), "1, 2, 2, 3", ""); Test.ensureEqual(csvFactory(double.class, "1.1, 2.2").addFromPA(csvFactory(double.class, "11.1, 22.2, 33.3"), 1, 2).toString(), "1.1, 2.2, 22.2, 33.3", ""); Test.ensureEqual(csvFactory(float.class, "1.1, 2.2").addFromPA(csvFactory(float.class, "11.1, 22.2, 33.3"), 1, 2).toString(), "1.1, 2.2, 22.2, 33.3", ""); Test.ensureEqual(csvFactory(int.class, "1.1, 2.2").addFromPA(csvFactory(int.class, "11.1, 22.2, 33.3"), 1, 2).toString(), "1, 2, 22, 33", ""); @@ -4229,6 +4719,7 @@ public static void test() throws Throwable { String2.log("*** PrimitiveArray.test"); testBasic(); testTestValueOpValue(); + testNccsv(); } diff --git a/WEB-INF/classes/com/cohort/array/ShortArray.java b/WEB-INF/classes/com/cohort/array/ShortArray.java index afb793850..c2a32c6c4 100644 --- a/WEB-INF/classes/com/cohort/array/ShortArray.java +++ b/WEB-INF/classes/com/cohort/array/ShortArray.java @@ -122,6 +122,28 @@ public ShortArray(char[] charArray) { array[i] = (short)charArray[i]; } + /** + * A special method which encodes all char values as short values via + * sa.array[i] = (short)ca.array[i]. + * Thus char values >= 0x8000 become negative short values. + * Note that the cohort 'missingValue' of a CharArray is different from the + * missingValue of a ShortArray and this method does nothing special + * for those values.
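+ * <p>A hedged sketch of the cast rule:
+ * <pre>
+ *   CharArray ca = new CharArray(new char[]{'A', '\uFFFF'});
+ *   ShortArray sa = ShortArray.fromCharArrayBytes(ca);  //sa holds [65, -1]
+ * </pre>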
+ * 'capacity' and 'size' will equal ca.size. + * See CharArray.decodeFromShortArray(). + * + * @param ca CharArray + */ + public static ShortArray fromCharArrayBytes(CharArray ca) { + int size = ca.size(); + ShortArray sa = new ShortArray(size, true); //active + short sarray[] = sa.array; + char carray[] = ca.array; + for (int i = 0; i < size; i++) + sarray[i] = (short)carray[i]; + return sa; + } + /** * This returns the current capacity (number of elements) of the internal data array. * @@ -704,7 +726,7 @@ public double getDouble(int index) { * with String2.parseDouble and so may return Double.NaN. */ public double getUnsignedDouble(int index) { - //or see http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/faq.html#Unsigned + //or see https://www.unidata.ucar.edu/software/thredds/current/netcdf-java/reference/faq.html#Unsigned return Short.toUnsignedInt(get(index)); } @@ -894,6 +916,18 @@ public String toString() { return String2.toCSSVString(toArray()); //toArray() get just 'size' elements } + /** + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * + * @return an NCCSV attribute String + */ + public String toNccsvAttString() { + StringBuilder sb = new StringBuilder(size * 8); + for (int i = 0; i < size; i++) + sb.append((i == 0? "" : ",") + array[i] + "s"); + return sb.toString(); + } + /** * This sorts the elements in ascending order. * To get the elements in reverse order, just read from the end of the list @@ -991,7 +1025,7 @@ public void readDis(DataInputStream dis, int n) throws Exception { public void externalizeForDODS(DataOutputStream dos) throws Exception { dos.writeInt(size); dos.writeInt(size); //yes, a second time - //shorts are written as ints (see dods.dap.Int16PrimitiveArray.externalize) + //shorts are written as ints (see dods.dap.Int16PrimitiveVector.externalize) //since XDR doesn't support shorts for (int i = 0; i < size; i++) dos.writeInt(array[i]); //yes, as ints @@ -1287,7 +1321,8 @@ public int firstTie() { /** This returns the minimum value that can be held by this class. */ public String minValue() {return "" + Short.MIN_VALUE;} - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). */ public String maxValue() {return "" + (Short.MAX_VALUE - 1);} /** diff --git a/WEB-INF/classes/com/cohort/array/StringArray.java b/WEB-INF/classes/com/cohort/array/StringArray.java index 16fe2cf5a..1ece98bbd 100644 --- a/WEB-INF/classes/com/cohort/array/StringArray.java +++ b/WEB-INF/classes/com/cohort/array/StringArray.java @@ -28,6 +28,7 @@ import java.util.HashSet; import java.util.Iterator; import java.util.Map; +import java.util.regex.Pattern; import java.util.Set; /** @@ -124,6 +125,75 @@ public StringArray(Enumeration enumeration) { } } + /** + * A special method which encodes all the Unicode strings in this to ISO_8859_1. + * + * @return this for convenience + */ + public StringArray toIso88591() { + for (int i = 0; i < size; i++) + array[i] = String2.canonical(String2.toIso88591String(array[i])); + return this; + } + + + /** + * A special method which decodes all the UTF-8 strings to Unicode. + * See toUTF8(). 
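+ * <p>Presumably fromUTF8() undoes toUTF8(), so sa.toUTF8().fromUTF8() should
+ * leave the strings unchanged (a hedged note; not asserted here).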
+ * + * @return this for convenience + */ + public StringArray fromUTF8() { + for (int i = 0; i < size; i++) + array[i] = String2.canonical(String2.fromUTF8String(array[i])); + //String2.log(">>after fromUTF8: " + toNccsvAttString()); + return this; + } + + /** + * A special method which encodes all the Unicode strings in this to UTF-8. + * See fromUTF8(). + * + * @return this for convenience + */ + public StringArray toUTF8() { + for (int i = 0; i < size; i++) + array[i] = String2.canonical(String2.toUTF8String(array[i])); + //String2.log(">>after toUTF8: " + toNccsvAttString()); + return this; + } + + /** + * A special method which encodes all the Unicode strings in this toJson(,127) encoding. + * + * @return this for convenience + */ + public StringArray toJson() { + for (int i = 0; i < size; i++) + array[i] = String2.canonical(String2.toJson(array[i], 127)); + return this; + } + + /** + * This converts a StringArray with JSON-encoded Strings into + * the actual (canonical) Strings. + * This doesn't require that the JSON strings have enclosing double quotes. + */ + public void fromJson() { + for (int i = 0; i < size; i++) + array[i] = String2.canonical(String2.fromJson(array[i])); //doesn't require enclosing "'s + } + + /** + * This converts a StringArray with NCCSV-encoded Strings into + * the actual (canonical) Strings. + * This doesn't require that the NCCSV strings have enclosing double quotes. + */ + public void fromNccsv() { + for (int i = 0; i < size; i++) + array[i] = String2.canonical(String2.fromNccsvString(array[i])); //doesn't require enclosing "'s + } + /* * probably works, but not tested * This makes a StringArray with the contents of a map. * Each entry will be from .toString() = .toString(). @@ -210,7 +280,7 @@ public void toFile(String fileName, String charset, String lineSeparator) try { //open the file if (charset == null || charset.length() == 0) - charset = "ISO-8859-1"; + charset = String2.ISO_8859_1; Writer w = new OutputStreamWriter(new FileOutputStream(fileName, append), charset); bufferedWriter = new BufferedWriter(w); @@ -349,6 +419,19 @@ public void trimAll() { } } + /** + * This trims the end of each of the strings. + * + */ + public void trimEndAll() { + for (int i = 0; i < size; i++) { + String s = array[i]; + String st = String2.trimEnd(s); + if (st.length() < s.length()) + array[i] = String2.canonical(st); + } + } + /** * Use this for temporary arrays to add an item to the array (increasing 'size' by 1) * without using String2.canonical. @@ -583,6 +666,19 @@ public void removeRange(int from, int to) { Arrays.fill(array, size, size + to - from, null); } + /** + * This removes any/all the 0-length strings at the end. + * + * @return the new size + */ + public int removeEmptyAtEnd() { + int last = size; + while (last > 0 && array[last - 1].length() == 0) + last--; + removeRange(last, size); + return size; + } + /** * Moves elements 'first' through 'last' (inclusive) * to 'destination'. 
@@ -668,7 +764,7 @@ public void ensureCapacity(long minCapacity) { int newCapacity = (int)Math.min(Integer.MAX_VALUE - 1, array.length + (long)array.length); if (newCapacity < minCapacity) newCapacity = (int)minCapacity; //safe since checked above - Math2.ensureMemoryAvailable(8L * newCapacity, "StringArray"); //8 is feeble minimal estimate + Math2.ensureMemoryAvailable(4L * newCapacity, "StringArray"); //4 since just copying pointers String[] newArray = new String[newCapacity]; System.arraycopy(array, 0, newArray, 0, size); array = newArray; //do last to minimize concurrency problems @@ -683,7 +779,7 @@ public void ensureCapacity(long minCapacity) { public String[] toArray() { if (array.length == size) return array; - Math2.ensureMemoryAvailable(8L * size, "StringArray.toArray"); //8L is feeble minimal estimate + Math2.ensureMemoryAvailable(4L * size, "StringArray.toArray"); //4L since just copying pointers String[] tArray = new String[size]; System.arraycopy(array, 0, tArray, 0, size); return tArray; @@ -706,7 +802,7 @@ public Object toObjectArray() { * Non-finite values are returned as Double.NaN's. */ public double[] toDoubleArray() { - Math2.ensureMemoryAvailable(8L * size, "StringArray.toDoubleArray"); //8L is feeble minimal estimate + Math2.ensureMemoryAvailable(8L * size, "StringArray.toDoubleArray"); double dar[] = new double[size]; for (int i = 0; i < size; i++) dar[i] = String2.parseDouble(array[i]); @@ -845,6 +941,34 @@ public String getString(int index) { return get(index); } + /** + * Return a value from the array as a String suitable for the data section + * of an NCCSV file. + * + * @param index the index number 0 ... size-1 + * @return For numeric types, this returns ("" + ar[index]), or "" if NaN or infinity. + * CharArray and StringArray overwrite this. + */ + public String getNccsvDataString(int index) { + return String2.toNccsvDataString(get(index)); + } + + /** + * Return a value from the array as a String suitable for the data section + * of an tsv file, e.g., z \t \u0000 , \". + * + * @param index the index number 0 ... size-1 + * @return For numeric types, this returns ("" + ar[index]), or "" if NaN or infinity. + * CharArray and StringArray overwrite this. + */ + public String getTsvString(int index) { + String s = get(index); + if (s == null) + return ""; + s = String2.toJson(s); + return s.substring(1, s.length() - 1); //remove enclosing quotes + } + /** * Set a value in the array as a String. * @@ -1003,57 +1127,14 @@ public String testEquals(Object o) { /** - * This converts the elements into a comma-separated (CSV) String. - * If a value has an internal comma or double quotes, the value is surrounded by - * double quotes and the internal quotes are replaced by 2 double quotes. + * This converts the elements into an NCCSV attribute String, e.g.,: -128b, 127b + * There is no trailing \n. + * Strings are handled specially: make a newline-separated string, then encode it. 
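+ * <p>Sketch: for a StringArray ["a", "b"], the result is the NCCSV encoding of
+ * "a\nb" (the values joined by newlines, then encoded as one attribute string).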
* - * @return a Comma-Separated-Value (not comma space) String representation + * @return an NCCSV attribute String */ - public String toCSVString() { - Math2.ensureMemoryAvailable(8L * size, "StringArray.toCSVString"); //8L is lame estimate of bytes/element - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < size; i++) { - if (i > 0) - sb.append(','); - String s = array[i]; - if (s == null) { - } else if (s.indexOf('"') >= 0) { //check for '"' before check for ',' - s = String2.replaceAll(s, "\"", "\"\""); - sb.append("\"" + s + "\""); - } else if (s.indexOf(',') >= 0) { - sb.append("\"" + s + "\""); - } else { - sb.append(s); - } - } - return sb.toString(); - } - - /** - * This converts the elements into a Comma-Space-Separated-Value (CSSV) String. - * If a value has an internal comma or double quotes, the value is surrounded by - * double quotes and the internal quotes are replaced by 2 double quotes. - * - * @return a Comma-Space-Separated-Value String representation - */ - public String toString() { - Math2.ensureMemoryAvailable(8L * size, "StringArray.toString"); //8L is lame estimate of bytes/element - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < size; i++) { - if (i > 0) - sb.append(", "); - String s = array[i]; - if (s == null) { - } else if (s.indexOf('"') >= 0) { //check for '"' before check for ',' - s = String2.replaceAll(s, "\"", "\"\""); - sb.append("\"" + s + "\""); - } else if (s.indexOf(',') >= 0) { - sb.append("\"" + s + "\""); - } else { - sb.append(s); - } - } - return sb.toString(); + public String toNccsvAttString() { + return String2.toNccsvAttString(String2.toSVString(toArray(), "\n", false)); } /** @@ -1076,6 +1157,7 @@ public String toJsonCsvString() { /** * This converts the elements into a newline-separated String. + * There is a trailing newline! * * @return the newline-separated String representation of o */ @@ -1208,6 +1290,7 @@ public void readDis(DataInputStream dis, int n) throws Exception { * See also the XDR standard (http://tools.ietf.org/html/rfc4506#section-4.11). * Just 8 bits are stored: there is no utf or other unicode support. * See DAP 2.0 section 3.2.3 says US-ASCII (7bit), so might as well go for compatible common 8bit. + * Ah: dods.dap.DString reader assumes ISO-8859-1, which is first page of unicode. * * @param dos * @param s @@ -1216,8 +1299,10 @@ public void readDis(DataInputStream dis, int n) throws Exception { public static void externalizeForDODS(DataOutputStream dos, String s) throws Exception { int n = s.length(); dos.writeInt(n); //for Strings, just write size once - for (int i = 0; i < n; i++) - dos.writeByte(s.charAt(i)); //eek; just 8 bits stored; no utf or other unicode support + for (int i = 0; i < n; i++) { //just low 8 bits written; no utf or other unicode support, + char c = s.charAt(i); //2016-11-29 I added: char>255 -> '?', it's better than low 8 bits + dos.writeByte(c < 256? 
c : '?'); //dods.dap.DString reader assumes ISO-8859-1, which is first page of unicode + } //pad to 4 bytes boundary at end while (n++ % 4 != 0) @@ -1309,6 +1394,14 @@ public void append(PrimitiveArray pa) { ensureCapacity(size + (long)otherSize); if (pa instanceof StringArray) { System.arraycopy(((StringArray)pa).array, 0, array, size, otherSize); + //2017-04-06 this was contemplated, but better to handle this some other way, + // e.g., CharArray.getString() + //} else if (pa instanceof CharArray) { //for Argo + // CharArray ca = (CharArray)pa; + // for (int i = 0; i < otherSize; i++) { + // char ch = ca.get(i); + // array[size + i] = String2.canonical(ch == Character.MAX_VALUE? "" : ch + ""); + // } } else { for (int i = 0; i < otherSize; i++) array[size + i] = String2.canonical(pa.getString(i)); //this converts mv's @@ -1704,6 +1797,65 @@ public static StringArray fromCSV(String csv) { return sa; } */ + /** + * This is a purposely simple, 2-double-quotes-aware, backslash-aware splitter + * that makes a StringArray from the items in an NCCSV-style string. + + *
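<p>A usage sketch (behavior per the notes and tests below):
+ * <pre>
+ *   StringArray sa = StringArray.simpleFromNccsv(" a , \"b \"\" c\" ,");
+ *   //sa holds 3 items: [a], ["b "" c"] (still quoted/encoded), [] (empty)
+ * </pre>
+ *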
This avoids String2.canonical(), so it will be faster if you are just parsing + * and then discarding, or storing in some other data structure. + + *

Strings should be JSON-like, but backslashed characters (e.g., \n) are left as-is (not converted + * to the special character) and 2 double quotes are still 2 double quotes. +
null becomes sa.size() == 0. +
"" becomes sa.length() == 1. + * + * @param csv + * @return a StringArray with the items. + *
Quoted strings are still in quoted strings. + *
Backslashed characters are not converted to the special character + * (e.g., double quotes or newline). + *
Items are trimmed. + */ + public static StringArray simpleFromNccsv(String csv) { + StringArray sa = new StringArray(); + if (csv == null) + return sa; + int start = 0; //start of this item + int po = 0; //next char to be looked at + int n = csv.length(); + while (po < n) { + char ch = csv.charAt(po++); + + if (ch == '"') { + while (po < n) { + ch = csv.charAt(po++); + if (ch == '\\' && po < n) { + po++; + continue; + } else if (ch == '"') { + //matching close quote + break; + } + } + + } else if (ch == '\\' && po < n) { + po++; + continue; + + } else if (ch == ',') { + //end of item + if (sa.size == sa.array.length) //if we're at capacity + sa.ensureCapacity(sa.size + 1L); + sa.array[sa.size++] = csv.substring(start, po - 1).trim(); //avoid canonical + start = po; + } + } + if (sa.size == sa.array.length) //if we're at capacity + sa.ensureCapacity(sa.size + 1L); + sa.array[sa.size++] = csv.substring(start, po).trim(); //avoid canonical + return sa; + } + /** * This tests if the values in the array are sorted in ascending order (tied is ok). * The details of this test are geared toward determining if the @@ -1822,7 +1974,8 @@ public void intraReplaceAllIgnoreCase(String from, String to) { /** This returns the minimum value that can be held by this class. */ public String minValue() {return "\u0000";} - /** This returns the maximum value that can be held by this class. */ + /** This returns the maximum value that can be held by this class + (not including the cohort missing value). */ public String maxValue() {return "\uFFFF";} /** @@ -1848,19 +2001,6 @@ public int[] getNMinMaxIndex() { return new int[]{n, tmini, tmaxi}; } - /** - * This returns the min and max of the non-null or "" strings (by simple comparison). - * - * @return String[3], 0=""+n (the number of non-null or "" strings), - * 1=min, 2=max. min and max are "" if n=0. - */ - public String[] getNMinMax() { - int nmm[] = getNMinMaxIndex(); - if (nmm[0] == 0) - return new String[]{"0", "", ""}; - return new String[]{"" + nmm[0], array[nmm[1]], array[nmm[2]]}; - } - /** * This compares two text files, line by line, and throws Exception indicating * line where different. @@ -1923,6 +2063,32 @@ public StringArray addHashSet(HashSet hs) { } + /** + * This returns the index of the first value that doesn't match the regex. + * + * @param regex + * @return the index of the first value that doesn't match the regex, or -1 if they all match. + * @throws RuntimeException if regex won't compile. + */ + public int firstNonMatch(String regex) { + return firstNonMatch(Pattern.compile(regex)); + } + + /** + * This returns the index of the first value that doesn't match the regex pattern p. + * + * @param p + * @return the index of the first value that doesn't match the regex pattern p, or -1 if they all match. + */ + public int firstNonMatch(Pattern p) { + for (int i = 0; i < size; i++) { + String s = get(i); + if (s == null || !p.matcher(s).matches()) + return i; + } + return -1; + } + /** * This tests the methods of this class. 
* @@ -2469,6 +2635,51 @@ public static void test() throws Throwable{ anArray2.sort(); Test.ensureEqual(anArray.toArray(), anArray2.toArray(), ""); + //fromNccsv + anArray = simpleFromNccsv(""); + Test.ensureEqual(anArray.toJsonCsvString(), + "\"\"", ""); + Test.ensureEqual(anArray.size(), 1, ""); + + anArray = simpleFromNccsv("a"); + Test.ensureEqual(anArray.toJsonCsvString(), + "\"a\"", ""); + Test.ensureEqual(anArray.size(), 1, ""); + + anArray = simpleFromNccsv(" a , b ,"); + Test.ensureEqual(anArray.toJsonCsvString(), + "\"a\", \"b\", \"\"", ""); + Test.ensureEqual(anArray.size(), 3, ""); + + anArray = simpleFromNccsv(" \" a\t\n\b\'z\"\" \" , 1.23f, a\""); + // \\b is removed + Test.ensureEqual(String2.annotatedString(String2.replaceAll(anArray.get(0), "\n", "")), + "\" a[9][8]'z\"\" \"[end]", ""); + Test.ensureEqual(anArray.toJsonCsvString(), + "\"\\\" a\\t\\n'z\\\"\\\" \\\"\", \"1.23f\", \"a\\\"\"", ""); + Test.ensureEqual(anArray.size(), 3, ""); + + anArray = simpleFromNccsv( + // \\b is not allowed + "'\\f', '\\n', '\\r', '\\t', '\\\\', '\\/', '\\\"', 'a', '~', '\\u00C0', '\\u0000', '\\uffFf'"); + Test.ensureEqual(anArray.toJsonCsvString(), + "\"'\\\\f'\", \"'\\\\n'\", \"'\\\\r'\", \"'\\\\t'\", \"'\\\\\\\\'\", \"'\\\\/'\", \"'\\\\\\\"'\", \"'a'\", \"'~'\", \"'\\\\u00C0'\", \"'\\\\u0000'\", \"'\\\\uffFf'\"", + anArray.toJsonCsvString()); + Test.ensureEqual(anArray.size(), 12, ""); + + //removeEmptyAtEnd(); + anArray = new StringArray(new String[] {"hi", "go", "to"}); + Test.ensureEqual(anArray.size(), 3, ""); + anArray.removeEmptyAtEnd(); + Test.ensureEqual(anArray.size(), 3, ""); + anArray.set(0, ""); + anArray.set(2, ""); + anArray.removeEmptyAtEnd(); + Test.ensureEqual(anArray.size(), 2, ""); + anArray.set(1, ""); + anArray.removeEmptyAtEnd(); + Test.ensureEqual(anArray.size(), 0, ""); + } } diff --git a/WEB-INF/classes/com/cohort/ema/EmaClass.java b/WEB-INF/classes/com/cohort/ema/EmaClass.java index 61f786c2c..3701a220b 100644 --- a/WEB-INF/classes/com/cohort/ema/EmaClass.java +++ b/WEB-INF/classes/com/cohort/ema/EmaClass.java @@ -1049,8 +1049,8 @@ public void doGet(HttpServletRequest request, //output the response html page response.setContentType("text/html"); - response.setCharacterEncoding("UTF-8"); - OutputStreamWriter out = new OutputStreamWriter(response.getOutputStream(), "UTF-8"); + response.setCharacterEncoding(String2.UTF_8); + OutputStreamWriter out = new OutputStreamWriter(response.getOutputStream(), String2.UTF_8); out.write(getHTMLPage(request, request.getContentLength() > 0)); //displayErrorMessages } diff --git a/WEB-INF/classes/com/cohort/util/Calendar2.java b/WEB-INF/classes/com/cohort/util/Calendar2.java index b55e92c57..3986f4be6 100644 --- a/WEB-INF/classes/com/cohort/util/Calendar2.java +++ b/WEB-INF/classes/com/cohort/util/Calendar2.java @@ -5,6 +5,7 @@ package com.cohort.util; import com.cohort.array.DoubleArray; +import com.cohort.array.PrimitiveArray; import com.cohort.array.StringArray; import java.text.SimpleDateFormat; @@ -18,9 +19,14 @@ import java.util.regex.Pattern; import java.util.TimeZone; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.OffsetDateTime; +import java.time.ZonedDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; /** * This class has static 
methods for dealing with dates and times. @@ -63,7 +69,7 @@ * *

Calendar2 does not use ERA designations. It uses negative year values for B.C. years
 * (calendar2Year = 1 - BCYear). Note that BCYears are 1..., so 1 BC is calendar2Year 0 (or 0000),
- * and 2 BC is calendar2Year -1 (or -0001).
+ * and 2 BC is calendar2Year -1 (or -0001). Thus, use getYear(gc) instead of gc.get(YEAR).
 *
 */
public class Calendar2 {
@@ -71,7 +77,7 @@ public class Calendar2 {
    //useful static variables
    public final static int ERA = Calendar.ERA;
    public final static int BC = GregorianCalendar.BC;
-   public final static int YEAR = Calendar.YEAR;
+   public final static int YEAR = Calendar.YEAR; //BEWARE: use getYear() not gc.get(YEAR)
    public final static int MONTH = Calendar.MONTH; //java counts 0..
    public final static int DATE = Calendar.DATE; //1.. of month
    public final static int DAY_OF_YEAR = Calendar.DAY_OF_YEAR; //1..
@@ -96,7 +102,10 @@ public class Calendar2 {
    public final static String SECONDS_SINCE_1970 = "seconds since 1970-01-01T00:00:00Z";
-   public final static TimeZone zuluTimeZone = TimeZone.getTimeZone("Zulu");
+   public final static String zulu = "Zulu";
+   public final static TimeZone zuluTimeZone = TimeZone.getTimeZone(zulu);
+   public final static ZoneId zuluZoneId = ZoneId.of(zulu);
+
    private final static String[] MONTH_3 = {
        "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"};
    private final static String[] MONTH_FULL = {
@@ -109,16 +118,16 @@ public class Calendar2 {
    /** special Formats for ISO date time without a suffix (assumed to be UTC) */
    public final static String ISO8601T_FORMAT = "yyyy-MM-dd'T'HH:mm:ss";
    public final static String ISO8601T3_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS";
-   public final static String ISO8601TTZZ_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZZ"; //ZZ shows offset with a colon
-   public final static DateTimeFormatter ISO8601TTZZ_FORMATTER =
-       DateTimeFormat.forPattern(ISO8601TTZZ_FORMAT); //.withZone(DateTimeZone.UTC);
    /** special case format supports suffix 'Z' or +/-HH:MM */
    public final static String ISO8601TZ_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
    public final static String ISO8601T3Z_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
+   public final static DateTimeFormatter ISO_OFFSET_LOCAL_FORMATTER =
+       //since this is a formatter (not a parser), bypass stuff in makeDateTimeFormatter
+       DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssxxxxx"); //offset always ISO formatted, e.g., -07:00
    public final static Pattern ISO_DATE_PATTERN = Pattern.compile("-?\\d{4}-\\d{2}.*");
-   public final static Pattern NUMERIC_TIME_PATTERN = Pattern.compile(" *[a-z]+ +since +[0-9].+");
+   public final static Pattern NUMERIC_TIME_PATTERN = Pattern.compile(" *[a-z]+ +since +-?[0-9].+");
    /**
     * This has alternating regex/timeFormat for formats where the first char is a digit.
     * This is used by suggestDateTimeFormat.
@@ -128,37 +137,37 @@
    //* Compact (number-only) formats only support years 0000 - 4999.
    //  That makes it likely that numbers won't be interpreted as compact date times.
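+   //(The -? prefixes added below allow negative (B.C.) year strings, matching the -? added to NUMERIC_TIME_PATTERN above.)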
//check for julian date before ISO 8601 format - "[0-9]{4}-[0-3][0-9]{2}", "yyyy-DDD", + "-?[0-9]{4}-[0-3][0-9]{2}", "yyyy-DDD", "[0-4][0-9]{3}[0-3][0-9]{2}", "yyyyDDD", //compact //variants of space-separated 1970-01-01 00:00:00.000 - "[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}[+-][0-9].*", - "yyyy-MM-dd HH:mm:ss.sssZ", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}", - "yyyy-MM-dd HH:mm:ss.sss", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9][+-][0-9].*", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}[+-][0-9].*", + "yyyy-MM-dd HH:mm:ss.SSSZ", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}", + "yyyy-MM-dd HH:mm:ss.SSS", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9][+-][0-9].*", "yyyy-MM-dd HH:mm:ssZ", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9]", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]:[0-5][0-9]", "yyyy-MM-dd HH:mm:ss", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]:[0-5][0-9]", "yyyy-MM-dd HH:mm", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9] [0-2][0-9]", "yyyy-MM-dd HH", //all other variants go to T-separated 1970-01-01T00:00:00.000 (for formatting date times) - "[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}[+-][0-9].*", - "yyyy-MM-dd'T'HH:mm:ss.sssZ", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}", - "yyyy-MM-dd'T'HH:mm:ss.sss", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9][+-][0-9].*", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}[+-][0-9].*", + "yyyy-MM-dd'T'HH:mm:ss.SSSZ", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9].[0-9]{1,3}", + "yyyy-MM-dd'T'HH:mm:ss.SSS", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9][+-][0-9].*", "yyyy-MM-dd'T'HH:mm:ssZ", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9]", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]:[0-5][0-9]", "yyyy-MM-dd'T'HH:mm:ss", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]:[0-5][0-9]", "yyyy-MM-dd'T'HH:mm", - "[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9].[0-2][0-9]", "yyyy-MM-dd'T'HH", //remaining ISO dates - "[0-9]{4}-[0-1][0-9]-[0-3][0-9]", "yyyy-MM-dd", - "[0-9]{4}-[0-1][0-9].*", "yyyy-MM", + "-?[0-9]{4}-[0-1][0-9]-[0-3][0-9]", "yyyy-MM-dd", + "-?[0-9]{4}-[0-1][0-9].*", "yyyy-MM", //compact ISO "[0-4][0-9]{3}[0-1][0-9][0-3][0-9][0-2][0-9][0-5][0-9][0-5][0-9]", "yyyyMMddHHmmss", @@ -168,21 +177,29 @@ public class Calendar2 { "yyyyMMddHH", "[0-4][0-9]{3}[0-1][0-9][0-3][0-9]", "yyyyMMdd", "[0-4][0-9]{3}[0-1][0-9]", "yyyyMM", - //note that yy handles conversion of 2 digit year to 4 digits (e.g., 85 -> 1985) - "[0-9]{1,2}/[0-9]{1,2}/[0-9]{2,4}", "M/d/yy", //assume US ordering - "[0-9]{1,2} [a-zA-Z]{3} [0-9]{2,4}", "d MMM yy", //2 Jan 85 - "[0-9]{1,2}-[a-zA-Z]{3}-[0-9]{2,4}", "d-MMM-yy" //02-JAN-1985 + //2017-03-23 2 digit year (yy) is no longer supported + "[0-9]{1,2}/[0-9]{1,2}/[0-9]{4} [0-9]{1,2}:[0-9]{2}:[0-9]{2}", "M/d/yyyy H:mm:ss", //assume US ordering + "[0-9]{1,2} [a-zA-Z]{3} [0-9]{4} [0-9]{1,2}:[0-9]{2}:[0-9]{2}", "d MMM yyyy H:mm:ss", //2 Jan 85 + "[0-9]{1,2}-[a-zA-Z]{3}-[0-9]{4} [0-9]{1,2}:[0-9]{2}:[0-9]{2}", "d-MMM-yyyy H:mm:ss", 
//02-JAN-1985 + + "[0-9]{1,2}/[0-9]{1,2}/[0-9]{4} [0-9]{1,2}:[0-9]{2}", "M/d/yyyy H:m", //assume US ordering + "[0-9]{1,2} [a-zA-Z]{3} [0-9]{4} [0-9]{1,2}:[0-9]{2}", "d MMM yyyy H:mm", //2 Jan 85 + "[0-9]{1,2}-[a-zA-Z]{3}-[0-9]{4} [0-9]{1,2}:[0-9]{2}", "d-MMM-yyyy H:mm", //02-JAN-1985 + + "[0-9]{1,2}/[0-9]{1,2}/[0-9]{4}", "M/d/yyyy", //assume US ordering + "[0-9]{1,2} [a-zA-Z]{3} [0-9]{4}", "d MMM yyyy", //2 Jan 85 + "[0-9]{1,2}-[a-zA-Z]{3}-[0-9]{4}", "d-MMM-yyyy" //02-JAN-1985 }; /** - * This has alternating regex/timeFormat for formats where the first char is a digit. + * This has alternating regex/timeFormat for formats where the first char is a letter. * This is used by suggestDateTimeFormat. */ public final static String letterRegexTimeFormat[] = { //test formats that start with a letter - "[a-zA-Z]{3} [0-9]{1,2}, [0-9]{2,4}", - "MMM d, yy", //Jan 2, 1985 - // "Sun, 06 Nov 1994 08:49:37 GMT" //GMT is literal. Joda doesn't parse z + "[a-zA-Z]{3} [0-9]{1,2}, [0-9]{4}", //2017-03-23 was {2,4} but 2 digit year (yy) no longer supported + "MMM d, yyyy", //Jan 2, 1985 + // "Sun, 06 Nov 1994 08:49:37 GMT" //GMT is literal. java.time (was Joda) doesn't parse z "[a-zA-Z]{3}, [0-9]{2} [a-zA-Z]{3} [0-9]{4} [0-9]{2}:[0-9]{2}:[0-9]{2} GMT", "EEE, dd MMM yyyy HH:mm:ss 'GMT'", //RFC 822 format date time // "Sun, 06 Nov 1994 08:49:37 -0800" or -08:00 @@ -277,8 +294,7 @@ public static boolean isTimeUnits(String tUnits) { if (tUnits == null) return false; tUnits = tUnits.toLowerCase(); - return tUnits.indexOf("yy") >= 0 || - NUMERIC_TIME_PATTERN.matcher(tUnits).matches(); + return tUnits.indexOf("yy") >= 0 || isNumericTimeUnits(tUnits); } /** This variant assumes Zulu time zone. */ @@ -320,6 +336,29 @@ public static double[] getTimeBaseAndFactor(String tsUnits, TimeZone timeZone) return new double[]{baseSeconds, factorToGetSeconds}; } + /** + * This converts a unitsSince value into epochSeconds. + * This properly handles 'special' factorToGetSeconds values (for month and year). + * + * @param baseSeconds from getTimeBaseAndFactor[0] + * @param factorToGetSeconds from getTimeBaseAndFactor[1] + * @param sourceUnitsSince a numeric PrimitiveArray with time values in the source units + * @return a DoubleArray with seconds since 1970-01-01 (or NaN if unitsSince is NaN). + * If sourceUnitsSince was a DoubleArray, it will be the same DoubleArray. + */ + public static DoubleArray unitsSinceToEpochSeconds(double baseSeconds, + double factorToGetSeconds, PrimitiveArray sourceUnitsSince) { + int n = sourceUnitsSince.size(); + DoubleArray epSec = sourceUnitsSince instanceof DoubleArray? + (DoubleArray)sourceUnitsSince : + new DoubleArray(n, true); //active + for (int i = 0; i < n; i++) + epSec.set(i, + unitsSinceToEpochSeconds(baseSeconds, factorToGetSeconds, sourceUnitsSince.getDouble(i))); + return epSec; + } + + /** * This converts a unitsSince value into epochSeconds. * This properly handles 'special' factorToGetSeconds values (for month and year). @@ -364,7 +403,7 @@ else throw new RuntimeException( return epSec; } - + /** * This converts an epochSeconds value into a unitsSince value. * This properly handles 'special' factorToGetSeconds values (for month and year). @@ -476,6 +515,49 @@ public static int unitsToConstant(String units) throws Exception { return -1; //won't happen, but method needs return statement } + /** + * This converts a string with number[timeUnits] into the number + * (with timeUnits applied), e.g., 10.4 or 10 minutes (becomes 600). 
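+     * For example, "10 minutes" yields {10, 60}, which the caller multiplies to get 600 seconds;
+     * plain "10.4" yields {10.4, 1}.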
+ * If timeUnits are specified, this returns the number of seconds. + * + * @param ntu optional number + optional timeUnits. + * But one of them must be specified. + * @return [0]=the number (1 if not specified), + * [1]=factorToGetSeconds (1 if not specified) + * @throws RuntimeException if trouble, e.g, ntu is null or "", or + * number is not a number, or optional timeUnits not valid. + */ + public static double[] parseNumberTimeUnits(String ntu) { + String errIn = "ERROR in parseNumberTimeUnits: "; + if (ntu == null) + throw new SimpleException(errIn + "nothing specified."); + ntu = ntu.trim(); + if (ntu.length() == 0) + throw new SimpleException(errIn + "nothing specified."); + + //find last non-letter by walking backward, e.g., '9' in 1.4e9minutes + int po = ntu.length() - 1; + while (po >= 0) { + if (!Character.isLetter(ntu.charAt(po))) + break; + po--; + } + + //extract the number + double results[] = new double[2]; + String num = ntu.substring(0, po + 1); + results[0] = po == -1? 1 : //1 if not specified + String2.parseDouble(num); + if (!Math2.isFinite(results[0])) + throw new SimpleException(errIn + "invalid number=" + ntu.substring(0, po + 1)); + + //extract the timeUnits + String units = ntu.substring(po+1).trim(); + results[1] = units.length() == 0? 1 : + Calendar2.factorToGetSeconds(units); //throws exception + + return results; + } /** @@ -1075,7 +1157,7 @@ public static GregorianCalendar newGCalendarZulu(int year, int month, int dayOfM gc.clear(); gc.set(year, month - 1, dayOfMonth, hour, minute, second); gc.set(MILLISECOND, millis); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations return gc; } @@ -1095,7 +1177,7 @@ public static GregorianCalendar newGCalendarLocal(int year, int dayOfYear) { Test.error(String2.ERROR + " in newGCalendarLocal: year value is Integer.MAX_VALUE!"); GregorianCalendar gc = new GregorianCalendar(year, 0, 1); gc.set(Calendar.DAY_OF_YEAR, dayOfYear); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations return gc; } @@ -1115,7 +1197,7 @@ public static GregorianCalendar newGCalendarZulu(int year, int dayOfYear) { Test.error(String2.ERROR + " in newGCalendarLocal: year value is Integer.MAX_VALUE!"); GregorianCalendar gc = newGCalendarZulu(year, 1, 1); gc.set(Calendar.DAY_OF_YEAR, dayOfYear); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations return gc; } @@ -1195,7 +1277,7 @@ public static String formatAsISODateTimeT(GregorianCalendar gc) { * @throws RuntimeException if trouble (e.g., gc is null) */ public static String formatAsISODateTimeTTZ(GregorianCalendar gc) { - return ISO8601TTZZ_FORMATTER.print(gc.getTimeInMillis()); + return ISO_OFFSET_LOCAL_FORMATTER.format(gc.toZonedDateTime()); } /** @@ -1213,6 +1295,25 @@ public static String formatAsISODateTimeT3(GregorianCalendar gc) { String2.zeroPad("" + gc.get(MILLISECOND), 3); } + /** + * This converts a time_precision string into a time format string. 
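+     * For example, time_precision "1970-01-01T00:00Z" yields "yyyy-MM-dd'T'HH:mmZ".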
+ * + * @param pre a time_precision string, e.g., "1970-01-01T00Z" + * @return the corresponding time format string, e.g., "yyyy-MM-dd'T'HHZ", + * or ISO8601T_FORMAT if trouble + */ + public static String timePrecisionToTimeFormat(String pre) { + if (!String2.isSomething(pre) || + !pre.startsWith("1970-01")) + return ISO8601TZ_FORMAT; + if (pre.endsWith("Z")) + pre = pre.substring(0, pre.length() - 1); + //ISO8601T3_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS"; + String iso = String2.replaceAll(ISO8601T3_FORMAT, "'T'", "T"); + iso = iso.substring(0, Math.min(pre.length(), iso.length())); + iso = String2.replaceAll(iso, "T", "'T'"); + return iso + (iso.length() >= 15? "Z" : ""); + } /** * This is like formatAsISODateTime, but returns a @@ -1695,7 +1796,7 @@ public static GregorianCalendar parseISODateTime(GregorianCalendar gc, gc.set((negative? -1 : 1) * ymdhmsmom[0], ymdhmsmom[1] - 1, ymdhmsmom[2], ymdhmsmom[3], ymdhmsmom[4], ymdhmsmom[5]); gc.set(MILLISECOND, ymdhmsmom[6]); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations //synchronized (isoDateTimeFormat) { // gc.setTime(isoDateTimeFormat.parse(isoDateTimeString)); @@ -1758,7 +1859,7 @@ public static GregorianCalendar parseUSSlash24(GregorianCalendar gc, //set as ymdhms month -1 since gc month is 0.. gc.set(mdyhms[2], mdyhms[0] - 1, mdyhms[1], mdyhms[3], mdyhms[4], mdyhms[5]); gc.set(MILLISECOND, 0); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations //synchronized (isoDateTimeFormat) { // gc.setTime(isoDateTimeFormat.parse(isoDateTimeString)); @@ -1815,7 +1916,7 @@ public static GregorianCalendar parseCompactDateTime(GregorianCalendar gc, String2.parseInt(s.substring(10, 12)), String2.parseInt(s.substring(12, 14))); gc.set(MILLISECOND, 0); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations //synchronized (CompactDateTimeFormat) { // gc.setTime(CompactDateTimeFormat.parse(s)); @@ -1906,7 +2007,7 @@ public static GregorianCalendar parseDDMonYYYY(GregorianCalendar gc, String s) { String2.parseInt(s.substring(0, 2)), hour, min, sec); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations return gc; } @@ -1957,7 +2058,7 @@ public static GregorianCalendar parseYYYYDDD(GregorianCalendar gc, 1, 0, 0, 0); gc.set(Calendar.DAY_OF_YEAR, String2.parseInt(s.substring(4, 7))); gc.set(MILLISECOND, 0); - gc.get(YEAR); //force recalculations + gc.get(MONTH); //force recalculations //synchronized (YYYYDDDFormat) { // gc.setTime(YYYYDDDFormat.parse(YYYYDDDString)); @@ -2330,13 +2431,14 @@ public static String elapsedTimeString(double millis) { return "infinity"; long time = Math2.roundToLong(millis); + if (time < Long.MIN_VALUE + 10000 || + time > Long.MAX_VALUE - 10000) + return "infinity"; String negative = ""; if (time < 0) { negative = "-"; time = Math.abs(time); } - if (time == Long.MAX_VALUE) - return "infinity"; long ms = time % 1000; long sec = time / 1000; long min = sec / 60; sec = sec % 60; @@ -2580,11 +2682,11 @@ public static GregorianCalendar roundToIdealGC(double epochSeconds, /** - * Given a date time string, this suggests a Java/Joda date/time format suitable + * Given a date time string, this suggests a Java/java.time (was Joda) date/time format suitable * for parsing and output formatting. * * @param sample - * @return an appropriate Java/Joda date/time format + * @return an appropriate Java/java.time (was Joda) date/time format * or "" if not matched. 
* If the response starts with "yyyy-MM", parse with Calendar2.parseISODateTimeZulu();
     * else parse with java.time (was Joda).
@@ -2594,6 +2696,8 @@ public static String suggestDateTimeFormat(String sample) {
            return "";
        char ch = Character.toLowerCase(sample.charAt(0));
+       if (ch == '-' && sample.length() > 1)
+           ch = Character.toLowerCase(sample.charAt(1));
        if (ch >= '0' && ch <= '9') {
            for (int i = 0; i < digitRegexTimeFormat.length; i += 2) {
                if (sample.matches(digitRegexTimeFormat[i]))
@@ -2620,7 +2724,7 @@ public static String suggestDateTimeFormat(String sample) {
     * (other than nulls and ""'s), or "" if no suggestion.
     * The format is suitable for parsing and output formatting
     * If the response starts with "yyyy-MM", parse with Calendar2.parseISODateTimeZulu();
-    * else parse with Joda.
+    * else parse with java.time (was Joda).
     */
    public static String suggestDateTimeFormat(StringArray sa) {
        boolean debugMode = false;
@@ -2658,7 +2762,7 @@ public static String suggestDateTimeFormat(StringArray sa) {
    }

    /**
-    * Given one of the known dateTimeFormats, this returns a Joda Pattern for it.
+    * Given one of the known dateTimeFormats, this returns a java.time (was Joda) Pattern for it.
     * Patterns are thread safe.
     *
     * @return the relevant pattern, or null if not matched.
@@ -2667,13 +2771,102 @@ public static Pattern dateTimeFormatToPattern(String dateTimeFormat) {
        return dateTimeFormatPatternHM.get(dateTimeFormat);
    }

+    /**
+     * This formats epochSeconds as a String via dtf (e.g., from makeDateTimeFormatter below);
+     * any literal "[XXX][X]" in the result is then replaced by "Z".
+     */
+    public static String format(double epochSeconds, DateTimeFormatter dtf) {
+        String s = dtf.format(epochSecondsToGc(epochSeconds).toZonedDateTime());
+        s = String2.replaceAll(s, "[XXX][X]", "Z");
+        return s;
+    }
+
+    /**
+     * This makes a case-insensitive DateTimeFormatter
+     * with -01-01T00:00:00.000 defaults.
+     *
+     * @param pattern see
+     *    https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html
+     */
+    public static DateTimeFormatter makeDateTimeFormatter(String pattern, String zone) {
+        //always deal with proleptic YEAR (-1=2 BCE, 0=1 BCE, 1=1 CE), not YEAR_OF_ERA
+        //https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html#YEAR_OF_ERA
+        //??? Are there cases where y is used not as year, e.g., as a literal?
+        String yy = "yy";
+        int po = pattern.indexOf(yy);
+        if (po < 0) {
+            yy = "YY";
+            po = pattern.indexOf(yy);
+        }
+        if (po >= 0 && pattern.indexOf(yy + yy) < 0)
+            throw new SimpleException("DateTime formats with " + yy + " are not allowed. " +
+                "Change the source values to use 4-digit years, and use " + yy + yy + " in the dateTime format.");
+
+        pattern = String2.replaceAll(pattern, yy.charAt(0), 'u');
+
+        //http://stackoverflow.com/questions/34637626/java-datetimeformatter-for-time-zone-with-an-optional-colon-separator
+        pattern = String2.replaceAll(pattern, "Z", "[XXX][X]"); //most flexible time offset support
+
+        //http://stackoverflow.com/questions/38307816/datetimeformatterbuilder-with-specified-parsedefaulting-conflicts-for-year-field
+        DateTimeFormatter dtf = new DateTimeFormatterBuilder()
+            .parseCaseInsensitive() //needed for supporting e.g., WED in addition to official Wed
+            //.parseLenient()       //My tests pass without this. Its effect is unclear.
+ .appendPattern(pattern) + //.parseDefaulting(ChronoField.MONTH_OF_YEAR, 1) //this approach didn't work + .toFormatter(); + //so dtf has either an offset (via X) or a timezone + if (pattern.indexOf('X') < 0 && pattern.indexOf('x') < 0) + dtf = dtf.withZone(ZoneId.of(zone)); + return dtf; + } + + /** + * + * @throws RuntimeException + */ + public static double toEpochSeconds(String s, DateTimeFormatter dtf) { + TemporalAccessor ta = dtf.parse(s); + //Who designed this?! It's brutally complex. + //If it's a date, it doesn't have a time zone or a way to get time at start of day. + //I miss Joda. + + //convert year month into dateTime + if (dtf.getZone() == null) { + //OffsetDateTime + if (!ta.isSupported(ChronoField.DAY_OF_MONTH)) + ta = OffsetDateTime.of( + ta.get(ChronoField.YEAR), ta.get(ChronoField.MONTH_OF_YEAR), 1, + 0, 0, 0, 0, ZoneOffset.ofTotalSeconds(ta.get(ChronoField.OFFSET_SECONDS))); + //convert year month date into dateTime + else if (!ta.isSupported(ChronoField.INSTANT_SECONDS)) + ta = OffsetDateTime.of( + ta.get(ChronoField.YEAR), ta.get(ChronoField.MONTH_OF_YEAR), + ta.get(ChronoField.DAY_OF_MONTH), + 0, 0, 0, 0, ZoneOffset.ofTotalSeconds(ta.get(ChronoField.OFFSET_SECONDS))); + } else { + //ZonedDateTime + if (!ta.isSupported(ChronoField.DAY_OF_MONTH)) + ta = ZonedDateTime.of( + ta.get(ChronoField.YEAR), ta.get(ChronoField.MONTH_OF_YEAR), 1, + 0, 0, 0, 0, dtf.getZone()); + //convert year month date into dateTime + else if (!ta.isSupported(ChronoField.INSTANT_SECONDS)) + ta = ZonedDateTime.of( + ta.get(ChronoField.YEAR), ta.get(ChronoField.MONTH_OF_YEAR), + ta.get(ChronoField.DAY_OF_MONTH), + 0, 0, 0, 0, dtf.getZone()); + } + + return ta.getLong(ChronoField.INSTANT_SECONDS) + + ta.get( ChronoField.MILLI_OF_SECOND) / 1000.0; + } + /** * This converts s into a double with epochSeconds. * - * @param dateTimeFormat one of the ISO8601 formats above, or a Joda format. + * @param dateTimeFormat one of the ISO8601 formats above, or a java.time (was Joda) format. * If it starts with "yyyy-MM", sourceTime will be parsed with Calendar2.parseISODateTimeZulu(); - * else parse with Joda. + * else parse with java.time (was Joda). * @return the epochSeconds value or NaN if trouble */ public static double toEpochSeconds(String sourceTime, String dateTimeFormat) { @@ -2682,10 +2875,9 @@ public static double toEpochSeconds(String sourceTime, String dateTimeFormat) { //parse with Calendar2.parseISODateTime return safeIsoStringToEpochSeconds(sourceTime); - //parse with Joda - DateTimeFormatter formatter = - DateTimeFormat.forPattern(dateTimeFormat).withZone(DateTimeZone.UTC); - return formatter.parseMillis(sourceTime) / 1000.0; //thread safe + //parse with java.time (was Joda) + DateTimeFormatter formatter = makeDateTimeFormatter(dateTimeFormat, zulu); + return toEpochSeconds(sourceTime, formatter); //thread safe } catch (Throwable t) { if (verbose && sourceTime != null && sourceTime.length() > 0) @@ -2699,9 +2891,9 @@ public static double toEpochSeconds(String sourceTime, String dateTimeFormat) { /** * This converts sa into a DoubleArray with epochSeconds. * - * @param dateTimeFormat one of the ISO8601 formats above, or a Joda format. + * @param dateTimeFormat one of the ISO8601 formats above, or a java.time (was Joda) format. * If it starts with "yyyy-MM", sa strings will be parsed with Calendar2.parseISODateTimeZulu(); - * else parse with Joda. + * else parse with java.time (was Joda). 
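+     * For example, with dateTimeFormat="yyyy-MM-dd", the string "1970-01-02" yields 86400.0.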
* @return a DoubleArray with the epochSeconds values (any/all will be NaN if trouble)
     */
    public static DoubleArray toEpochSeconds(StringArray sa, String dateTimeFormat) {
@@ -2719,16 +2911,15 @@ public static DoubleArray toEpochSeconds(StringArray sa, String dateTimeFormat)
                for (int i = 0; i < n; i++)
                    da.add(safeIsoStringToEpochSeconds(sa.get(i)));
            } else {
-               //use Joda
+               //use java.time (was Joda)
                boolean printError = verbose;
-               DateTimeFormatter formatter =
-                   DateTimeFormat.forPattern(dateTimeFormat).withZone(DateTimeZone.UTC);
+               DateTimeFormatter formatter = makeDateTimeFormatter(dateTimeFormat, zulu);
                da.addN(n, Double.NaN);
                for (int i = 0; i < n; i++) {
                    String s = sa.get(i);
                    if (s != null && s.length() > 0) {
                        try {
-                           da.set(i, formatter.parseMillis(s) / 1000.0); //thread safe
+                           da.set(i, toEpochSeconds(s, formatter)); //thread safe
                        } catch (Throwable t2) {
                            if (printError) {
                                String2.log(" EDVTimeStamp.sourceTimeToEpochSeconds: error while parsing sourceTime=" +
@@ -2752,7 +2943,8 @@ public static DoubleArray toEpochSeconds(StringArray sa, String dateTimeFormat)
    /**
     * If s is a crude String date time format, this converts it to the
     * proper Java Date Time Format, e.g., yyyy-MM-dd'T'HH:mm:ssZ
-    * http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html
+    * https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html
+    * was http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html
     * This does the best it can but it is an impossible task without seeing
     * all the actual String date time data values.
     * This assumes hour, if present, is 24 hour. This doesn't handle am pm.
diff --git a/WEB-INF/classes/com/cohort/util/Math2.java b/WEB-INF/classes/com/cohort/util/Math2.java
index 57e00d535..a22e361b4 100644
--- a/WEB-INF/classes/com/cohort/util/Math2.java
+++ b/WEB-INF/classes/com/cohort/util/Math2.java
@@ -4,6 +4,7 @@
 */
package com.cohort.util;

+import java.nio.ByteBuffer;
import java.text.MessageFormat;
import java.util.Arrays;

@@ -137,6 +138,9 @@ public class Math2 {
        30, 35, 40, 45, 50, 60, 70, 80, 90, 100, 110 };

+   public static final double[] COMMON_MV9 = {
+       -99, -99.9, -99.99, -999, -9999, -99999, -999999, -9999999,
+       99, 99.9, 99.99, 999, 9999, 99999, 999999, 9999999};

    /**
@@ -857,7 +861,7 @@ public static final int roundToInt(double d) {
     * Undesirable: d.5 rounds up for positive numbers, down for negative.
     */
    public static final long roundToLong(double d) {
-       return d > Long.MAX_VALUE || d <= Long.MIN_VALUE - 0.5 || !isFinite(d)?
+       return d > Long.MAX_VALUE || d < -9.223372036854776E18 || !isFinite(d)?
            Long.MAX_VALUE :
            Math.round(d);
    }
@@ -1219,6 +1223,23 @@ public static String guessFracString(double d) {
    }

+    /**
+     * This converts a long to a double (Long.MAX_VALUE becomes NaN).
+     *
+     * @param tl the long to be converted
+     * @return a double.
+     *    If tl is Long.MAX_VALUE (the cohort long missing value), this returns Double.NaN.
+     */
+    public static final double longToDoubleNaN(long tl) {
+        if (tl == Long.MAX_VALUE)
+            return Double.NaN;
+        //make sure round(d) is a legit long. Low numbers are not a problem.
+        //ideally 9223372036854775806
+        if (tl > 9223372036854774784L) //best available
+            return 9.223372036854774784E18;
+        return tl;
+    }

    /**
     * Safely converts a float to a double.
     *
@@ -1902,4 +1923,5 @@ public static double finiteMax(double a, double b) {
            Double.isNaN(b)? a :
            Math.max(a,b);
    }

+
} //End of Math2 class.
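A minimal sketch of how the new longToDoubleNaN and the adjusted roundToLong are meant to interact near the edge of double precision (an illustrative snippet, not part of the patch; the values match the TestUtil assertions below):

    import com.cohort.util.Math2;

    public class LongDoubleDemo {
        public static void main(String[] args) {
            //Long.MAX_VALUE is the cohort long missing value, so it becomes NaN
            System.out.println(Math2.longToDoubleNaN(Long.MAX_VALUE));  //NaN
            //doubles can't represent every large long; 9223372036854774784 is the
            //largest value that survives the long -> double -> long round trip
            double d = Math2.longToDoubleNaN(9223372036854775806L);
            System.out.println(d);                    //9.223372036854774784E18
            System.out.println(Math2.roundToLong(d)); //9223372036854774784
        }
    }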
diff --git a/WEB-INF/classes/com/cohort/util/String2.java b/WEB-INF/classes/com/cohort/util/String2.java index 480d8b923..9b09280f4 100644 --- a/WEB-INF/classes/com/cohort/util/String2.java +++ b/WEB-INF/classes/com/cohort/util/String2.java @@ -25,6 +25,7 @@ import java.io.Writer; import java.lang.ref.WeakReference; import java.net.URLDecoder; +import java.nio.charset.Charset; import java.security.MessageDigest; import java.text.DecimalFormat; import java.util.ArrayList; @@ -78,6 +79,15 @@ public class String2 { */ public static String lineSeparator = System.getProperty("line.separator"); +// public final static String CHARSET = "charset"; //the name of the charset att + public final static String ISO_8859_1 = "ISO-8859-1"; //the value of the charset att and usable in Java code + public final static String ISO_8859_1_LC = ISO_8859_1.toLowerCase(); + public final static Charset ISO_8859_1_CHARSET = Charset.forName(ISO_8859_1); + public final static String ENCODING = "_Encoding"; //the name of the _Encoding att + public final static String UTF_8 = "UTF-8"; //a value of the _Encoding att and usable in Java code + public final static String UTF_8_LC = UTF_8.toLowerCase(); + public final static String JSON = "JSON"; + /** Returns true if the current Operating System is Windows. */ public static String OSName = System.getProperty("os.name"); public static boolean OSIsWindows = OSName.toLowerCase().indexOf("windows") >= 0; @@ -106,6 +116,49 @@ public class String2 { "\\p{L}[\\p{L}0-9'._%+-]{0,127}@[\\p{L}0-9.-]{1,127}\\.[A-Za-z]{2,4}"; public final static Pattern EMAIL_PATTERN = Pattern.compile(EMAIL_REGEX); + + public final static String ACDD_CONTACT_TYPES[] = {"person", "group", "institution", "position"}; //ACDD 1.3 + + //in the order they are used... + public final static String ACDD_PERSON_REGEX1 = "(DHYRENBACH|JFPIOLLE|JLH|ZHIJIN)"; + public final static String ACDD_GROUP_REGEX = + ".*(Center|CMEMS|GHRSST|Group|ICOADS|MEaSUREs|OBPG|Office|project|Project|SSALTO|Team).*"; + public final static String ACDD_INSTITUTION_REGEX1 = + ".*(Environment Canada|Remote Sensing Systems|University).*"; + public final static String ACDD_INSTITUTION_REGEX2 = ".*[A-Z]{3,}.*"; //3 or more adjacent capital letters anywhere + public final static String ACDD_INSTITUTION_REGEX3 = "([A-Za-z][a-z]+ ){3,}[A-Za-z][a-z]+.*"; //start with 4 or more words (including "of") + /** Catch most (not all) people's names, + e.g., Dr. Kenneth R. Jones, Ken R. Jones, Ken R Jones, Ken Jones, K Jones, Mary McCarthy + Don't match other things (e.g., institutions with 3 word names). + See tests in TestUtil.testString2. */ + public final static String ACDD_PERSON_REGEX2 = + "(Dr\\.? |Prof\\.? |)[A-Z](\\.|[a-z]*) ([A-Z]\\.? 
|)(Ma?c|)[A-Z][a-z]+(, .+|)"; + + public final static String NCCSV_VERSION = "NCCSV-1.0"; + public final static String NCCSV_GLOBAL = "*GLOBAL*"; + public final static String NCCSV_DATATYPE = "*DATA_TYPE*"; + public final static String NCCSV_SCALAR = "*SCALAR*"; + public final static String NCCSV_END_METADATA = "*END_METADATA*"; + public final static String NCCSV_END_DATA = "*END_DATA*"; + + public final static Pattern NCCSV_BYTE_ATT_PATTERN = Pattern.compile("-?\\d{1,3}b"); + public final static Pattern NCCSV_SHORT_ATT_PATTERN = Pattern.compile("-?\\d{1,5}s"); + public final static Pattern NCCSV_INT_ATT_PATTERN = Pattern.compile("-?\\d{1,10}i"); + public final static Pattern NCCSV_LONG_ATT_PATTERN = Pattern.compile("-?\\d{1,19}L"); + public final static Pattern NCCSV_FLOAT_ATT_PATTERN = Pattern.compile( + // -( 1 .? 1? | .1 ) e - 10 |NaN f + "(-?(\\d{1,15}\\.?\\d{0,15}|\\.\\d{1,15})([eE][+-]?\\d{1,2})?|NaN)f"); + public final static Pattern NCCSV_DOUBLE_ATT_PATTERN = Pattern.compile( + // -( 1 .? 1? | .1 ) e +- 100 |NaN d + "(-?(\\d{1,25}\\.?\\d{0,25}|\\.\\d{1,25})([eE][+-]?\\d{1,3})?|NaN)d"); + public final static Pattern NCCSV_CHAR_ATT_PATTERN = Pattern.compile( + // ' char | \special | "" | \uffff ' + "\"?'([ -~^\"]|\\\\[bfnrt/\\\'\\\"\\\\]|\"\"|\\\\u[0-9a-fA-F]{4})'\"?"); + + /* This is to test: does an NCSV string generated by Java Look Like A (LLA) number? */ + public final static Pattern NCCSV_LLA_NUMBER_PATTERN = Pattern.compile( + "(-?\\d[0-9.eE+-]*|NaN)(b|s|L|f|)"); //Java always writes a leading digit, e.g., 0.1, not .1 + /** These are NOT thread-safe. Always use them in synchronized blocks ("synchronized(gen....) {}").*/ private static DecimalFormat genStdFormat6 = new DecimalFormat("0.######"); private static DecimalFormat genEngFormat6 = new DecimalFormat("##0.#####E0"); @@ -601,7 +654,7 @@ public static int indexOf(char[] cArray, char c) { * This indexOf is a little different: it finds the first instance in s of any char in car. * * @param s a string - * @param car the chars you want to find any of (perhaps from charListString.toCharArray()) + * @param car the chars you want to find any of * @param fromIndex the index number of the position to start the search * @return The first instance in s of any char in car. If not found, it returns -1. */ @@ -614,6 +667,23 @@ public static int indexOf(String s, char[] car, int fromIndex) { return -1; } + /** + * This indexOf is a little different: it finds the first instance in s of any char in car. + * + * @param s a string + * @param car the chars you want to find any of + * @param fromIndex the index number of the position to start the search + * @return The first instance in s of any char in car. If not found, it returns -1. 
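+     * For example, indexOf("axbyc", "xy", 0) returns 1 (the position of 'x').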
+ */ + public static int indexOf(String s, String car, int fromIndex) { + int sLength = s.length(); + for (int index = Math.max(fromIndex, 0); index < sLength; index++) { + if (car.indexOf(s.charAt(index)) >= 0) + return index; + } + return -1; + } + /** @@ -662,6 +732,59 @@ public static int indexOf(double[] dArray, double d) { return indexOf(dArray, d, 0); } + /** This calls directReadFromFile with charset= UTF_8 */ + public static String directReadFromUtf8File(String fileName) + throws Exception { + return directReadFromFile(fileName, UTF_8); + } + + /** This calls directReadFromFile with charset= ISO_8859_1 */ + public static String directReadFrom88591File(String fileName) + throws Exception { + return directReadFromFile(fileName, ISO_8859_1); + } + + /** + * This reads the bytes of the file with the specified charset. + * This does not alter the characters (e.g., the line endings). + * + *

This method is generally appropriate for small and medium-sized + * files. For very large files or files that need additional processing, + * it may be better to write a custom method to + * read the file line-by-line, processing as it goes. + * + * @param fileName is the (usually canonical) path (dir+name) for the file + * @param charset e.g., ISO-8859-1, UTF-8, or "" or null for the default (ISO-8859-1) + * @return a String with the decoded contents of the file. + * @throws Exception if trouble + */ + public static String directReadFromFile(String fileName, String charset) + throws Exception { + + //declare the BufferedReader variable + //declare the results variable: String results[] = {"", ""}; + //BufferedReader and results are declared outside try/catch so + //that they can be accessed from within either try/catch block. + long time = System.currentTimeMillis(); + FileInputStream fis = new FileInputStream(fileName); + InputStreamReader isr = new InputStreamReader(fis, + charset == null || charset.length() == 0? ISO_8859_1 : charset); + StringBuilder sb = new StringBuilder(8192); + + //get the text from the file + try { + char buffer[] = new char[8192]; + int nRead; + while ((nRead = isr.read(buffer)) >= 0) //-1 = end-of-file + sb.append(buffer, 0, nRead); + } finally { + try { + isr.close(); + } catch (Exception e) { + } + } + return sb.toString(); + } /** * This is a variant of readFromFile that uses the default character set @@ -680,8 +803,7 @@ public static String[] readFromFile(String fileName, String charset) { } /** - * This reads the text contents of the specified file. - * This assumes the file uses the default character encoding. + * This reads the text contents of the specified text file. * *

This method uses try/catch to ensure that all possible * exceptions are caught and returned as the error String @@ -689,7 +811,7 @@ public static String[] readFromFile(String fileName, String charset) { * *

This method is generally appropriate for small and medium-sized
     * files. For very large files or files that need additional processing,
-    * it may be more efficient to write a custom method to
+    * it may be better to write a custom method to
     * read the file line-by-line, processing as it goes.
     *
     * @param fileName is the (usually canonical) path (dir+name) for the file
@@ -729,7 +851,7 @@ public static String[] readFromFile(String fileName, String charset, int maxAtte
        try {
            fis = new FileInputStream(fileName);
            isr = new InputStreamReader(fis,
-               charset == null || charset.length() == 0? "ISO-8859-1" : charset);
+               charset == null || charset.length() == 0? ISO_8859_1 : charset);
        } catch (Exception e) {
            if (attempt == maxAttempt) {
                log(ERROR + ": String2.readFromFile was unable to read " + fileName);
@@ -810,7 +932,7 @@ public static String[] readLinesFromFile(String fileName, String charset,
        for (int i = 0; i < maxAttempt; i++) {
            try {
                isr = new InputStreamReader(new FileInputStream(fileName),
-                   charset == null || charset.length() == 0? "ISO-8859-1" : charset);
+                   charset == null || charset.length() == 0? ISO_8859_1 : charset);
                break; //success
            } catch (RuntimeException e) {
                if (i == maxAttempt - 1)
@@ -1347,7 +1469,7 @@ public static boolean isUrl(String url) {
    /**
     * This returns true if the dir starts with http://, https://, ftp://, sftp://,
     * or smb://.
-    * This is like isRemote, but returns true for "file://...".
+    * This is like isRemote, but returns false for "file://...".
     *
     * @return true if the dir is remote (e.g., a URL other than file://)
     *    If dir is null or "", this returns false.
@@ -1849,31 +1971,86 @@ public static String zeroPad(String number, int nDigits) {
        return makeString('0', toAdd).concat(number);
    }

+    /**
+     * This converts a String[] into a JSON array of strings.
+     *
+     * @param sa the String[] to be converted
+     * @return e.g., ["aa", "bb", "cc"].
+     *    If sa is null, this returns null (as a String).
+     */
+    public static String toJsonArray(String sa[]) {
+        if (sa == null)
+            return "null";
+        int saLength = sa.length;
+        if (saLength == 0)
+            return "[]";
+        StringBuilder sb = new StringBuilder(10 * saLength);
+        for (int i = 0; i < saLength; i++) {
+            sb.append(i == 0? "[" : ",");
+            sb.append(toJson(sa[i]));
+        }
+        sb.append("]");
+        return sb.toString();
+    }
+
+
    /**
     * This makes a JSON version of a string
     * (\\, \f, \n, \r, \t and \" are escaped with a backslash character
     * and double quotes are added before and after).
     * null is returned as null.
+    * This variant encodes char #127 and above.
     *
     * @param s
     * @return the JSON-encoded string surrounded by "'s.
     */
    public static String toJson(String s) {
+       return toJson(s, 127, true);
+    }
+
+    /**
+     * This makes a JSON version of a string
+     * (\\, \f, \n, \r, \t and \" are escaped with a backslash character
+     * and double quotes are added before and after).
+     * null is returned as null.
+     * This variant encodes char #127 and above as \\uhhhh.
+     *
+     * @param s The String to be encoded.
+     * @param firstUEncodedChar The first char to be \\uhhhh encoded,
+     *    commonly 127, 256, or 65536.
+     * @return the JSON-encoded string surrounded by "'s.
+     */
+    public static String toJson(String s, int firstUEncodedChar) {
+        return toJson(s, firstUEncodedChar, true);
+    }
+
+    /**
+     * This is a variant of toJson that lets you encode newlines or not.
+     *
+     * @param s The String to be encoded.
+     * @param firstUEncodedChar The first char to be \\uhhhh encoded,
+     *    commonly 127, 256, or 65536.
+     * @return the JSON-encoded string surrounded by "'s.
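+     * For example, with firstUEncodedChar=127, char #233 (e-acute) is encoded as \\u00e9.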
+ */ + public static String toJson(String s, int firstUEncodedChar, boolean encodeNewline) { if (s == null) return "null"; int sLength = s.length(); - StringBuilder sb = new StringBuilder(sLength / 5 * 6); + StringBuilder sb = new StringBuilder((sLength / 5 + 1) * 6); sb.append('\"'); int start = 0; for (int i = 0; i < sLength; i++) { char ch = s.charAt(i); - if (ch < 32 || ch > 255) { + //using 127 (not 255) means the output is 7bit ASCII and file encoding is irrelevant + if (ch < 32 || ch >= firstUEncodedChar) { sb.append(s.substring(start, i)); start = i + 1; if (ch == '\f') sb.append("\\f"); - else if (ch == '\n') sb.append("\\n"); + else if (ch == '\n') sb.append(encodeNewline? "\\n" : "\n"); else if (ch == '\r') sb.append("\\r"); else if (ch == '\t') sb.append("\\t"); + else if (ch == '\b') {} //remove it + // / can be encoded as \/ but there is no need and it looks odd else sb.append("\\u" + String2.zeroPad(Integer.toHexString(ch), 4)); } else if (ch == '\\') { sb.append(s.substring(start, i)); @@ -1893,7 +2070,7 @@ public static String toJson(String s) { /** * This returns the unJSON version of a JSON string - * (surrounding "'s (if any) are removed and \\, \f, \n, \r, \t and \" are unescaped). + * (surrounding "'s (if any) are removed and \\, \f, \n, \r, \t, \/, and \" are unescaped). * This is very liberal in what it accepts, including all common C escaped characters: * http://msdn.microsoft.com/en-us/library/h21280bw%28v=vs.80%29.aspx * null and "null" are returned as null. @@ -1919,13 +2096,14 @@ public static String fromJson(String s) { po++; start = po + 1; ch = s.charAt(po); - if (ch == 'f') sb.append('\f'); - else if (ch == 'n') sb.append('\n'); - else if (ch == 'r') sb.append('\r'); - else if (ch == 't') sb.append('\t'); - else if (ch == '?') sb.append('?'); - else if (ch == '\\')sb.append('\\'); - else if (ch == '"') sb.append('\"'); + if (ch == 'f') sb.append('\f'); + else if (ch == 'n') sb.append('\n'); + else if (ch == 'r') sb.append('\r'); + else if (ch == 't') sb.append('\t'); + else if (ch == '?') sb.append('?'); + else if (ch == '\\') sb.append('\\'); + else if (ch == '/') sb.append('/'); + else if (ch == '"') sb.append('\"'); else if (ch == '\'') sb.append('\''); else if (ch == 'a' || ch == 'b' || ch == 'v') { //delete a=bell, b=backspace, v=vertTab @@ -1982,6 +2160,111 @@ else if (ch == 'a' || ch == 'b' || ch == 'v') { return sb.toString(); } + /** + * This converts an NCCSV encoded char to a true char + * (surrounding "'s and ''s (if any) are removed and \\, \f, \n, \r, \t, \/, and \" are unescaped). + * This is very liberal in what it accepts, including all common C escaped characters: + * http://msdn.microsoft.com/en-us/library/h21280bw%28v=vs.80%29.aspx + * + * @param s it may be enclosed by "'s and ''s, or not. + * @return the decoded char (or '?' if trouble) as a 1-char string. + */ + public static char fromNccsvChar(String s) { + if (s == null) + return '?'; + //String2.log(">> String2.fromNccsvChar in=" + annotatedString(s)); + if (s.length() >= 2 && s.charAt(0) == '"' && s.charAt(s.length() - 1) == '"') + s = s.substring(1, s.length() - 1); + if (s.length() >= 2 && s.charAt(0) == '\'' && s.charAt(s.length() - 1) == '\'') + s = s.substring(1, s.length() - 1); + s = s.equals("\"\"")? "\"" : fromJson(s); + //String2.log(">> String2.fromNccsvChar out=" + annotatedString(s)); + return s.length() > 0? 
s.charAt(0) : '?'; + } + + /** + * This converts an NCCSV string to a true string + * (surrounding "'s (if any) are removed and \\, \f, \n, \r, \t, \/, and \" are unescaped). + * This is very liberal in what it accepts, including all common C escaped characters: + * http://msdn.microsoft.com/en-us/library/h21280bw%28v=vs.80%29.aspx + * + * @param s it may be enclosed by "'s, or not. + * @return the decoded string + */ + public static String fromNccsvString(String s) { + return String2.replaceAll(fromJson(s), "\"\"", "\""); + } + + /** + * This encodes one char for an NCCSV char or String, without surrounding quotes. + */ + public static String toNccsvChar(char ch) { + if (ch == '\\') return "\\\\"; + if (ch == '\b') return "\\b"; //trouble + if (ch == '\f') return "\\f"; + if (ch == '\n') return "\\n"; + if (ch == '\r') return "\\r"; + if (ch == '\t') return "\\t"; + if (ch == '\"') return "\"\""; + if (ch < ' ' || ch > '~') return "\\u" + String2.zeroPad(Integer.toHexString(ch), 4); + return "" + ch; + } + + /** + * This encodes one String as an NCCSV data String, with surrounding double quotes + * only if necessary. + */ + public static String toNccsvDataString(String s) { + //encode the string + if (s == null || s.length() == 0) + return ""; + int n = s.length(); + StringBuilder sb = new StringBuilder(n * 2); + for (int i = 0; i < n; i++) + sb.append(toNccsvChar(s.charAt(i))); + + //surround in "'s? + if (s.startsWith(" ") || + s.endsWith(" ") || + s.indexOf(',') >= 0 || + s.indexOf('"') >= 0) + return "\"" + sb.toString() + "\""; + return sb.toString(); + } + + /** + * This encodes one String as an NCCSV att String, with surrounding double quotes + * only if necessary. + */ + public static String toNccsvAttString(String s) { + //encode the string + int n = s.length(); + StringBuilder sb = new StringBuilder(n * 2); + for (int i = 0; i < n; i++) + sb.append(toNccsvChar(s.charAt(i))); + + //surround in "'s? + if (s.startsWith(" ") || + s.endsWith(" ") || + s.indexOf(',') >= 0 || + s.indexOf('"') >= 0 || + NCCSV_CHAR_ATT_PATTERN.matcher(s).matches() || //Looks Like A char + NCCSV_LLA_NUMBER_PATTERN.matcher(s).matches()) //Looks Like A number + return "\"" + sb.toString() + "\""; + return sb.toString(); + } + + /** + * This encodes special characters in s if needed so that + * s can be stored as an item in a tsv string. + */ + public static String toTsvString(String s) { + if (s == null || s.length() == 0) + return ""; //json would return "null" + s = toJson(s); + return s.substring(1, s.length() - 1); //remove enclosing quotes + } + /** * This takes a multi-line string (with \\r, \\n, \\r\\n line separators) * and converts it into an ArrayList strings. @@ -2342,7 +2625,7 @@ public static String toCSSVString(char ar[]) { for (int i = 0; i < n; i++) { if (i > 0) sb.append(", "); - sb.append((int)ar[i]); //safe char to int type conversion + sb.append(toNccsvDataString("" + ar[i])); //safe char to int type conversion } return sb.toString(); } @@ -2788,8 +3071,10 @@ public static byte[] toByteArray(String s) { return null; int sLength = s.length(); byte[] ba = new byte[sLength]; - for (int i = 0; i < sLength; i++) - ba[i] = (byte)s.charAt(i); + for (int i = 0; i < sLength; i++) { + char c = s.charAt(i); //2016-11-29 I added: char>255 -> '?', it's better than just low 8 bits + ba[i] = (byte)(c < 256? 
c : '?'); + } return ba; } @@ -2805,8 +3090,10 @@ public static byte[] toByteArray(StringBuilder sb) { return null; int sbLength = sb.length(); byte[] ba = new byte[sbLength]; - for (int i = 0; i < sbLength; i++) - ba[i] = (byte)sb.charAt(i); + for (int i = 0; i < sbLength; i++) { + char c = sb.charAt(i); //2016-11-29 I added: char>255 -> 255, it's better than just low 8 bits + ba[i] = (byte)(c < 256? c : 255); + } return ba; } @@ -2848,7 +3135,7 @@ public static String hexDump(byte[] byteArray) { * This finds the first element in Object[] * where the ar[i].toString value equals to s. * - * @param ar the array of Objects + * @param ar the array of Objects (Strings?) * @param s the String to be found * @return the element number of ar which is equal to s (or -1 if ar is null, or s is null or not found) */ @@ -3156,7 +3443,7 @@ public static synchronized void setupLog( //always synchronize on logFileLock synchronized(logFileLock) { try { - logFile = new BufferedWriter(new FileWriter(fullFileName, append)); + logFile = new BufferedWriter(new FileWriter(fullFileName, append)); //default charset logFileSize = Math2.narrowToInt((new File(fullFileName)).length()); logFileName = fullFileName; //log file created, so assign logFileName } catch (Throwable t) { @@ -3882,9 +4169,11 @@ public static float parseFloat(String s) { try { s = s.replace(',', '.'); //!!! this is inconsistent with parseDouble - return Float.parseFloat(s); + float f = Float.parseFloat(s); + //String2.log(">> parseFloat " + s + " -> " + f); + return f; } catch (Exception e) { - //String2.log("parseFloat exception: " + s); + String2.log(">> parseFloat exception: " + s); return Float.NaN; } } @@ -4527,7 +4816,7 @@ public static String getClassPath() { //classPath is a URL! so spaces are encoded as %20 on Windows! //UTF-8: see https://en.wikipedia.org/wiki/Percent-encoding#Current_standard try { - classPath = URLDecoder.decode(classPath, "UTF-8"); + classPath = URLDecoder.decode(classPath, String2.UTF_8); } catch (Throwable t) { String2.log(MustBe.throwableToString(t)); } @@ -4789,7 +5078,7 @@ public static int findInvalidUnicode(String s, String alsoOK) { /** * This makes s valid Unicode by converting invalid characters (e.g., #128) - * with brackets, e.g., [#128]. The invalid characters are often Windows charset + * with \\uhhhh. The invalid characters are often Windows charset * characters #127 - 159. * * @param s @@ -4806,26 +5095,100 @@ public static String makeValidUnicode(String s, String alsoOK) { if (alsoOK.indexOf(ch) >= 0) sb.append(ch); else if (ch < 32) - sb.append("[#" + (int)ch + "]"); - else if (ch <= 126) + sb.append("\\u" + String2.zeroPad(Integer.toHexString(ch), 4)); + else if (ch < 127) sb.append(ch); - else if (ch <= 159) - sb.append("[#" + (int)ch + "]"); + else if (ch < 160) + sb.append("\\u" + String2.zeroPad(Integer.toHexString(ch), 4)); else sb.append(ch); //160+ is valid } return sb.toString(); } + /** + * This converts the char to an ISO-8859-1 (ISO_8859_1) char. + * This converts any char in 127-159 and >255 into '?'. + * + * @param ch the char to be converted + * @return an ISO_8859_1-only char. + */ + public static char toIso88591Char(char ch) { + if (ch < 127) return ch; + if (ch < 160) return '?'; + if (ch < 256) return ch; + return '?'; + } + + /** + * This converts the chars to ISO-8859-1 (ISO_8859_1) chars. + * This converts any char in 127-159 and >255 into '?'. + * + * @param car[] the char[] to be converted + * @return car for convenience. 
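+     * For example, {'a', '\u20ac'} becomes {'a', '?'} (the euro char is not in ISO-8859-1).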
+ */ + public static char[] toIso88591Chars(char car[]) { + int n = car.length; + for (int i = 0; i < n; i++) { + char ch = car[i]; + if (ch < 127) { } + else if (ch < 160) {car[i] = '?'; } + else if (ch < 256) { } + else {car[i] = '?'; } + } + return car; + } + + /** + * A little weird: This returns the ISO-8859-1 (ISO_8859_1) encoding of the + * string as a String (using only the lower byte of each 2-byte char), + * so a unicode string can be stored in a 1-byte/char string. + * This converts any char in 127-159 and >255 into '?'. + * + * @param s the string to be converted + * @return an ISO_8859_1-only string (perhaps the same string). + * If s==null, this returns "". + */ + public static String toIso88591String(String s) { + //return ISO_8859_1_CHARSET.decode( //makes a CharBuffer + // ISO_8859_1_CHARSET.encode(s)).toString(); //makes a ByteBuffer + + if (s == null) + return ""; + boolean returnS = true; + int n = s.length(); + StringBuilder sb = new StringBuilder(n); + for (int i = 0; i < n; i++) { + char ch = s.charAt(i); + if (ch < 127) { sb.append(ch); } + else if (ch < 160) { sb.append('?'); returnS = false; } + else if (ch < 256) { sb.append(ch); } + else { sb.append('?'); returnS = false; } + } + return returnS? s : sb.toString(); + } + + /** This converts all of the Strings to ISO_8859_1 encoding. + * + * @return sar for convenience + */ + public static String[] toIso88591Strings(String sar[]) { + int n = sar.length; + for (int i = 0; i < n; i++) + sar[i] = toIso88591String(sar[i]); + return sar; + } + /** * This returns the UTF-8 encoding of the string (or null if trouble). * The inverse of this is utf8ToString. */ public static byte[] getUTF8Bytes(String s) { try { - return s.getBytes("UTF-8"); + return s.getBytes(String2.UTF_8); } catch (Exception e) { - String2.log(ERROR + " in String2.getUTF8Bytes(" + s + "): " + e.toString()); - return null; + String2.log("Caught " + ERROR + " in String2.getUTF8Bytes(" + s + "): " + + MustBe.throwableToString(e)); + return new byte[]{59, 92, 92, 79, 92}; //ERROR } } @@ -4835,13 +5198,44 @@ public static byte[] getUTF8Bytes(String s) { */ public static String utf8ToString(byte[] bar) { try { - return new String(bar, "UTF-8"); + return new String(bar, String2.UTF_8); } catch (Exception e) { - String2.log(ERROR + " in String2.utf8ToString: " + e.toString()); - return null; + String2.log("Caught " + ERROR + " in String2.utf8ToString: " + + MustBe.throwableToString(e)); + return String2.ERROR; + } + } + + /** + * A little weird: This returns the UTF-8 encoding of the string as a String + * (using only the lower byte of each 2-byte char), + * so a unicode string can be stored in a 1-byte/char string. + * + */ + public static String toUTF8String(String s) { + try { + return new String(s.getBytes(String2.UTF_8)); + } catch (Exception e) { + String2.log("Caught " + ERROR + " in String2.toUTF8String(" + s + "): " + + MustBe.throwableToString(e)); + return String2.ERROR; } } + /** + * A little weird: This returns the unicode string from a UTF-8 encoded String. + */ + public static String fromUTF8String(String s) { + try { + return utf8ToString(toByteArray(s)); + } catch (Exception e) { + String2.log("Caught " + ERROR + " in String2.fromUTF8String(" + s + "): " + + MustBe.throwableToString(e)); + return String2.ERROR; + } + } + + /** * This creates the jump table (int[256]) for a given 'find' stringUtf8 * of use by indexOf(byte[], byte[], jumpTable[]) below. 
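For context, this kind of jump table is the bad-character shift table of a Boyer-Moore/Horspool style byte search. A minimal standalone sketch of the idea (illustrative only; not necessarily the patch's exact code):

    byte find[] = String2.getUTF8Bytes("source"); //the UTF-8 bytes to search for
    int jump[] = new int[256];
    java.util.Arrays.fill(jump, find.length);     //default: shift past the whole pattern
    for (int i = 0; i < find.length - 1; i++)     //later occurrences overwrite earlier ones
        jump[find[i] & 0xFF] = find.length - 1 - i;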
@@ -4973,7 +5367,7 @@ public static String passwordDigest(String algorithm, String password) { } public static final String FILE_DIGEST_OPTIONS[] = {"MD5", "SHA-1", "SHA-256"}; - public static final String FILE_DIGEST_EXTENSIONS[] = {".md5", ".sha1", ".sha256"}; + public static final String FILE_DIGEST_EXTENSIONS[] = {".md5", ".sha1", ".sha256"}; //Bagit likes these (after the '.') /** * This returns a hash digest of fullFileName (read as bytes) @@ -5226,7 +5620,7 @@ public static String encodeVariableNameSafe(String s) { * adds "xh" and the hash code for the entire original string, * so the result will always be less than ~41 characters. *
This meets MatLab restrictions: - * http://www.mathworks.com/help/matlab/ref/matlab.lang.makevalidname.html + * https://www.mathworks.com/help/matlab/ref/matlab.lang.makevalidname.html * *

THIS WON'T BE CHANGED. FILE NAMES CREATED FOR EDDGridFromFile and EDDTableFromFile * DEPEND ON SAME ENCODING OVER TIME. @@ -5278,7 +5672,7 @@ public static String encodeMatlabNameSafe(String s) { * adds "xh" and the hash code for the entire original string, * so the result will always be less than ~41 characters. *
This meets MatLab restrictions: - * http://www.mathworks.com/help/matlab/ref/matlab.lang.makevalidname.html + * https://www.mathworks.com/help/matlab/ref/matlab.lang.makevalidname.html * *

THIS WON'T BE CHANGED. SOME datasetIDs DEPEND ON SAME ENCODING OVER TIME.
     *
@@ -5422,35 +5816,6 @@ public static String quoteParameterIfNeeded(String s) {

-    /** If quoted=true, this puts double quotes around a string, if needed.
-     * In any case, carriageReturn/newline characters/combos are replaced by
-     * char #166 (pipe with gap).
-     *
-     * @param quoted if true, if a String value starts or ends with a space
-     *    or has a double quote or comma,
-     *    the value will be surrounded in double quotes
-     *    and internal double quotes become two double quotes.
-     * @return the revised string
-     */
-    public static String quoteIfNeeded(boolean quoted, String s) {
-        //this is Bob's unprecedented solution to dealing with newlines
-        //¦ is (char)166 (#166), so distinct from pipe, (char)124
-        int po = s.indexOf('\n');
-        if (po >= 0) {
-            s = replaceAll(s, '\n', (char)166); //'¦' (#166)
-            s = replaceAll(s, "\r", "");
-        } else {
-            s = replaceAll(s, '\r', (char)166); //'¦' (#166)
-            s = replaceAll(s, "\n", "");
-        }
-        if (quoted) {
-            if (s.indexOf('"') >= 0 || s.indexOf(',') >= 0 ||
-                (s.length() > 0 && (s.charAt(0) == ' ' || s.charAt(s.length() - 1) == ' ')))
-                s = "\"" + replaceAll(s, "\"", "\"\"") + "\"";
-        }
-        return s;
-    }
-
    /* *
     * This makes a medium-deep clone of an ArrayList by calling clone() of
     * each element of the ArrayList.
@@ -5737,5 +6102,36 @@ public static StringBuilder addNewlineIfNone(StringBuilder sb) {
        return sb;
    }

+    /**
+     * Validate ACDD contact type (case insensitive search, case sensitive return),
+     * or return null if no match.
+     */
+    public static String validateAcddContactType(String value) {
+        int which = caseInsensitiveIndexOf(ACDD_CONTACT_TYPES, value);
+        return which < 0? null : ACDD_CONTACT_TYPES[which];
+    }
+
+
+    /**
+     * Guess the ACDD contact type, or return null if not pretty sure.
+     */
+    public static String guessAcddContactType(String name) {
+        //guess publisher_type
+        //order of tests is important
+        //position is rare
+        if (name.matches(ACDD_PERSON_REGEX1))
+            return "person";
+        if (name.matches(ACDD_GROUP_REGEX))
+            return "group";
+        if (name.matches(ACDD_INSTITUTION_REGEX1))
+            return "institution";
+        if (name.matches(ACDD_INSTITUTION_REGEX2))
+            return "institution";
+        if (name.matches(ACDD_PERSON_REGEX2))
+            return "person";
+        //if not pretty sure, don't specify
+        return null;
+    }
+
} //End of String2 class.
diff --git a/WEB-INF/classes/com/cohort/util/String2LogFactory.java b/WEB-INF/classes/com/cohort/util/String2LogFactory.java
index ab075b270..743488b97 100644
--- a/WEB-INF/classes/com/cohort/util/String2LogFactory.java
+++ b/WEB-INF/classes/com/cohort/util/String2LogFactory.java
@@ -6,7 +6,7 @@
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/WEB-INF/classes/com/cohort/util/Test.java b/WEB-INF/classes/com/cohort/util/Test.java index acab2f2d6..72228864b 100644 --- a/WEB-INF/classes/com/cohort/util/Test.java +++ b/WEB-INF/classes/com/cohort/util/Test.java @@ -5,6 +5,7 @@ package com.cohort.util; import com.cohort.array.Attributes; +import com.cohort.array.PrimitiveArray; import java.awt.Color; import java.awt.Graphics; @@ -620,6 +621,12 @@ public static void ensureEqual(Object a, Object b, String message) { return; } + if (a instanceof PrimitiveArray) { + String err = ((PrimitiveArray)a).testEquals(b); + if (err.length() > 0) + error(err); + } + //fall through to most general case if (!a.equals(b)) error(errorInObjectEquals + message + "\nSpecifically:\n" + diff --git a/WEB-INF/classes/com/cohort/util/TestUtil.java b/WEB-INF/classes/com/cohort/util/TestUtil.java index ef1fe338f..0ea5cee45 100644 --- a/WEB-INF/classes/com/cohort/util/TestUtil.java +++ b/WEB-INF/classes/com/cohort/util/TestUtil.java @@ -25,9 +25,8 @@ import java.util.Vector; import javax.imageio.ImageIO; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatter; +import java.time.ZoneId; /** * This is a Java program to test all of the methods in com.cohort.util. @@ -111,6 +110,23 @@ public static void testMath2() { String2.log("test 215.1125 = " + f); Test.ensureEqual("215.1125", "" + f, "215.1125"); + //longToDoubleNaN + String2.log("test longToDoubleNaN"); + Test.ensureEqual(Math.round(Math2.longToDoubleNaN(-9223372036854775808L)), + -9223372036854775808L, "k-"); + Test.ensureEqual(String2.parseDouble("" + Math2.longToDoubleNaN( + -9223372036854775808L)), + -9.223372036854776E18, "k-2"); + Test.ensureEqual(Math.round(-9.223372036854776E18), + -9223372036854775808L, "k-3"); + Test.ensureEqual(Math2.roundToLong(-9.223372036854776E18), + -9223372036854775808L, "k-3"); + Test.ensureEqual(Math.round(Math2.longToDoubleNaN( 9223372036854775806L)), + 9223372036854774784L, "k+"); //Not good, but best available + + Test.ensureEqual(Math2.longToDoubleNaN(9223372036854775807L), Double.NaN, "kMV"); + Test.ensureEqual(Math2.longToDoubleNaN(Long.MAX_VALUE), Double.NaN, "kMV2"); + //log10 String2.log("test log10"); Test.ensureEqual(Math2.log10(100), 2, "a"); @@ -479,8 +495,12 @@ public static void testMath2() { Test.ensureEqual(Math2.roundToLong(0.5), 1, "g"); Test.ensureEqual(Math2.roundToLong(0.6), 1, "h"); Test.ensureEqual(Math2.roundToLong(1.49), 1, "i"); - Test.ensureEqual(Math2.roundToLong(Long.MIN_VALUE - 0.499), Long.MAX_VALUE, "k"); //unusual: limited double precision throws this to mv - Test.ensureEqual(Math2.roundToLong(Long.MAX_VALUE + 0.499), Long.MAX_VALUE, "l"); + // MIN_VALUE -9223372036854775808.0 + Test.ensureEqual(Math2.roundToLong(-9223372036854777000.0), Long.MAX_VALUE, "k"); //unusual: limited double precision throws this to mv + Test.ensureEqual(Math2.roundToLong(Long.MIN_VALUE), Long.MIN_VALUE, "k"); //unusual: limited double precision throws this to mv + Test.ensureEqual(Math2.roundToLong(9.223372036854774784E18), //largest that can do round trip + 9223372036854774784L, "l"); + Test.ensureEqual(Math2.roundToLong(Long.MAX_VALUE), Long.MAX_VALUE, "l"); 
Test.ensureEqual(Math2.roundToLong(Double.NaN), Long.MAX_VALUE, "m"); Test.ensureEqual(Math2.roundToLong(Double.POSITIVE_INFINITY), Long.MAX_VALUE, "o"); Test.ensureEqual(Math2.roundToLong(Double.NEGATIVE_INFINITY), Long.MAX_VALUE, "p"); @@ -1057,6 +1077,7 @@ public static void testMath2() { Test.ensureEqual(Math2.binaryFindClosest(sortedDuplicates, 2.9), 6, ""); Test.ensureEqual(Math2.binaryFindClosest(sortedDuplicates, 3.1), 6, ""); + String2.log("test reduceHashCode"); Random random = new Random(); for (i = 0; i < 100; i++) { int j = random.nextInt(); @@ -1067,6 +1088,7 @@ public static void testMath2() { for (int k = 0; k < n; k++) Test.ensureTrue(String2.isDigit(s.charAt(k)), error); } + } public static void timeCurrentTimeMillis() { @@ -1422,8 +1444,8 @@ public static void testString2() throws Exception { //toJson String2.log("test toJson"); - String a = "\\ \f\n\r\t\" z\u0000\uffffÿ"; - String b = "\"\\\\ \\f\\n\\r\\t\\\" z\\u0000\\uffffÿ\""; + String a = "\\ \f\n\r\t\" z\u0000\uffff\u00ff"; + String b = "\"\\\\ \\f\\n\\r\\t\\\" z\\u0000\\uffff\\u00ff\""; Test.ensureEqual(String2.toJson(a), b, ""); Test.ensureEqual(String2.fromJson(b), a, ""); Test.ensureEqual(String2.fromJson("\\?\\'"), "?'", ""); @@ -1437,6 +1459,62 @@ public static void testString2() throws Exception { Test.ensureEqual(String2.fromJson("\\x6m"), "", ""); Test.ensureEqual(String2.fromJson("\\u006m"), "", ""); + //toIso88591String + String2.log("test toIso88591String"); + s = String2.annotatedString(String2.toIso88591String("\u0000\n\r\t\f aA\u0091\u00fc\u20ac")); + Test.ensureEqual(s, "[0][10]\n[13][9][12] aA?[252]?[end]", "results=" + s); + + //fromNccsvChar + String2.log("test fromNccsvChar"); + Test.ensureEqual("" + String2.fromNccsvChar("a"), "a", ""); + Test.ensureEqual("" + String2.fromNccsvChar(" "), " ", ""); + Test.ensureEqual("" + String2.fromNccsvChar("' '"), " ", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\" \""), " ", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\"' '\""), " ", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\"'\"\"'\""), "\"", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\\t"), "\t", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\\n"), "\n", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\u20AC"), "\u20ac", ""); + Test.ensureEqual("" + String2.fromNccsvChar("'\u20AC'"), "\u20ac", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\"\u20AC\""), "\u20ac", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\"'\u20AC'\""), "\u20ac", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\"\""), "?", ""); + Test.ensureEqual("" + String2.fromNccsvChar("\'\'"), "?", ""); + Test.ensureEqual("" + String2.fromNccsvChar(""), "?", ""); + Test.ensureEqual("" + String2.fromNccsvChar(null), "?", ""); + + + //toNccsvString + String2.log("test toNccsvString"); + a = "\\ \f\n\r\t\"' z\u0000\uffff\u00ff"; + b = "\"\\\\ \\f\\n\\r\\t\"\"' z\\u0000\\uffff\\u00ff\""; + Test.ensureEqual(String2.toNccsvDataString(a), b, ""); + + + //PERSON_REGEX + String2.log("test PERSON_REGEX"); + Test.ensureTrue("Dr. Kenneth S. Jones, something".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Dr. Kenneth S. Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Dr. K S. Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Ken S. 
Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Ken S Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Ken Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("K Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Ke Jo".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Mary McKibbon".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue("Mary MacKibbon".matches(String2.ACDD_PERSON_REGEX2), ""); + + Test.ensureTrue(!"Dr. Kenneth S. Jones,".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"Kenneth R S Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"D. K S. Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"K S. CJones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"K S.. Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"KS Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"Ke. Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"Jones".matches(String2.ACDD_PERSON_REGEX2), ""); + Test.ensureTrue(!"Ken MdKelty".matches(String2.ACDD_PERSON_REGEX2), ""); + + //annotatedString String2.log("test annotatedString"); Test.ensureEqual(String2.annotatedString("\ta\nb"), "[9]a[10]\nb[end]", "a"); @@ -2577,8 +2655,20 @@ public static void testCalendar2() throws Exception { DateTimeFormatter dtf; String2.log("\n*** TestUtil.testCalendar2\n"); - String2.pressEnterToContinue("current time local: " + - Calendar2.getCurrentISODateTimeStringLocalTZ()); + String2.log("current time local: " + Calendar2.getCurrentISODateTimeStringLocalTZ()); + String2.pressEnterToContinue(); + + //timePrecisionToTimeFormat + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970-01-01T00:00:00.000Z"), "yyyy-MM-dd'T'HH:mm:ss.SSSZ", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970-01-01T00:00:00.0Z"), "yyyy-MM-dd'T'HH:mm:ss.SZ", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970-01-01T00:00:00Z"), "yyyy-MM-dd'T'HH:mm:ssZ", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970-01-01T00:00Z"), "yyyy-MM-dd'T'HH:mmZ", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970-01-01T00Z"), "yyyy-MM-dd'T'HHZ", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970-01-01"), "yyyy-MM-dd", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970-01"), "yyyy-MM", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("1970"), "yyyy-MM-dd'T'HH:mm:ssZ", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat("zz"), "yyyy-MM-dd'T'HH:mm:ssZ", ""); + Test.ensureEqual(Calendar2.timePrecisionToTimeFormat(null), "yyyy-MM-dd'T'HH:mm:ssZ", ""); //convertToJavaDateTimeFormat(String s) -> yyyy-MM-dd'T'HH:mm:ssZ // y-m-d --> push to Calendar2.parseISODateTime @@ -2731,11 +2821,15 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.convertToJavaDateTimeFormat("hhmm"), "HHmm", ""); Test.ensureEqual(Calendar2.convertToJavaDateTimeFormat("hhmmssZ"), "HHmmssZ", ""); + //isNumericTimeUnits + Test.ensureTrue(Calendar2.isNumericTimeUnits("HoURs SInCE 1980-01-01T00:00:00Z"), ""); + Test.ensureTrue(Calendar2.isNumericTimeUnits("daYs SINCE 1-1-1"), ""); + Test.ensureTrue(Calendar2.isNumericTimeUnits("days SINCE -4713-01-01"), ""); //convert local time (Standard or DST) to UTC //IDs are TZ strings from https://en.wikipedia.org/wiki/List_of_tz_database_time_zones - //String2.log(String2.toCSSVString(DateTimeZone.getAvailableIDs())); + 
String2.log(String2.toCSSVString(ZoneId.getAvailableZoneIds())); //Most common in the US (with comments in parentheses): //US/Hawaii, Pacific/Honolulu (no DST), //US/Aleutian, America/Adak, @@ -2747,6 +2841,8 @@ public static void testCalendar2() throws Exception { //US/Eastern, America/New_York, //Zulu + + //parse iso format String2.log("\nparse iso format"); //zulu time zone @@ -2781,42 +2877,39 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(s, "2016-03-14T00 Pacific => 2016-03-14T07:00:00Z", ""); - //parse Joda format - String2.log("\nparse joda format"); + //parse java.time (was Joda) format + String2.log("\nparse java.time (was joda) format"); //zulu time zone //In US, DST change has Sunday March 13, 2016 - DateTimeZone zuluDTZ = DateTimeZone.forID("Zulu"); //joda - DateTimeZone pacificDTZ = DateTimeZone.forID("America/Los_Angeles"); //joda - dtf = DateTimeFormat.forPattern("MM/dd/yyyy").withZone(zuluDTZ); + String zulu = Calendar2.zulu; + String pacific = "America/Los_Angeles"; + dtf = Calendar2.makeDateTimeFormatter("M/d/yyyy", Calendar2.zulu); s = "3/12/2016"; s = s + " Zulu => " + Calendar2.epochSecondsToIsoStringT( - dtf.parseMillis(s) / 1000.0) + "Z"; + Calendar2.toEpochSeconds(s, dtf)) + "Z"; String2.log(s); Test.ensureEqual(s, "3/12/2016 Zulu => 2016-03-12T00:00:00Z", ""); s = "3/14/2016"; s = s + " Zulu => " + Calendar2.epochSecondsToIsoStringT( - dtf.parseMillis(s) / 1000.0) + "Z"; + Calendar2.toEpochSeconds(s, dtf)) + "Z"; String2.log(s); Test.ensureEqual(s, "3/14/2016 Zulu => 2016-03-14T00:00:00Z", ""); //pacific time zone - dtf = DateTimeFormat.forPattern("MM/dd/yyyy").withZone(pacificDTZ); + dtf = Calendar2.makeDateTimeFormatter("M/d/yyyy", pacific); s = "3/12/2016"; s = s + " Pacific => " + Calendar2.epochSecondsToIsoStringT( - dtf.parseMillis(s) / 1000.0) + "Z"; + Calendar2.toEpochSeconds(s, dtf)) + "Z"; String2.log(s); Test.ensureEqual(s, "3/12/2016 Pacific => 2016-03-12T08:00:00Z", ""); s = "3/14/2016"; s = s + " Pacific => " + Calendar2.epochSecondsToIsoStringT( - dtf.parseMillis(s) / 1000.0) + "Z"; + Calendar2.toEpochSeconds(s, dtf)) + "Z"; String2.log(s); Test.ensureEqual(s, "3/14/2016 Pacific => 2016-03-14T07:00:00Z", ""); - if (true) - return; - //suggestDateTimeFormat Test.ensureEqual(Calendar2.suggestDateTimeFormat((String)null), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat(""), "", ""); @@ -2826,6 +2919,7 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.suggestDateTimeFormat("0000-001"), "yyyy-DDD", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("2999-366"), "yyyy-DDD", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("9999-399"), "yyyy-DDD", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-001"), "yyyy-DDD", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("1985-400"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("0000001"), "yyyyDDD", ""); @@ -2833,9 +2927,10 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.suggestDateTimeFormat("4999399"), "yyyyDDD", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("5000034"), "", ""); //5000 invalid for compact formats Test.ensureEqual(Calendar2.suggestDateTimeFormat("1985400"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713400"), "", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02T23:59:59.999-08:00"), "yyyy-MM-dd'T'HH:mm:ss.sssZ", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02T23:59:59.999"), 
"yyyy-MM-dd'T'HH:mm:ss.sss", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02T23:59:59.999-08:00"), "yyyy-MM-dd'T'HH:mm:ss.SSSZ", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02T23:59:59.999"), "yyyy-MM-dd'T'HH:mm:ss.SSS", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02T23:59:59-08:00"), "yyyy-MM-dd'T'HH:mm:ssZ", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02T23:59:59"), "yyyy-MM-dd'T'HH:mm:ss", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02T23:59"), "yyyy-MM-dd'T'HH:mm", ""); @@ -2844,9 +2939,17 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.suggestDateTimeFormat("9999-19"), "yyyy-MM", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("1985-20"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-01-02T23:59:59.999-08:00"), "yyyy-MM-dd'T'HH:mm:ss.SSSZ", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-01-02T23:59:59.999"), "yyyy-MM-dd'T'HH:mm:ss.SSS", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-01-02T23:59:59-08:00"), "yyyy-MM-dd'T'HH:mm:ssZ", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-01-02T23:59:59"), "yyyy-MM-dd'T'HH:mm:ss", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-01-02T23:59"), "yyyy-MM-dd'T'HH:mm", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-01-02T23"), "yyyy-MM-dd'T'HH", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713-01"), "yyyy-MM", ""); + - Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02 23:59:59.999"), "yyyy-MM-dd HH:mm:ss.sss", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02 23:59:59.9"), "yyyy-MM-dd HH:mm:ss.sss", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02 23:59:59.999"), "yyyy-MM-dd HH:mm:ss.SSS", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02 23:59:59.9"), "yyyy-MM-dd HH:mm:ss.SSS", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02 23:59:59-08:00"), "yyyy-MM-dd HH:mm:ssZ", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02 23:59:59"), "yyyy-MM-dd HH:mm:ss", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("9985-01-02 23:59"), "yyyy-MM-dd HH:mm", ""); @@ -2861,6 +2964,7 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.suggestDateTimeFormat("19850909335959"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("19850909296959"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("19850909295969"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-47130909295969"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("000001010000"), "yyyyMMddHHmm", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("499919392959"), "yyyyMMddHHmm", ""); @@ -2869,6 +2973,7 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.suggestDateTimeFormat("198509422359"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("198509093359"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("198509092969"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-471309092969"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("0000010100"), "yyyyMMddHH", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("4999193929"), "yyyyMMddHH", ""); @@ -2876,31 +2981,40 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.suggestDateTimeFormat("1985200223"), "", ""); 
Test.ensureEqual(Calendar2.suggestDateTimeFormat("1985094223"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("1985090933"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-4713090933"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("00000101"), "yyyyMMdd", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("49991939"), "yyyyMMdd", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("50000102"), "", ""); //5000 invalid for compact formats Test.ensureEqual(Calendar2.suggestDateTimeFormat("19852002"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("19850942"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-47130942"), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("000001"), "yyyyMM", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("499919"), "yyyyMM", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("500001"), "", ""); //5000 invalid for compact formats Test.ensureEqual(Calendar2.suggestDateTimeFormat("198520"), "", ""); - - - Test.ensureEqual(Calendar2.suggestDateTimeFormat("1/2/85"), "M/d/yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("11/22/85"), "M/d/yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("1/2/1985"), "M/d/yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("11/22/1985"), "M/d/yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 2, 85"), "MMM d, yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 22, 85"), "MMM d, yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 2, 1985"), "MMM d, yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("2 Jan 85"), "d MMM yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("2 Jan 1985"), "d MMM yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("22 Jan 1985"), "d MMM yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("2-JAN-85"), "d-MMM-yy", ""); - Test.ensureEqual(Calendar2.suggestDateTimeFormat("02-JAN-1985"), "d-MMM-yy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("-471320"), "", ""); + + Test.ensureEqual(Calendar2.suggestDateTimeFormat("1/2/85"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("11/22/85"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 2, 85"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 22, 85"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("2 Jan 85"), "", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("2-JAN-85"), "", ""); + + Test.ensureEqual(Calendar2.suggestDateTimeFormat("1/2/1985"), "M/d/yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("11/22/1985"), "M/d/yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("1/2/1985"), "M/d/yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("11/22/1985"), "M/d/yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 2, 1985"), "MMM d, yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 22, 1985"), "MMM d, yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("Jan 2, 1985"), "MMM d, yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("2 Jan 1985"), "d MMM yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("2 Jan 1985"), "d MMM yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("22 Jan 1985"), "d MMM yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("2-JAN-1985"), "d-MMM-yyyy", ""); + Test.ensureEqual(Calendar2.suggestDateTimeFormat("02-JAN-1985"), "d-MMM-yyyy", ""); 
Test.ensureEqual(Calendar2.suggestDateTimeFormat("Sun, 06 Nov 1994 08:49:37 GMT"), "EEE, dd MMM yyyy HH:mm:ss 'GMT'", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat("Sun, 06 Nov 1994 08:49:37 -0800"), @@ -2913,7 +3027,7 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.suggestDateTimeFormat(StringArray.fromCSV(", 1985-01-02, 9000-10-a1")), "", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat(StringArray.fromCSV(", 4 Feb 9999, 2 Jan 1985")), - "d MMM yy", ""); + "d MMM yyyy", ""); Test.ensureEqual(Calendar2.suggestDateTimeFormat(StringArray.fromCSV(", 1985-01-02, Jan 2, 1985")), "", ""); @@ -2924,6 +3038,15 @@ public static void testCalendar2() throws Exception { "6015-10-12T11:13:22", "yyyy-MM-dd"), 1.27672456402E11, ""); Test.ensureEqual(Calendar2.toEpochSeconds( "2015-1a-12T11:13:22", "yyyy-MM-dd"), Double.NaN, ""); + d = -6.21357696E10; + Test.ensureEqual(Calendar2.toEpochSeconds( + "0001-01-01T00:00:00", "yyyy-MM-dd"), d, ""); //parsed by Calendar2.parseIsoDateTime + Test.ensureEqual(Calendar2.toEpochSeconds( + "0000-01-01T00:00:00", "yyyy-MM-dd"), d -= 366*86400, ""); + Test.ensureEqual(Calendar2.toEpochSeconds( + "-0001-01-01T00:00:00","yyyy-MM-dd"), d -= 365*86400, ""); //parsed by Calendar2.parseIsoDateTime + Test.ensureEqual(Calendar2.toEpochSeconds( + "-0002-01-01T00:00:00","yyyy-MM-dd"), d -= 365*86400, ""); //parsed by Calendar2.parseIsoDateTime Test.ensureEqual(Calendar2.toEpochSeconds( "Sun, 06 Nov 1994 08:49:37 GMT", "EEE, dd MMM yyyy HH:mm:ss 'GMT'"), 7.84111777E8, ""); @@ -2931,7 +3054,7 @@ public static void testCalendar2() throws Exception { "Sun, 06 Nov 1994 08:49:3a GMT", "EEE, dd MMM yyyy HH:mm:ss 'GMT'"), Double.NaN, ""); Test.ensureEqual(Calendar2.toEpochSeconds( - "4 Feb 9999", "d MMM yy"), 2.533737024E11, ""); + "4 Feb 9999", "d MMM yyyy"), 2.533737024E11, ""); Test.ensureEqual(Calendar2.toEpochSeconds( "1985-01-04", "d MMM yy"), Double.NaN, ""); @@ -2943,10 +3066,10 @@ public static void testCalendar2() throws Exception { StringArray.fromCSV(", 1985-01-02, 2 Jan 1985"), "yyyy-MM").toString(), "NaN, 4.73472E8, NaN", ""); Test.ensureEqual(Calendar2.toEpochSeconds( - StringArray.fromCSV(", 4 Feb 9999, 2 Jan 1985"), "d MMM yy").toString(), + StringArray.fromCSV(", 4 Feb 9999, 2 Jan 1985"), "d MMM yyyy").toString(), "NaN, 2.533737024E11, 4.73472E8", ""); Test.ensureEqual(Calendar2.toEpochSeconds( - StringArray.fromCSV(", 4 Feb 9999, 1985-01-03"), "d MMM yy").toString(), + StringArray.fromCSV(", 4 Feb 9999, 1985-01-03"), "d MMM yyyy").toString(), "NaN, 2.533737024E11, NaN", ""); @@ -2957,6 +3080,8 @@ public static void testCalendar2() throws Exception { s = "d since 1-"; Test.ensureTrue(Calendar2.isTimeUnits(s), s); s = "d since 1-"; Test.ensureTrue(Calendar2.isTimeUnits(s), s); s = " hours since 1970-01-01T00:00:00Z "; Test.ensureTrue(Calendar2.isTimeUnits(s), s); + s = " hours since 0000-01-01T00:00:00Z "; Test.ensureTrue(Calendar2.isTimeUnits(s), s); + s = " hours since -0001-01-01T00:00:00Z ";Test.ensureTrue(Calendar2.isTimeUnits(s), s); s = "millis since 1970-01-01"; Test.ensureTrue(Calendar2.isTimeUnits(s), s); s = "d SiNCE 2001"; Test.ensureTrue(Calendar2.isTimeUnits(s), s); @@ -2970,88 +3095,114 @@ public static void testCalendar2() throws Exception { s = "d since analysis"; Test.ensureTrue(!Calendar2.isTimeUnits(s), s); s = "d since2001"; Test.ensureTrue(!Calendar2.isTimeUnits(s), s); - //test that all of thos formats work - dtf = DateTimeFormat.forPattern("yyyy-DDD").withZone(DateTimeZone.UTC); - 
Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "2002-027")), + //test that all of those formats work + dtf = Calendar2.makeDateTimeFormatter("yyyy-DDD", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "2002-027", dtf)), "2002-01-27T00:00:00", ""); - dtf = DateTimeFormat.forPattern("yyyyDDD").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "2002027")), + dtf = Calendar2.makeDateTimeFormatter("yyyyDDD", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "2002027", dtf)), "2002-01-27T00:00:00", ""); - dtf = DateTimeFormat.forPattern("yyyyMMddHHmmss").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "19850102235959")), + dtf = Calendar2.makeDateTimeFormatter("yyyyMMddHHmmss", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "19850102235959", dtf)), "1985-01-02T23:59:59", ""); - dtf = DateTimeFormat.forPattern("yyyyMMddHHmm").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "198501022359")), + dtf = Calendar2.makeDateTimeFormatter("yyyyMMddHHmm", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "198501022359", dtf)), "1985-01-02T23:59:00", ""); - dtf = DateTimeFormat.forPattern("yyyyMMddHH").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "1985010223")), + dtf = Calendar2.makeDateTimeFormatter("yyyyMMddHH", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "1985010223", dtf)), "1985-01-02T23:00:00", ""); - dtf = DateTimeFormat.forPattern("yyyyMMdd").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "19850102")), + dtf = Calendar2.makeDateTimeFormatter("yyyyMMdd", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "19850102", dtf)), "1985-01-02T00:00:00", ""); - dtf = DateTimeFormat.forPattern("yyyyMM").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "198501")), + dtf = Calendar2.makeDateTimeFormatter("yyyyMM", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "198501", dtf)), "1985-01-01T00:00:00", ""); - dtf = DateTimeFormat.forPattern("M/d/yy").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "1/2/85")), - "1985-01-02T00:00:00", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "11/22/85")), - "1985-11-22T00:00:00", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "1/2/1985")), + +//!!! 
2017-03-20 with switch to java.time yy expands to 2085 instead of 1985 +/* + dtf = Calendar2.makeDateTimeFormatter("M/d/yy", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "1/2/85", dtf)), + "2085-01-02T00:00:00", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "11/22/85", dtf)), + "2085-11-22T00:00:00", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "1/2/1985", dtf)), "1985-01-02T00:00:00", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "11/22/1985")), + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "11/22/1985", dtf)), "1985-11-22T00:00:00", ""); - dtf = DateTimeFormat.forPattern("MMM d, yy").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "Jan 2, 85")), - "1985-01-02T00:00:00", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "Jan 22, 85")), + dtf = Calendar2.makeDateTimeFormatter("MMM d, yy", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "Jan 2, 85", dtf)), + "2085-01-02T00:00:00", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "Jan 22, 85", dtf)), + "2085-01-22T00:00:00", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "Jan 22, 1985", dtf)), "1985-01-22T00:00:00", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "Jan 22, 1985")), + dtf = Calendar2.makeDateTimeFormatter("d MMM yy", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "2 Jan 85", dtf)), + "2085-01-02T00:00:00", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "22 Jan 1985", dtf)), "1985-01-22T00:00:00", ""); - dtf = DateTimeFormat.forPattern("d MMM yy").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "2 Jan 85")), - "1985-01-02T00:00:00", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "22 Jan 1985")), - "1985-01-22T00:00:00", ""); - dtf = DateTimeFormat.forPattern("d-MMM-yy").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "2-JAN-85")), - "1985-01-02T00:00:00", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "02-JAN-1985")), + dtf = Calendar2.makeDateTimeFormatter("d-MMM-yy", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "2-JAN-85", dtf)), + "2085-01-02T00:00:00", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "02-JAN-1985", dtf)), "1985-01-02T00:00:00", ""); - dtf = DateTimeFormat.forPattern("EEE, dd MMM yyyy HH:mm:ss 'GMT'").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "WED, 02 JAN 1985 01:02:03 GMT")), +*/ + dtf = Calendar2.makeDateTimeFormatter("EEE, dd MMM yyyy HH:mm:ss 'GMT'", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03 GMT", dtf)), + "1985-01-02T01:02:03", ""); + + dtf = Calendar2.makeDateTimeFormatter("EEE, dd MMM yyyy HH:mm:ssZ", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03Z", dtf)), "1985-01-02T01:02:03", ""); - dtf = DateTimeFormat.forPattern("EEE, dd MMM yyyy HH:mm:ss 
Z").withZone(DateTimeZone.UTC); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "WED, 02 JAN 1985 01:02:03 -0800")), + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03-08", dtf)), "1985-01-02T09:02:03", ""); - Test.ensureEqual(Calendar2.millisToIsoZuluString(dtf.parseMillis( - "WED, 02 JAN 1985 01:02:03 -08:00")), + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03-0800", dtf)), + "1985-01-02T09:02:03", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03-08:00", dtf)), + "1985-01-02T09:02:03", ""); + + dtf = Calendar2.makeDateTimeFormatter("EEE, dd MMM yyyy HH:mm:ss Z", "UTC"); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03 Z", dtf)), + "1985-01-02T01:02:03", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03 -08", dtf)), + "1985-01-02T09:02:03", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03 -0800", dtf)), + "1985-01-02T09:02:03", ""); + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.toEpochSeconds( + "WED, 02 JAN 1985 01:02:03 -08:00", dtf)), "1985-01-02T09:02:03", ""); Test.ensureTrue(Calendar2.probablyISODateTime("1234-01-01T00"), ""); Test.ensureTrue(Calendar2.probablyISODateTime("0000-0"), ""); Test.ensureTrue(Calendar2.probablyISODateTime("-9999-9"), ""); + Test.ensureTrue(Calendar2.probablyISODateTime("-0001-01"), ""); Test.ensureTrue(!Calendar2.probablyISODateTime("a1234-01-01T00"), ""); Test.ensureTrue(!Calendar2.probablyISODateTime("1234=01-01T00"), ""); @@ -3060,6 +3211,7 @@ public static void testCalendar2() throws Exception { Test.ensureTrue(!Calendar2.probablyISODateTime("12a4-01-01T00"), ""); Test.ensureTrue(!Calendar2.probablyISODateTime("+1234-01-01T00"), ""); Test.ensureTrue(!Calendar2.probablyISODateTime("1234-"), ""); + Test.ensureTrue(!Calendar2.probablyISODateTime("-0001"), ""); //factorToGetSeconds Test.ensureEqual(Calendar2.factorToGetSeconds("ms"), 0.001, "a"); @@ -3194,6 +3346,7 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.parseMinMaxString("min(z)-3.5", 100, false), 100-3.5, ""); Test.ensureEqual(Calendar2.parseMinMaxString("min(z) 36", 100, false), 100+36, ""); + String2.log("\nExpected errors:"); try { d = Calendar2.parseMinMaxString("min", 100, false); throw new RuntimeException("should have failed."); @@ -3223,7 +3376,31 @@ public static void testCalendar2() throws Exception { "willFail=\"" + willFail[i] + "\" other failure: " + es); } } + String2.log("> End of expected errors"); + //parseNumberTimeUnits(String ntu) { + Test.ensureEqual(Calendar2.parseNumberTimeUnits("1.4e5sec"), new double[]{1.4e5, 1}, ""); + Test.ensureEqual(Calendar2.parseNumberTimeUnits("2min"), new double[]{2, 60}, ""); + Test.ensureEqual(Calendar2.parseNumberTimeUnits("hours"), new double[]{1, 3600}, ""); + Test.ensureEqual(Calendar2.parseNumberTimeUnits("2.3e3"), new double[]{2.3e3, 1}, ""); + + String2.log("\nExpected errors:"); + willFail = new String[]{ + null, "", " , ", "1e500", "2monw", "zztop"}; + for (int i = 0; i < willFail.length; i++) { + try { + double dar[] = Calendar2.parseNumberTimeUnits(willFail[i]); + throw new RuntimeException(willFail[i] + " should have failed."); + } catch (Exception e) { + String es = 
MustBe.throwableToString(e); + if (es.indexOf("ERROR in parseNumberTimeUnits: ") < 0 && + es.indexOf("ERROR in Calendar2.factorToGetSeconds: ") < 0) { + String2.log("Unexpected error for " + willFail[i] + ": "); + throw e; + } + } + } + String2.log("> End of expected errors"); //getMonthName3 Test.ensureEqual(Calendar2.getMonthName3(1), "Jan", "a"); @@ -3889,6 +4066,24 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(Calendar2.unitsSinceToEpochSeconds(da[0], da[1], 12)), "1987-06-01T00:00:00", ""); Test.ensureEqual(Calendar2.epochSecondsToUnitsSince(da[0], da[1], Calendar2.isoStringToEpochSeconds("1987-06-01")), 12, ""); + //SeaDataNet, astronomical year, Chronological Julian Date + da = Calendar2.getTimeBaseAndFactor("days since -4712-01-01"); //-4713 BC is astronomicalYear=-4712 + Test.ensureEqual(da[0], -2.108668032E11, ""); + Test.ensureEqual(da[1], 86400, ""); + //http://www.julian-date.com/ + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT( + Calendar2.unitsSinceToEpochSeconds(da[0], da[1], 2457711.5)), //Chronological JD (CJD) starts at midnight + "2016-11-18T12:00:00", ""); + //http://www.hermetic.ch/cal_stud/jdn.htm section 5 says + // A chronological Julian day number is a count of nychthemerons, assumed + // to begin at midnight GMT, from the nychthemeron which began at + // midnight GMT on -4712-01-01 JC. + // Chronological Julian day number 2,452,952 is the period from + // midnight GMT on 2003-11-08 CE (Common Era) to the next midnight GMT. + Test.ensureEqual(Calendar2.epochSecondsToIsoStringT( + Calendar2.unitsSinceToEpochSeconds(da[0], da[1], 2452952)), //Chronological JD (CJD) starts at midnight + "2003-11-08T00:00:00", ""); + da = Calendar2.getTimeBaseAndFactor("years since 0001-01-01"); //some datasets use this! 
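The Chronological Julian Date assertions above can be checked by hand; this arithmetic sketch uses only the base/factor pair asserted in those tests:

    public class CjdCheck {
        public static void main(String[] args) {
            double base = -2.108668032E11; //epoch seconds of -4712-01-01T00:00:00Z, from the test
            double factor = 86400;         //"days since" -> seconds per day
            //CJD 2452952 should be midnight GMT at the start of 2003-11-08:
            System.out.println((long) (base + 2452952 * factor)); //1068249600 = 2003-11-08T00:00:00Z
        }
    }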
s = "1985-01-01T00:00:00"; d = Calendar2.isoStringToEpochSeconds(s); @@ -3984,6 +4179,8 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("2001-02-03", -2, Calendar2.YEAR)), "1999-02-03 00:00:00", ""); Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("0001-02-03", -1, Calendar2.YEAR)), "0000-02-03 00:00:00", ""); Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("0001-02-03", -2, Calendar2.YEAR)), "-0001-02-03 00:00:00", ""); + Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("0000-02-03", -1, Calendar2.YEAR)), "-0001-02-03 00:00:00", ""); + Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("0000-02-03", -2, Calendar2.YEAR)), "-0002-02-03 00:00:00", ""); Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("-0001-02-03", 1, Calendar2.YEAR)), "0000-02-03 00:00:00", ""); Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("-0001-02-03", 2, Calendar2.YEAR)), "0001-02-03 00:00:00", ""); Test.ensureEqual(Calendar2.formatAsISODateTimeSpace(Calendar2.isoDateTimeAdd("-0001-02-03", -3, Calendar2.MONTH)), "-0002-11-03 00:00:00", ""); @@ -4172,7 +4369,8 @@ public static void testCalendar2() throws Exception { Test.ensureEqual(Calendar2.elapsedTimeString(-3*Calendar2.MILLIS_PER_DAY - 4005), "-3 days 0h 0m 4s", ""); //was "-3 days 00:00:04.005", ""); Test.ensureEqual(Calendar2.elapsedTimeString(-1*Calendar2.MILLIS_PER_DAY), "-1 day", ""); Test.ensureEqual(Calendar2.elapsedTimeString(-2*Calendar2.MILLIS_PER_DAY), "-2 days", ""); - Test.ensureEqual(Calendar2.elapsedTimeString(-Long.MAX_VALUE), "infinity", ""); + Test.ensureEqual(Calendar2.elapsedTimeString(Long.MIN_VALUE), "infinity", ""); + Test.ensureEqual(Calendar2.elapsedTimeString(Long.MAX_VALUE-1), "infinity", ""); Test.ensureEqual(Calendar2.elapsedTimeString(-Double.NaN), "infinity", ""); //clearSmallerFields @@ -4209,6 +4407,9 @@ public static void testCalendar2() throws Exception { gc = Calendar2.parseISODateTimeZulu("-0000-08-31T16:01:02"); Calendar2.clearSmallerFields(gc, Calendar2.MINUTE); Test.ensureEqual(Calendar2.formatAsISODateTimeT(gc), "0000-08-31T16:01:00", ""); + gc = Calendar2.parseISODateTimeZulu("-0001-08-31T16:01:02"); + Calendar2.clearSmallerFields(gc, Calendar2.MINUTE); + Test.ensureEqual(Calendar2.formatAsISODateTimeT(gc), "-0001-08-31T16:01:00", ""); try { Calendar2.clearSmallerFields(gc, Calendar2.AM_PM); String2.log("Shouldn't get here.79"); Math2.sleep(60000); diff --git a/WEB-INF/classes/com/cohort/util/XML.java b/WEB-INF/classes/com/cohort/util/XML.java index 69c86206b..0765e73e0 100644 --- a/WEB-INF/classes/com/cohort/util/XML.java +++ b/WEB-INF/classes/com/cohort/util/XML.java @@ -229,11 +229,11 @@ public static String encodeAsXML(String s, boolean encodeAsXML) { /** This encodes spaces as (char)160 (nbsp) when they are leading, trailing, * or more than 1 consecutive. - * #160 (instead of &nbsp; or &#160;) is fine because that is the - * UTF character for a non-break-space. When UTF stream is encoded as UTF-8, - * it is appropriately encoded. + * #160 (instead of &nbsp; [not supported in XML] or &#160;) + * is fine because that is the character for a non-break-space. + * When the stream is encoded as UTF-8, it is appropriately encoded. * - * This is reasonable for HTML, but not recommended for xhtml. + * This is reasonable for HTML, but not recommended for xhtml(?). 
* * @param s * @return s with some spaces encoded as (char)160 (nbsp) @@ -711,7 +711,7 @@ public static void prettyXml(String inFileName, String outFileName) { String2.log("prettyXml\n in=" + inFileName + "\nout=" + outFileName); if (inFileName.equals(outFileName)) throw new RuntimeException("Error: inFileName equals outFileName!"); - String in[] = String2.readFromFile(inFileName, "UTF-8"); + String in[] = String2.readFromFile(inFileName, String2.UTF_8); if (in[0].length() > 0) throw new RuntimeException("Error while reading " + inFileName + "\n" + in[0]); String xml = in[1]; @@ -728,7 +728,7 @@ public static void prettyXml(String inFileName, String outFileName) { - column + column diff --git a/WEB-INF/classes/dods/dap/DFloat64.java b/WEB-INF/classes/dods/dap/DFloat64.java index 785851bc9..554eb198b 100644 --- a/WEB-INF/classes/dods/dap/DFloat64.java +++ b/WEB-INF/classes/dods/dap/DFloat64.java @@ -105,6 +105,8 @@ public synchronized void deserialize(DataInputStream source, StatusUI statusUI) throws IOException, EOFException { val = source.readDouble(); + //System.out.println(">>DFloat64=" + val); + if(statusUI != null) statusUI.incrementByteCount(8); } diff --git a/WEB-INF/classes/dods/dap/DInt16.java b/WEB-INF/classes/dods/dap/DInt16.java index df2916d45..c87fdf402 100644 --- a/WEB-INF/classes/dods/dap/DInt16.java +++ b/WEB-INF/classes/dods/dap/DInt16.java @@ -127,6 +127,7 @@ public synchronized void deserialize(DataInputStream source, // Read this value in as a 32bit int, since the smallest thing // we write to the network is 32bits. Cast it to short (16bit int)! val = (short)source.readInt(); + //System.out.println(">>DInt16=" + val); if(statusUI != null) statusUI.incrementByteCount(4); } diff --git a/WEB-INF/classes/dods/dap/DString.java b/WEB-INF/classes/dods/dap/DString.java index 59260a669..af3a9cc5f 100644 --- a/WEB-INF/classes/dods/dap/DString.java +++ b/WEB-INF/classes/dods/dap/DString.java @@ -105,7 +105,7 @@ public synchronized void deserialize(DataInputStream source, throws IOException, EOFException, DataReadException { int len = source.readInt(); if (len < 0) - throw new DataReadException("Negative string length read."); + throw new DataReadException("Negative string length read (" + len + ")."); //bob added (len) int modFour = len%4; // number of bytes to pad int pad = (modFour != 0) ? 
(4-modFour) : 0; @@ -131,11 +131,12 @@ public synchronized void deserialize(DataInputStream source, // This was chosen because it converts each byte to its Unicode value // with no translation (the first 256 glyphs in Unicode are ISO8859_1) try { - val = new String(byteArray, 0, len, "ISO8859_1"); + val = new String(byteArray, 0, len, "ISO-8859-1"); //bob changed from ISO8859_1 + //System.out.println(">>DString string=\"" + com.cohort.util.String2.replaceAll(val, "\u0007", "") + "\""); } catch (UnsupportedEncodingException e) { // this should never happen - System.err.println("ISO8859_1 encoding not supported by this VM!"); + System.err.println("ISO-8859-1 encoding not supported by this VM!"); //bob changed from ISO8859_1 System.exit(1); } } @@ -155,7 +156,7 @@ public void externalize(DataOutputStream sink) throws IOException { // with no translation (the first 256 glyphs in Unicode are ISO8859_1) try { - byte byteArray[] = val.getBytes("ISO8859_1"); + byte byteArray[] = val.getBytes("ISO-8859-1"); //bob changed from ISO8859_1 sink.writeInt(byteArray.length); int modFour = byteArray.length%4; // number of bytes to pad @@ -167,7 +168,7 @@ public void externalize(DataOutputStream sink) throws IOException { } catch (UnsupportedEncodingException e) { // this should never happen - System.err.println("ISO8859_1 encoding not supported by this VM!"); + System.err.println("ISO-8859-1 encoding not supported by this VM!"); //bob changed from ISO8859_1 System.exit(1); } } diff --git a/WEB-INF/classes/dods/dap/parser/DASParser.java b/WEB-INF/classes/dods/dap/parser/DASParser.java index bc30d8d37..35d3bd236 100644 --- a/WEB-INF/classes/dods/dap/parser/DASParser.java +++ b/WEB-INF/classes/dods/dap/parser/DASParser.java @@ -1,6 +1,7 @@ /* Generated By:JavaCC: Do not edit this line. DASParser.java */ package dods.dap.parser; +import com.cohort.util.MustBe; //Bob added import java.util.Stack; import dods.dap.*; @@ -12,7 +13,7 @@ public class DASParser implements DASParserConstants { private int type; private static final String attrTupleMsg = - "Error: Expected an attribute type. Such as Byte, Int32, String, et c.\n" + "Error: Expected an attribute type. Such as Byte, Int32, String, etc.\n" + "followed by a name and value.\n"; private static final String noDASMsg = @@ -360,7 +361,8 @@ final public void Strs() throws ParseException, DASException { // If we get an exception thrown inside a quoted string then assume // that the scanner has found EOF before the token (STR) ended (i.e. // we have an unterminated double quote on our hands). 5/29/2002 jhrg - error("Unterminated quote: " + e.getMessage() + ")"); + error("Unterminated quote: " + MustBe.throwableToString(e) + ")"); + //2017-05-05 Bob changed. was: e.getMessage() + ")"); } } diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/Browser.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/Browser.java index ae3d12415..6b1591885 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/Browser.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/Browser.java @@ -5394,8 +5394,8 @@ public static void test() throws Exception { String2.pressEnterToContinue("Is GoogleEarth showing a coverage? \n" + "Close it, then..."); } catch (Exception e) { - String2.pressEnterToContinue("Unexpected error:\n" + - MustBe.throwableToString(e)); + String2.pressEnterToContinue(MustBe.throwableToString(e) + + "\nUnexpected error."); } try { @@ -5411,8 +5411,8 @@ public static void test() throws Exception { String2.pressEnterToContinue("Is GoogleEarth showing stations? 
\n" + "Close it, then..."); } catch (Exception e) { - String2.pressEnterToContinue("Unexpected error:\n" + - MustBe.throwableToString(e)); + String2.pressEnterToContinue(MustBe.throwableToString(e) + + "\nUnexpected error."); } diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/HelloWorld.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/HelloWorld.java index 776c506fd..03907b9e9 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/HelloWorld.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/HelloWorld.java @@ -8,8 +8,8 @@ public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { res.setContentType("text/html"); - res.setEncoding("UTF-8"); - OutputStreamWriter out = new OutputStreamWriter(res.getOutputStream(), "UTF-8"); + res.setEncoding(String2.UTF_8); + OutputStreamWriter out = new OutputStreamWriter(res.getOutputStream(), String2.UTF_8); out.write( "\n" + "Hello World\n" + diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/Projects.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/Projects.java index 7c3fcee16..ae1a1fc12 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/Projects.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/Projects.java @@ -39,18 +39,13 @@ import org.apache.commons.codec.digest.DigestUtils; //in netcdf-all.jar //import org.codehaus.janino.ExpressionEvaluator; -import org.joda.time.*; -import org.joda.time.format.*; +import java.time.*; +import java.time.format.*; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. 
*/ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -209,7 +204,7 @@ public static void channelIslands() throws Exception { //sort by ID site.leftToRightSort(1); - String2.log("site table=\n" + site.toString("row", siteNRows)); + String2.log("site table=\n" + site.toString(siteNRows)); } //read f:/programs/kushner/KFM_Temperature.txt which has all the temperature data @@ -242,7 +237,7 @@ public static void channelIslands() throws Exception { } //sort by island, site, time - String2.log("pre-sort n=" + temp.nRows() + " temp table=\n" + temp.toString("row", 5)); + String2.log("pre-sort n=" + temp.nRows() + " temp table=\n" + temp.toString(5)); temp.leftToRightSort(3); //go through rows of temp, saving separate station files @@ -365,7 +360,7 @@ public static void channelIslands() throws Exception { if (nStationsCreated == 1) { Table tTable = new Table(); tTable.read4DNc("c:/programs/kushner/" + tempID + ".nc", null, 0, stationColumnName, 4); - String2.log("\nstation0=\n" + tTable.toString("row", 3)); + String2.log("\nstation0=\n" + tTable.toString(3)); //from site ascii file //Anacapa Admiral's Reef 34 00 200 N 119 25 520 W 16 //data from original ascii file @@ -535,7 +530,7 @@ public static void kfmTemperature200801() throws Exception { } //sort by ID, Time - String2.log("pre-sort n=" + temp.nRows() + " temp table=\n" + temp.toString("row", 5)); + String2.log("pre-sort n=" + temp.nRows() + " temp table=\n" + temp.toString(5)); temp.sort(new int[]{1, 0}, new boolean[] {true, true}); //go through rows of temp, saving separate station files @@ -668,7 +663,7 @@ public static void kfmTemperature200801() throws Exception { if (nStationsCreated == 1) { Table tTable = new Table(); tTable.read4DNc("c:/programs/kfm200801/KFMTemperature/" + tempID + ".nc", null, 0, stationColumnName, 4); - String2.log("\nstation0=\n" + tTable.toString("row", 3)); + String2.log("\nstation0=\n" + tTable.toString(3)); //from site ascii file //Anacapa Admiral's Reef 34 00 200 N 119 25 520 W 16 //data from original ascii file @@ -1132,7 +1127,7 @@ else if (tsvName.equals("KFM_Transect")) summary = //review the table if (tsvI == 0 && (startRow == 0 || row == dataNRows)) { - String2.log(stationTable.toString("row", 100)); + String2.log(stationTable.toString(100)); String2.pressEnterToContinue("Check if the file (above) is ok, then..."); } String2.log(" startRow=" + startRow + " end=" + (row-1) + " island=" + island + " station=" + station); @@ -1496,7 +1491,7 @@ else if (tabName.equals("KFMBandTransect")) summary = //review the table if (tabI == 0 && (startRow == 0 || row == dataNRows)) { - String2.log(stationTable.toString("row", 100)); + String2.log(stationTable.toString(100)); String2.pressEnterToContinue("Check if the file (above) is ok, then..."); } String2.log(" startRow=" + startRow + " end=" + (row-1) + " island=" + island + " station=" + station); @@ -1809,7 +1804,7 @@ public static void kfmFishTransect200801() throws Exception { //review the table if (tabI == 0 && (startRow == 0 || row == dataNRows)) { - String2.log(stationTable.toString("row", 100)); + String2.log(stationTable.toString(100)); String2.pressEnterToContinue("Check if the file (above) is ok, then..."); } String2.log(" startRow=" + startRow + " end=" + (row-1) + " island=" + island + " station=" + station); @@ -2518,7 +2513,7 @@ public static void kushner() throws Exception { //write out the file table.saveAsTabbedASCII("c:/programs/kushner/NDBC_" + id[idi] + "_met.asc"); - String2.log(table.toString("row", 5)); + 
String2.log(table.toString(5)); } } @@ -2676,7 +2671,7 @@ public static void writeSsc(Writer writer, String ncUrl) throws Exception { "urlPath=\"WCOS/temp/" + year + "_" + name6 + "_" + depth + "m\">\n" + " ncdods\n" + " \n" + - " \n" + + " \n" + " \n" + " \n" + " \n" + @@ -2693,6 +2688,38 @@ public static void writeSsc(Writer writer, String ncUrl) throws Exception { lastNcName = nameExt; } + + /** + * This makes ncml files for soda (3.3.1). + * + * @param iceOcean "ice" or "ocean" + * @throws Exception if trouble + */ + public static void makeSoda331Ncml(String iceOcean, int startYear, int stopYear) + throws Exception { + + String dir = "/u00/soda3/soda3_" + iceOcean + "/ncml/"; + String preName = "soda3.3.1_mn_" + iceOcean + "_reg_"; + for (int year = startYear; year <= stopYear; year++) { + String name = preName + year; + String2.log("writing " + dir + name + ".ncml"); + Writer writer = new FileWriter(dir + name + ".ncml"); + StringBuilder values = new StringBuilder(); + for (int i = 1; i <= 12; i++) + values.append((Calendar2.isoStringToEpochSeconds(year + "-" + String2.zeroPad(""+i, 2) + "-16") / + Calendar2.SECONDS_PER_DAY) + " "); + writer.write( +"\n" + +" \n" + +" \n" + +" " + values+ "\n" + +" \n" + +"\n"); + writer.close(); + } + } + /** * This adds metadata and a time dimension to SODA data files. * See http://www.atmos.umd.edu/~ocean/ @@ -2794,6 +2821,7 @@ public static void soda(String sodaVersion, String oldDir, String newDir) throws String newName = cdfName.substring(0, cdfName.length() - 3) + "nc"; newFile = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, newDir + newName); + boolean nc3Mode = true; Group rootGroup = newFile.addGroup(null, ""); //find old dimensions @@ -3039,7 +3067,7 @@ public static void soda(String sodaVersion, String oldDir, String newDir) throws //define newVar in new file newVars[v] = newFile.addVariable(rootGroup, varName, dataType, dimensions); - NcHelper.setAttributes(newVars[v], atts); + NcHelper.setAttributes(nc3Mode, newVars[v], atts); } //define GLOBAL metadata @@ -3123,7 +3151,7 @@ public static void soda(String sodaVersion, String oldDir, String newDir) throws //has title gatts.add("Westernmost_Easting", minLon); //set the globalAttributes - NcHelper.setAttributes(rootGroup, gatts); + NcHelper.setAttributes(nc3Mode, rootGroup, gatts); //leave define mode newFile.create(); @@ -3177,11 +3205,75 @@ public static void soda(String sodaVersion, String oldDir, String newDir) throws } } + /** Can netcdf-java write longs (64bit integers) into a nc3 file? 
+ * 2017-02-08: +java.lang.IllegalArgumentException: illegal dataType: long not supported in netcdf-3 + at ucar.nc2.NetcdfFileWriter.addVariable(NetcdfFileWriter.java:538) + at ucar.nc2.NetcdfFileWriter.addVariable(NetcdfFileWriter.java:518) + at gov.noaa.pfel.coastwatch.Projects.testLongInNc3(Projects.java:3237) +5 + at gov.noaa.pfel.coastwatch.TestAll.main(TestAll.java:442) + */ + public static void testLongInNc3() throws Exception { + String2.log("\n*** testLongInNc3"); + + //get a list of files + NetcdfFileWriter newFile = null; + String dirFileName = "/data/ethan/testLongInNc3.nc"; + try { + newFile = NetcdfFileWriter.createNew( + NetcdfFileWriter.Version.netcdf3, dirFileName); + boolean nc3Mode = true; + Group rootGroup = newFile.addGroup(null, ""); + + //create the dimensions + Dimension dim = newFile.addDimension(rootGroup, "row", 5); + ArrayList dims = new ArrayList(); + dims.add(dim); + + ArrayList dimensions = new ArrayList(); + DataType dataType = DataType.LONG; + + Attributes atts = new Attributes(); + atts.set("units", "count"); + + Variable var = newFile.addVariable(rootGroup, "longs", dataType, dims); + NcHelper.setAttributes(nc3Mode, var, atts); + + //define GLOBAL metadata + Attributes gatts = new Attributes(); + gatts.set("title", "test of 64bit integers"); + NcHelper.setAttributes(nc3Mode, rootGroup, gatts); + + //leave define mode + newFile.create(); + + ArrayLong.D1 array = new ArrayLong.D1(5); + array.set(0, Long.MIN_VALUE); + array.set(1, -999); + array.set(2, 0); + array.set(3, 999); + array.set(4, Long.MAX_VALUE); + + newFile.write(var, array); + + newFile.close(); newFile = null; + + String2.log("newFile=" + NcHelper.dumpString(dirFileName, true)); + + String2.log("\n*** testLongInNc3 finished successfully."); + + } catch (Exception e) { + try {newFile.close();} catch (Exception e2) {} + String2.log(MustBe.throwableToString(e)); + } + } + + + /** * This is a test of reading a coastwatch .hdf file (as they distribute) * with the new netcdf-java 4.0 library. * With the hope of making a thredds iosp for these files (see - http://www.unidata.ucar.edu/software/netcdf-java/tutorial/IOSPoverview.html ). + https://www.unidata.ucar.edu/software/netcdf-java/tutorial/IOSPoverview.html ). 
* * @throws Exception if trouble */ @@ -3510,7 +3602,7 @@ public static void convertNewportCTD() throws Exception { String.class, String.class, float.class, float.class, String.class}; //find timezone America/Los_Angeles - //String2.log(String2.toCSSVString(DateTimeZone.getAvailableIDs().toArray())); + //String2.log(String2.toCSSVString(ZoneId.getAvailableZoneIds().toArray())); //Test.ensureTrue(false, ""); //recursively delete any files in destDir @@ -3519,8 +3611,10 @@ //read the data source file String2.log("\nreading the data source file"); Table dataTable = new Table(); - dataTable.readASCII(sourceDir + sourceCsv, String2.readLinesFromFile(sourceDir + sourceCsv, null, 3), - -1, 0, null, null, null, null, false); //don't simplify + dataTable.readASCII(sourceDir + sourceCsv, + String2.readLinesFromFile(sourceDir + sourceCsv, null, 3), + -1, 0, "", + null, null, null, null, false); //don't simplify Test.ensureEqual(dataTable.nColumns(), dataColNames.length, "dataTable.nColumns() != dataColNames.length"); String2.log(""); @@ -3595,8 +3689,7 @@ public static void convertNewportCTD() throws Exception { StringArray localTimePa = (StringArray)dataTable.findColumn("local_time"); DoubleArray timePa = new DoubleArray(); DateTimeFormatter dateTimeFormatter = - DateTimeFormat.forPattern("MM/dd/yyyy hh:mm:ss aa").withZone( - DateTimeZone.forID("America/Los_Angeles")); + Calendar2.makeDateTimeFormatter("MM/dd/yyyy hh:mm:ss aa", "America/Los_Angeles"); for (int row = 0; row < datePa.size(); row++) { String tDate = datePa.get(row); if (tDate.length() == 0) { @@ -3612,7 +3705,7 @@ tDate = tDate.substring(0, 10) + tLocal.substring(10); } //Newport, OR is same time zone as Pacific Grove, so just use default local time zone. 
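An editorial sketch of the local-to-UTC conversion this hunk performs, in plain java.time (the sample timestamp is hypothetical; note that plain java.time accepts only a single 'a' for AM/PM, so the Joda-style "aa" pattern above presumably relies on Calendar2.makeDateTimeFormatter to translate it):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;

    public class LocalToUtcDemo {
        public static void main(String[] args) {
            DateTimeFormatter f = DateTimeFormatter.ofPattern("MM/dd/yyyy hh:mm:ss a");
            long epochSec = LocalDateTime.parse("06/10/2004 02:30:00 PM", f)
                .atZone(ZoneId.of("America/Los_Angeles")) //interpret as local wall-clock time
                .toEpochSecond();                         //zulu epoch seconds
            System.out.println(epochSec); //the instant 2004-06-10T21:30:00Z (PDT = UTC-7)
        }
    }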
- double sec = Math2.roundToDouble(dateTimeFormatter.parseMillis(tDate) / 1000.0); //timeInMillis is zulu time + double sec = Calendar2.toEpochSeconds(tDate, dateTimeFormatter); if (row == 0 || row == 6053) String2.log("time example: row=" + row + " \"" + tDate + "\" was converted to " + Calendar2.safeEpochSecondsToIsoStringTZ(sec, "")); @@ -3627,7 +3720,7 @@ public static void convertNewportCTD() throws Exception { //read the latLon file String2.log("\nreading the latLon source file"); Table latLonTable = new Table(); - latLonTable.readASCII(sourceDir + sourceLatLon, -1, 0, null, null, null, null); + latLonTable.readASCII(sourceDir + sourceLatLon, -1, 0, "", null, null, null, null); Test.ensureEqual(latLonTable.nColumns(), latLonColNames.length, "latLonTable.nColumns() != latLonColNames.length"); for (int col = 0; col < latLonColNames.length; col++) { //set the column name @@ -3722,7 +3815,7 @@ public static void convertNewportCTD() throws Exception { table.saveAsFlatNc(fullName, "row", false); if (startRow < 100 || row == nRows - 1) - String2.log(table.toCSVString()); + String2.log(table.toString()); //if (startRow > 100) Test.ensureTrue(false, "Evaluate the tables."); lastStationCode = oldStationCodePa.get(row); @@ -3777,7 +3870,7 @@ region_caught Int8 (4) */ String2.log("\nreading the data source file"); Table dataTable = new Table(); dataTable.readASCII(sourceDir + sourceCsv, String2.readLinesFromFile(sourceDir + sourceCsv, null, 3), - -1, 0, null, null, null, null, false); //don't simplify + -1, 0, "", null, null, null, null, false); //don't simplify Test.ensureEqual(dataTable.nColumns(), dataColNames.length, "dataTable.nColumns() != dataColNames.length"); String2.log(""); @@ -3878,7 +3971,7 @@ comments Char (240) */ String2.log("\nreading the marCat source file"); Table marCatTable = new Table(); - marCatTable.readASCII(sourceDir + sourceMarCat, -1, 0, null, null, null, null); + marCatTable.readASCII(sourceDir + sourceMarCat, -1, 0, "", null, null, null, null); Test.ensureEqual(marCatTable.nColumns(), marCatColNames.length, "marCatTable.nColumns() != marCatColNames.length"); for (int col = 0; col < marCatColNames.length; col++) { //set the column name @@ -3952,7 +4045,7 @@ comments Char (240) */ table.saveAsFlatNc(fullName, "row", false); if (startRow < 100 || row == nRows - 1) - String2.log(table.toCSVString()); + String2.log(table.toString()); //if (startRow > 100) Test.ensureTrue(false, "Evaluate the tables."); lastRegion = tRegion; @@ -4997,7 +5090,7 @@ public static void getChristinaShowsData() throws Throwable { sa.intraReplaceAll("-blanket", "- blanket"); sa.intraReplaceAll("Other, includes gill netting, beach seining, round", "Other"); - String2.log("\n*** activity09=\n" + activity09.toCSVString()); + String2.log("\n*** activity09=\n" + activity09.toString()); //areaCode09 Table areaCode09 = new Table(); @@ -5011,7 +5104,7 @@ public static void getChristinaShowsData() throws Throwable { sa = (StringArray)areaCode09.getColumn(1); sa.intraReplaceAllIgnoreCase("\"Punta Baha - 28 \"\" 30.0' N.\"", "Punta Baha - 28.5N"); sa.intraReplaceAllIgnoreCase("\"28\"\" 30.0' - punta Eugenia\"", "Punta Eugenia - 28.5N"); - String2.log("\n*** areaCode09=\n" + areaCode09.toCSVString()); + String2.log("\n*** areaCode09=\n" + areaCode09.toString()); //idMethod09 Table idMethod09 = new Table(); @@ -5028,7 +5121,7 @@ public static void getChristinaShowsData() throws Throwable { "Echo characteristic"); sa.intraReplaceAll("detetions", "detections"); //ok??? 
sa.intraReplaceAll(".", ""); - String2.log("\n*** idMethod09=\n" + idMethod09.toCSVString()); + String2.log("\n*** idMethod09=\n" + idMethod09.toString()); //lengthUnit09 Table lengthUnit09 = new Table(); @@ -5048,12 +5141,12 @@ public static void getChristinaShowsData() throws Throwable { sa.intraReplaceAllIgnoreCase("millimeters", "mm"); sa.intraReplaceAll("length,squid measurement", "length (squid)"); sa.intraReplaceAll("length.", "length"); - String2.log("\n*** lengthUnit09=\n" + lengthUnit09.toCSVString()); + String2.log("\n*** lengthUnit09=\n" + lengthUnit09.toString()); //methodCapt09 Table methodCapt09 = new Table(); methodCapt09.readASCII(tsvDir + "methodCapt09.tsv"); - String2.log("\n*** original methodCapt09=\n" + methodCapt09.dataToCSVString()); + String2.log("\n*** original methodCapt09=\n" + methodCapt09.dataToString()); methodCapt09.setColumnName(0, "MethodLocatingCode"); //will be removed methodCapt09.removeColumn(2); //Description moved to metadata methodCapt09.setColumn(1, StringArray.fromCSV( @@ -5091,7 +5184,7 @@ public static void getChristinaShowsData() throws Throwable { "* 30H 30 foot-square opening trawl which was spread in same manner as above but with smaller hydrofoil doors.\n" + "* COT A seldom-used early model cotton twine trawl. Mouth opening was approximately 35 foot-square.\n" + "* MWT Midwater trawl (no designation)\n"); //18 - String2.log("\n*** methodCapt09=\n" + methodCapt09.toCSVString()); + String2.log("\n*** methodCapt09=\n" + methodCapt09.toString()); //MethodLocating09 (missing value is code="0") //Code AlphaCode Description @@ -5107,7 +5200,7 @@ public static void getChristinaShowsData() throws Throwable { .add("long_name", "Method Locating") .add("ioos_category", "Identifier"); methodLocating09.setStringData(1, 1, "No attempt"); - String2.log("\n*** methodLocating09=\n" + methodLocating09.toCSVString()); + String2.log("\n*** methodLocating09=\n" + methodLocating09.toString()); //speCode09 Table speCode09 = new Table(); @@ -5135,7 +5228,7 @@ public static void getChristinaShowsData() throws Throwable { sa2.switchFromTo("Unknow", "Unknown"); } - String2.log("\n*** speCode09=\n" + speCode09.toCSVString(20) + "..."); + String2.log("\n*** speCode09=\n" + speCode09.toString(20)); //stType09 Table stType09 = new Table(); @@ -5153,7 +5246,7 @@ public static void getChristinaShowsData() throws Throwable { sa.intraReplaceAll(",round", ", round"); sa.intraReplaceAll("-blanket", "- blanket"); sa.intraReplaceAll("Other, includes gill netting, beach seining, round", "Other"); - String2.log("\n*** stType09=\n" + stType09.toCSVString()); + String2.log("\n*** stType09=\n" + stType09.toString()); //vesselName (I put the info in the comments for CuiseNo) Table vesselName = new Table(); @@ -5162,7 +5255,7 @@ public static void getChristinaShowsData() throws Throwable { vesselName.columnAttributes(1) .add("long_name", "Vessel Name") .add("ioos_category", "Identifier"); - String2.log("\n*** vesselName09=\n" + vesselName.toCSVString()); + String2.log("\n*** vesselName09=\n" + vesselName.toString()); //*** stationData09 @@ -5307,7 +5400,7 @@ public static void getChristinaShowsData() throws Throwable { stationData09.columnAttributes(stationData09.findColumnNumber("")) .add("description", ""); - String2.log("\n*** stationData09=\n" + stationData09.toCSVString(20) + "..."); + String2.log("\n*** stationData09=\n" + stationData09.toString(20)); for (int col = 0; col < stationData09.nColumns(); col++) { if (!(stationData09.getColumn(col) instanceof StringArray)) { 
String2.log(String2.left(stationData09.getColumnName(col),14) + " " + @@ -5355,7 +5448,7 @@ public static void getChristinaShowsData() throws Throwable { .add("ioos_category", "Biology"); afData09.columnAttributes(afData09.findColumnNumber("YearClass")) .add(yearClassAtts); - String2.log("\n*** afData09=\n" + afData09.toCSVString(20) + "..."); + String2.log("\n*** afData09=\n" + afData09.toString(20)); //AnchSchoolData Table anchSchoolData = new Table(); @@ -5397,7 +5490,7 @@ public static void getChristinaShowsData() throws Throwable { anchSchoolData.columnAttributes(afData09.findColumnNumber("Frequencies")) .add("description", "Number of anchovy schools seen from the sonargram at a station.") .add("units", "count"); - String2.log("\n*** anchSchoolData=\n" + anchSchoolData.toCSVString(20) + "..."); + String2.log("\n*** anchSchoolData=\n" + anchSchoolData.toString(20)); //BTData09 Table btData09 = new Table(); @@ -5423,7 +5516,7 @@ public static void getChristinaShowsData() throws Throwable { po = btData09.findColumnNumber("CruiseNo"); btData09.join(3, po, "", stationData09); //keep the keyColumns - String2.log("\n*** btData09=\n" + btData09.toCSVString(20) + "..."); + String2.log("\n*** btData09=\n" + btData09.toString(20)); //LFData09 Table lfData09 = new Table(); @@ -5463,7 +5556,7 @@ public static void getChristinaShowsData() throws Throwable { po = lfData09.findColumnNumber("LengthUnit"); lfData09.join(1, po, "0", lengthUnit09); lfData09.removeColumn(po); - String2.log("\n*** lfData09=\n" + lfData09.toCSVString(20) + "..."); + String2.log("\n*** lfData09=\n" + lfData09.toString(20)); //NSData09 @@ -5509,7 +5602,7 @@ public static void getChristinaShowsData() throws Throwable { po = nsData09.findColumnNumber("AreaCode"); nsData09.join(1, po, "0", areaCode09); nsData09.removeColumn(po); - String2.log("\n*** nsData09=\n" + nsData09.toCSVString(20) + "..."); + String2.log("\n*** nsData09=\n" + nsData09.toString(20)); //speciesData09 Table speciesData09 = new Table(); @@ -5556,7 +5649,7 @@ public static void getChristinaShowsData() throws Throwable { po = speciesData09.findColumnNumber("LenUnit"); speciesData09.join(1, po, "0", lengthUnit09); speciesData09.removeColumn(po); - String2.log("\n*** speciesData09=\n" + speciesData09.toCSVString(20) + "..."); + String2.log("\n*** speciesData09=\n" + speciesData09.toString(20)); //speciesTally09 Table speciesTally09 = new Table(); @@ -5592,7 +5685,7 @@ public static void getChristinaShowsData() throws Throwable { speciesTally09.join(1, po, "0", speCode09); speciesTally09.setColumnName(po, "SpeciesCode"); //don't remove SpeCode speciesTally09.columnAttributes(po).add(speciesCodeAtts); - String2.log("\n*** speciesTally09=\n" + speciesTally09.toCSVString(20) + "..."); + String2.log("\n*** speciesTally09=\n" + speciesTally09.toString(20)); //done @@ -5619,7 +5712,7 @@ public static void calcofiBio() throws Exception { Table table = new Table(); table.readNDNc(oldDir + "calcofiBio_19840211_66.7_65_NH16.nc", null, null, Double.NaN, Double.NaN, true); - String2.log(table.toCSVString()); + String2.log(table.toString()); System.exit(0); } @@ -5801,7 +5894,7 @@ public static void calcofiBio() throws Exception { String2.modifyToBeFileNameSafe(obsCommonPA.get(row)) + ".nc", "row", false); //convertToFakeMissingValues if (startRow == 0) - String2.log(tTable.toCSVString()); + String2.log(tTable.toString()); startRow = row + 1; } } @@ -5847,7 +5940,7 @@ public static void calcofiSub() throws Exception { Table table = new Table(); table.readNDNc(oldDir + 
"subsurface_19490228_92_39.nc", null, null, Double.NaN, Double.NaN, true); - String2.log(table.toCSVString()); + String2.log(table.toString()); System.exit(0); } @@ -5864,7 +5957,7 @@ public static void calcofiSub() throws Exception { Table outTable = new Table(); outTable.readNDNc(oldDir + fileName[0], null, null, Double.NaN, Double.NaN, true); - String2.log(outTable.toCSVString()); + String2.log(outTable.toString()); String outColNames[] = outTable.getColumnNames(); String today = Calendar2.getCurrentISODateStringLocal(); outTable.globalAttributes().set("history", @@ -5928,7 +6021,7 @@ public static void calcofiSub() throws Exception { String2.modifyToBeFileNameSafe(lineStationPA.get(row)) + ".nc", "row", false); //convertToFakeMissingValues if (startRow == 0) - String2.log(tTable.toCSVString()); + String2.log(tTable.toString()); startRow = row + 1; } } @@ -5959,7 +6052,7 @@ public static void calcofiSur() throws Exception { Table table = new Table(); table.readNDNc(oldDir + "surface_19490228_92_39.nc", null, null, Double.NaN, Double.NaN, true); - String2.log(table.toCSVString()); + String2.log(table.toString()); //System.exit(0); } @@ -5976,7 +6069,7 @@ public static void calcofiSur() throws Exception { Table outTable = new Table(); outTable.readNDNc(oldDir + fileName[0], null, null, Double.NaN, Double.NaN, true); - String2.log(outTable.toCSVString()); + String2.log(outTable.toString()); String outColNames[] = outTable.getColumnNames(); String today = Calendar2.getCurrentISODateStringLocal(); outTable.globalAttributes().set("history", @@ -6040,7 +6133,7 @@ public static void calcofiSur() throws Exception { String2.modifyToBeFileNameSafe(lineStationPA.get(row)) + ".nc", "row", false); //convertToFakeMissingValues if (startRow == 0) - String2.log(tTable.toCSVString()); + String2.log(tTable.toString()); startRow = row + 1; } } @@ -6259,7 +6352,8 @@ public static void convertCchdoBottle() throws Exception { lines = tLines; //read ASCII info into a table - table.readASCII(inDir + fileNames[f], lines, colNamesLine, colNamesLine + 2, + table.readASCII(inDir + fileNames[f], lines, + colNamesLine, colNamesLine + 2, "", null, null, null, null, false); //false=simplify int nRows = table.nRows(); @@ -6794,7 +6888,7 @@ public static void convertCchdoBottle() throws Exception { public static void makeNetcheckErddapTests(String erddapUrl) throws Throwable { Table table = new Table(); erddapUrl += "info/index.json?page=1&itemsPerPage=100000000"; //all possible datasets - String json = SSR.getUncompressedUrlResponseString(erddapUrl); + String json = SSR.getUncompressedUrlResponseString(erddapUrl, String2.UTF_8); table.readJson(erddapUrl, json); int nRows = table.nRows(); PrimitiveArray pa = table.getColumn("Dataset ID"); @@ -6830,7 +6924,7 @@ public static void makeNetcheckErddapTests(String erddapUrl) throws Throwable { */ public static void fixKeywords(String fileName) throws Exception { String2.log("fixKeywords " + fileName); - String charset = "ISO-8859-1"; + String charset = String2.ISO_8859_1; String attKeywords = ""; int attKeywordsLength = attKeywords.length(); StringArray lines = StringArray.fromFile(fileName, charset); @@ -6969,6 +7063,7 @@ else throw new RuntimeException(beginError + //*Then* make ncOut. If this fails, no clean up needed. 
NetcdfFileWriter ncOut = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, fullFileName + randomInt); + boolean nc3Mode = true; try { Group rootGroup = ncOut.addGroup(null, ""); ncOut.setFill(false); @@ -7016,21 +7111,21 @@ else throw new RuntimeException(beginError + //write global attributes in ncOut Attributes tAtts = new Attributes(); OpendapHelper.getAttributes(das, "GLOBAL", tAtts); - NcHelper.setAttributes(rootGroup, tAtts); + NcHelper.setAttributes(nc3Mode, rootGroup, tAtts); //write dimension attributes in ncOut for (int dim = 0; dim < nDims; dim++) { String dimName = dims[dim].getName(); tAtts.clear(); OpendapHelper.getAttributes(das, dimName, tAtts); - NcHelper.setAttributes(newDimVars[v], tAtts); + NcHelper.setAttributes(nc3Mode, newDimVars[dim], tAtts); //index by dim (this dimension's variable), not v } //write data attributes in ncOut for (int v = 0; v < nVars; v++) { tAtts.clear(); OpendapHelper.getAttributes(das, vars[v], tAtts); - NcHelper.setAttributes(newVars[v], tAtts); + NcHelper.setAttributes(nc3Mode, newVars[v], tAtts); } //leave "define" mode in ncOut @@ -7666,7 +7761,7 @@ public static Table processOneCalcofi2012(String dir, String tableName, String2.log(lines.substring(0, Math.min(lines.length(), 1500))); Table table = new Table(); table.readJson(inFile, lines); - String2.log("Before adjustments:\n" + String2.annotatedString(table.dataToCSVString(5))); + String2.log("Before adjustments:\n" + String2.annotatedString(table.dataToString(5))); int nRows = table.nRows(); int nErrors = 0; @@ -7866,7 +7961,7 @@ public static Table processOneCalcofi2012(String dir, String tableName, } //save as .nc - String2.log("After adjustments:\n" + String2.annotatedString(table.dataToCSVString(5))); + String2.log("After adjustments:\n" + String2.annotatedString(table.dataToString(5))); table.saveAsFlatNc(dir + tableName + ".nc", rowName, false); return table; } @@ -7886,35 +7981,35 @@ public static void tallyUafAggregations(String datasetsXmlFileName) throws Excep String fromTo[] = { //alternate ferret url and sourceUrl "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/20thC_ReanV2/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/20thC_ReanV2/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/20thC_ReanV2/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/NARR", //several variants -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/NARR", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/NARR", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/cpc_us_hour_precip/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cpc_us_hour_precip/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cpc_us_hour_precip/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/cpc_us_precip/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cpc_us_precip/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cpc_us_precip/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/cru/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cru/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/cru/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/godas/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/godas/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/godas/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/gpcc/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/gpcc/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/gpcc/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/interp_OLR/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/interp_OLR/",
+"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/interp_OLR/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/msu/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/msu/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/msu/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/ncep.reanalysis.derived/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis.derived/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis.derived/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/ncep.reanalysis2.dailyavgs/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis2.dailyavgs/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis2.dailyavgs/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/ncep.reanalysis2/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis2/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis2/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/noaa.ersst/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/noaa.ersst/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/noaa.ersst/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/snowcover/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/snowcover/", +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/snowcover/", "http://ferret.pmel.noaa.gov/geoide/dodsC/Datasets/udel.airt.precip/", -"http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/udel.airt.precip/" +"https://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/udel.airt.precip/" }; //extract unique sourceUrls @@ -7992,6 +8087,7 @@ public static void viirsLatLon(boolean create) throws Exception { if (create) { NetcdfFileWriter nc = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, dir + fileName); + boolean nc3Mode = true; try { Group rootGroup = nc.addGroup(null, ""); nc.setFill(false); @@ -8003,17 +8099,17 @@ public static void viirsLatLon(boolean create) throws Exception { Dimension latDim = nc.addDimension(rootGroup, latName, nLat); Variable latVar = nc.addVariable(rootGroup, latName, NcHelper.getDataType(double.class), Arrays.asList(latDim)); - NcHelper.setAttributes(latVar, atts); + NcHelper.setAttributes(nc3Mode, latVar, atts); //lon atts.add("units", "degrees_east"); Dimension lonDim = nc.addDimension(rootGroup, lonName, nLon); Variable lonVar = nc.addVariable(rootGroup, lonName, NcHelper.getDataType(double.class), Arrays.asList(lonDim)); - NcHelper.setAttributes(lonVar, atts); + NcHelper.setAttributes(nc3Mode, lonVar, atts); //write global attributes - //NcHelper.setAttributes(nc, "NC_GLOBAL", ada.globalAttributes()); + //NcHelper.setAttributes(nc3Mode, nc, "NC_GLOBAL", ada.globalAttributes()); //leave "define" mode nc.create(); @@ -8141,57 +8237,88 @@ public static String makeNcmlCoordValues(String location, String startIso, Strin return s; } - /** Convert FED Rockfish CTD .csv data files to .nc (from Lynn 2013-03-28 - * from /Volumes/PFEL_Shared_Space/PFEL_Share/Lynn2Bob/Rockfish_CTD.tar.gz). + /** + * Convert FED Rockfish CTD to .nc. + * I do the .xls to tsv by hand in Excel -- use time format yyyy-mm-dd h:mm (24 hour h, no am/pm). + * Then use macro in EditPlus to fix the hour data (regex search "-[0-9]{2} [0-9]:") + * and insert 0 so it is Thh (2 digits). + * Then copy rockfish_casts_yyyy.nc and rockfish_header_yyyy.nc to rockfish20130409 + * so they are part of erdFed + + * From Lynn 2013-03-28 from /Volumes/PFEL_Shared_Space/PFEL_Share/Lynn2Bob/Rockfish_CTD.tar.gz). 
+ * From Lynn 2017-02-03 from xserve /home/ldewitt/FED_rockfish_CTD + * + * @param lastYear year of processing, e.g., 2011 or 2015 */ - public static void convertRockfish20130328() throws Throwable { - String2.log("\nProjects.convertRockfish20130328()"); - String dir = "C:/u00/data/points/rockfish20130328/"; - String outerName = "ERD_CTD_HEADER_2008_to_2011"; //.csv -> .nc - String innerName = "ERD_CTD_CAST_2008_to_2011"; //.csv -> .nc + public static void convertRockfish(int lastYear) throws Throwable { + String2.log("\nProjects.convertRockfish()"); + String dir = null, outerName = null, innerName = null, fileExtension = null; + if (lastYear == 2011) { + dir = "/u00/data/points/rockfish20130328/"; + outerName = "ERD_CTD_HEADER_2008_to_2011"; //.csv -> .nc + innerName = "ERD_CTD_CAST_2008_to_2011"; //.csv -> .nc + fileExtension = ".csv"; + + } else if (lastYear == 2015) { + dir = "/data/rockfish/"; + outerName = "NOAA NMFS SWFSC Santa Cruz CTD_HEADER 2012-2015bob"; //tsv -> .nc + innerName = "NOAA NMFS SWFSC Santa Cruz CTD_CAST 2012-2015"; //tsv -> .nc + fileExtension = ".txt"; - //read the outer .csv files + } else { + throw new RuntimeException("unsupported year"); + } + + //read the outer files Table outer = new Table(); - outer.readASCII(dir + outerName + ".csv", 0, 1, null, null, null, null, false); //simplify + outer.readASCII(dir + outerName + fileExtension, 0, 1, "\t", + null, null, null, null, false); //simplify Test.ensureEqual(outer.getColumnNamesCSVString(), "CRUISE,CTD_INDEX,CTD_NO,STATION,CTD_DATE,CTD_LAT,CTD_LONG,CTD_BOTTOM_DEPTH,BUCKET_TEMP,BUCKET_SAL,TS_TEMP,TS_SAL", "Unexpected outer column names"); - String2.log("outer (5 rows) as read:\n" + outer.dataToCSVString(5)); + for (int coli = 0; coli < outer.nColumns(); coli++) + outer.setColumnName(coli, outer.getColumnName(coli).toLowerCase()); + String2.log("outer (5 rows) as read:\n" + outer.dataToString(5)); //convert to short - String colNames[] = {"CTD_INDEX","CTD_NO","STATION","CTD_BOTTOM_DEPTH"}; + String colNames[] = {"ctd_index","ctd_no","station","ctd_bottom_depth"}; for (int coli = 0; coli < colNames.length; coli++) { int col = outer.findColumnNumber(colNames[coli]); outer.setColumn(col, new ShortArray(outer.getColumn(col))); + if (colNames[coli].equals("ctd_bottom_depth")) + outer.setColumnName(col, "bottom_depth"); } //convert to floats - colNames = new String[]{"BUCKET_TEMP","BUCKET_SAL","TS_TEMP","TS_SAL"}; + colNames = new String[]{"bucket_temp","bucket_sal","ts_temp","ts_sal"}; + String newColNames[] = new String[]{"bucket_temperature","bucket_salinity", + "ts_temperature","ts_salinity"}; for (int coli = 0; coli < colNames.length; coli++) { int col = outer.findColumnNumber(colNames[coli]); outer.setColumn(col, new FloatArray(outer.getColumn(col))); + outer.setColumnName(col, newColNames[coli]); } - //convert date time "5/5/2008 9:10" to time - DateTimeFormatter dtf = DateTimeFormat.forPattern("M/d/yyyy H:mm").withZone( - DateTimeZone.forID("America/Los_Angeles")); + //convert date time e.g., "2008-05-05 12:10" to epoch seconds + DateTimeFormatter dtf = Calendar2.makeDateTimeFormatter("yyyy-MM-dd HH:mm", "America/Los_Angeles"); //GMT: erddap/convert/time.html says "5/5/2008 19:10" = 1.2100146E9 // if 7 hours different in summer...
- Test.ensureEqual(dtf.parseMillis("5/5/2008 12:10") / 1000.0, 1.2100146E9, //erddap/convert/time.html + Test.ensureEqual(Calendar2.toEpochSeconds("2008-05-05 12:10", dtf), 1.2100146E9, //erddap/convert/time.html "trouble with DateTimeFormatter"); int nOuter = outer.nRows(); { - int col = outer.findColumnNumber("CTD_DATE"); + int col = outer.findColumnNumber("ctd_date"); PrimitiveArray oldTimePA = outer.getColumn(col); DoubleArray newTimePA = new DoubleArray(); for (int row = 0; row < nOuter; row++) - newTimePA.add(dtf.parseMillis(oldTimePA.getString(row)) / 1000.0); + newTimePA.add(Calendar2.toEpochSeconds(oldTimePA.getString(row), dtf)); + outer.setColumnName(col, "time"); outer.setColumn(col, newTimePA); outer.columnAttributes(col).set("units", "seconds since 1970-01-01T00:00:00Z"); } //convert lat and lon from dddmm.mmmm to decimal degrees - colNames = new String[]{"CTD_LAT","CTD_LONG"}; + colNames = new String[]{"ctd_lat","ctd_long"}; for (int coli = 0; coli < colNames.length; coli++) { int col = outer.findColumnNumber(colNames[coli]); PrimitiveArray pa = outer.getColumn(col); @@ -8203,47 +8330,54 @@ public static void convertRockfish20130328() throws Throwable { if (d < 0) throw new SimpleException("d<0 requires more testing"); fa.add(scale * Math2.doubleToFloatNaN(Math.floor(d / 100.0) + (d % 100.0) / 60.0)); } + outer.setColumnName(col, coli == 0? "latitude": "longitude"); outer.setColumn(col, fa); } //save the outer as .nc - String2.log("outer (5 rows) before save:\n" + outer.toCSVString(5)); - outer.saveAsFlatNc(dir + outerName + ".nc", "row", false); //convertToFakeMissingValues + String2.log("outer (5 rows) before save:\n" + outer.toString(5)); + outer.saveAsFlatNc(dir + "rockfish_header_" + lastYear + ".nc", "row", false); //convertToFakeMissingValues //just keep the outer columns needed for inner table - StringArray desired = StringArray.fromCSV("CRUISE,CTD_INDEX,CTD_NO,STATION,CTD_DATE,CTD_LAT,CTD_LONG"); + StringArray desired = StringArray.fromCSV("cruise,ctd_index,ctd_no,station,time,latitude,longitude"); Test.ensureEqual(outer.reorderColumns(desired, true), desired.size(), "Not all desired columns were found."); //read inner table Table inner = new Table(); - inner.readASCII(dir + innerName + ".csv", 0, 1, null, null, null, null, false); //simplify + inner.readASCII(dir + innerName + fileExtension, 0, 1, "\t", + null, null, null, null, false); //simplify + for (int coli = 0; coli < inner.nColumns(); coli++) + inner.setColumnName(coli, inner.getColumnName(coli).toLowerCase()); Test.ensureEqual(inner.getColumnNamesCSVString(), - "CRUISE,CTD_INDEX,CTD_DEPTH,TEMPERATURE,SALINITY,DENSITY,DYN_HGT,IRRAD,FLUOR_VOLT,TRANSMISSIVITY,CHLOROPHYLL,OXYGEN_VOLT,OXYGEN", + "cruise,ctd_index,ctd_depth,temperature,salinity,density,dyn_hgt,irrad,fluor_volt,transmissivity,chlorophyll,oxygen_volt,oxygen", "Unexpected inner column names"); //convert to short - colNames = new String[]{"CTD_INDEX","CTD_DEPTH"}; + colNames = new String[]{"ctd_index","ctd_depth"}; for (int coli = 0; coli < colNames.length; coli++) { int col = inner.findColumnNumber(colNames[coli]); + inner.setColumnName(col, coli == 0? 
"ctd_index": "depth"); inner.setColumn(col, new ShortArray(inner.getColumn(col))); } //convert to floats - colNames = new String[]{"TEMPERATURE","SALINITY","DENSITY","DYN_HGT","IRRAD","FLUOR_VOLT","TRANSMISSIVITY","CHLOROPHYLL","OXYGEN_VOLT","OXYGEN"}; + colNames = new String[]{"temperature","salinity","density","dyn_hgt","irrad","fluor_volt","transmissivity","chlorophyll","oxygen_volt","oxygen"}; for (int coli = 0; coli < colNames.length; coli++) { int col = inner.findColumnNumber(colNames[coli]); inner.setColumn(col, new FloatArray(inner.getColumn(col))); + if (colNames[coli].equals("irrad")) + inner.setColumnName(col, "irradiance"); } //add outer info to inner table inner.join(2, 0, "", outer); //nKeys, keyCol, String mvKey, Table lookUpTable //save inner table - String2.log("inner (5 rows) before save:\n" + inner.toCSVString(5)); - inner.saveAsFlatNc(dir + innerName + ".nc", "row", false); //convertToFakeMissingValues + String2.log("inner (5 rows) before save:\n" + inner.toString(5)); + inner.saveAsFlatNc(dir + "rockfish_casts_" + lastYear + ".nc", "row", false); //convertToFakeMissingValues - String2.log("\n*** Projects.convertRockfish20130328() finished successfully"); + String2.log("\n*** Projects.convertRockfish() finished successfully"); } /** Convert FED Rockfish CTD .csv data files to .nc (from Lynn 2013-04-09 @@ -8260,7 +8394,7 @@ public static void convertRockfish20130409(boolean headerMode) throws Throwable for (int f = 0; f < tFileNames.length; f++) { Table table = new Table(); - table.readASCII(tFileNames[f], 0, 2, null, null, null, null, false); //simplify + table.readASCII(tFileNames[f], 0, 2, "", null, null, null, null, false); //simplify Test.ensureEqual(table.getColumnNamesCSVString(), headerMode? "cruise,ctd_index,ctd_no,station,time,longitude,latitude,bottom_depth," + @@ -8270,7 +8404,7 @@ public static void convertRockfish20130409(boolean headerMode) throws Throwable "chlorophyll,oxygen_volt,oxygen", "Unexpected column names"); if (f == 0) - String2.log("table (5 rows) as read:\n" + table.dataToCSVString(5)); + String2.log("table (5 rows) as read:\n" + table.dataToString(5)); //convert to short String colNames[] = headerMode? 
@@ -8306,7 +8440,7 @@ public static void convertRockfish20130409(boolean headerMode) throws Throwable //save as .nc String2.log("f=" + f + " finished."); if (f == 0) - String2.log("table (5 rows) before save:\n" + table.toCSVString(5)); + String2.log("table (5 rows) before save:\n" + table.toString(5)); table.saveAsFlatNc(dir + File2.getNameNoExtension(tFileNames[f]) + ".nc", "row", false); //convertToFakeMissingValues } @@ -8388,7 +8522,7 @@ public static void makeCRWNcml34(String isoStartDate, int firstIncrement, String String2.log(daysSince + " " + fileName); String contents = -"\n" + +"\n" + " \n" + " \n" + " \n" + @@ -8481,7 +8615,7 @@ public static void makeIsaacNPH() throws Throwable { table.columnAttributes(0).set("long_name", "Centered Time"); table.columnAttributes(0).set("units", Calendar2.SECONDS_SINCE_1970); - String2.log(table.toCSVString(3)); + String2.log(table.toString(3)); table.saveAsFlatNc("/u00/data/points/isaac/NPH_IDS.nc", "time", false); //convertToFakeMV=false } @@ -8595,7 +8729,7 @@ public static void makeIsaacPCUI() throws Throwable { time.add(Calendar2.gcToEpochSeconds(gc)); } table.setColumn(0, time); - String2.log(table.toCSVString(3)); + String2.log(table.toString(3)); table.saveAsFlatNc("/u00/data/points/isaac/PCUI_IDS.nc", "time", false); //convertToFakeMV=false } @@ -9127,6 +9261,50 @@ public static void extractSonarLatLon() throws Throwable { dir + foName); } + /** + * This makes ncml files for PH2. + * + * @param sstdn sstd or sstn + * @throws Exception if trouble + */ + public static void makePH2Ncml(String sstdn) + throws Exception { + String2.log("*** Projects.makePH2Ncml(" + sstdn + ")"); + + //get list of filenames (without dir) + //19811101145206-NODC-L3C_GHRSST-SSTskin-AVHRR_Pathfinder-PFV5.2_NOAA07_G_1981305_day-v02.0-fv01.0.nc + String dir = "/u00/satellite/PH2/" + sstdn + "/1day/ncml/"; + String regex = "\\d{14}-NODC.*\\.nc"; + + //String names[] = RegexFilenameFilter.list(dir, regex); + String names[] = String2.readLinesFromFile( + "/u00/satellite/PH2/" + sstdn + "/names.txt", null, 1); + + //for each file + for (int i = 0; i < names.length; i++) { + + //extract date yyyyddd + String tName = names[i]; +//19811101145206-NODC-L3C_GHRSST-SSTskin-AVHRR_Pathfinder-PFV5.2_NOAA07_G_1981305_day-v02.0-fv01.0.nc + double epochSeconds = + (Calendar2.parseYYYYDDDZulu(tName.substring(72, 79)).getTimeInMillis() / + 1000.0) + 12 * Calendar2.SECONDS_PER_HOUR; //center on noon of that day + + String2.log("writing " + dir + tName + ".ncml " + epochSeconds); + Writer writer = new FileWriter(dir + tName + ".ncml"); + StringBuilder values = new StringBuilder(); + writer.write( +"\n" + +" \n" + +" \n" + +" " + epochSeconds + "\n" + +" \n" + +"\n"); + writer.close(); + } + } + } diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/TestAll.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/TestAll.java index 018f0f481..e86b618b6 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/TestAll.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/TestAll.java @@ -135,6 +135,8 @@ public static void main(String args[]) throws Throwable { // // Table.testAddIndexColumns(); +// Table.testEnhancedFlatNcFile(); +// Table.testOpendapSequence(); // Table.testReadMultidimNc(); // Table.testReadNcCFMATimeSeriesReversed(false); //readMultidimNc // { @@ -145,24 +147,27 @@ public static void main(String args[]) throws Throwable { // null, null, null); // String2.log(table.toCSVString()); // } -// { //find cwwcNDBCMet dataset with insane min time (it is 41002 !) 
-// Table table = new Table(); -// table.readFlatNc("/downloads/fileTable.nc", null, 0); //it logs fileName and nRows= -// table.justKeepColumns(new String[]{"fileName","TIME_min_"}, ""); -// table.tryToApplyConstraintsAndKeep(0, -// StringArray.fromCSV("TIME_min_"), -// StringArray.fromCSV("<"), -// StringArray.fromCSV("0")); -// String2.log(table.dataToCSVString()); -// } +// { //find file in dataset with insane min_time +// Table table = new Table(); +// table.readFlatNc("/downloads/fileTable.nc", null, 0); //it logs fileName and nRows= +// String2.log(table.getColumnNamesCSVString()); +// // table.justKeepColumns(new String[]{"fileList","min"}, ""); +// table.tryToApplyConstraintsAndKeep(0, +// StringArray.fromCSV("min"), +// StringArray.fromCSV("="), +// StringArray.fromCSV("1480854360")); +// String2.log(table.dataToString()); +// } +// // Table.testReadGocdNcCF(); // Table.testOpendapSequence(); // Table.debugMode = true; DasDds.main(new String[]{ -// "knb_lter_sbc_14_t1", -// "NTL_DEIMS_5672_t1", +// "nesdisVHNchlaDaily2", +// "testNccsvScalar", +// "srtm15plus", // "ChukchiSea_454a_037a_fcf4", //for Kevin, SocketException: Connection reset -// "noaaOSP", +// "ncdcOisst2AmsrAgg", // "-verbose"}); /* if (false) { //one time fixup of scrippsGliders @@ -177,12 +182,57 @@ public static void main(String args[]) throws Throwable { } } /* */ -// String2.log(NcHelper.dumpString("/git/erddapTest/nc/invalidShortened.nc", false)); -// String2.log(NcHelper.dumpString("/u00/satellite/VN/chla/1day/V2012003_D1_WW00_chlor_a.nc", false)); -// String2.log(NcHelper.dumpString("/data/kerfoot/ce05/deployment0002_CE05MOAS-GL381-05-CTDGVM000-telemetered-ctdgv_m_glider_instrument_20160506T224259.276030-20160529T234932.889860.nc", "time")); +// String2.log(NcHelper.dumpString("/u00/srtm/topo15_compressed.nc", false)); +// String2.log(NcHelper.dumpString("/u00/data/points/scrippsGlidersUpload2/sp034-20170418T0959/sp034-20170419T024900_rt.nc", false)); +// String2.log(NcHelper.dumpString("/u00/data/points/scrippsGlidersUpload2/sp022-20170209T1616/sp022-20170420T055200_rt.nc", false)); +// String2.log(NcHelper.dumpString("/data/scrippsGliders/qaqc/sp025-20160914T132800.nc", true)); // String2.log(NcHelper.dumpString("/data/kerfoot/ce05/deployment0002_CE05MOAS-GL381-05-CTDGVM000-telemetered-ctdgv_m_glider_instrument_20160530T010752.621670-20160621T054549.394040.nc", "time")); // String2.log(NcHelper.dumpString("/u00/data/points/trinidadCTD/CoralSea_CS150513.nc", "maxStationNameLength")); //short data // String2.log(NcHelper.dumpString("/data/ncei/ind199105_ctd.nc", "time")); +// String2.log(NcHelper.dumpString("/u00/satellite/VH3/chla/1day/V2016001.ncml", false)); +// String2.log(NcHelper.dumpString("/data/argo/briand/2901175_prof.nc", "PRES_QC")); +// String2.log(NcHelper.dumpString("/data/craig/20160929.pwrsys.nc", "feature_type_instance")); +// String2.log(NcHelper.dumpString("/downloads/testNC4_20170329112201.nc", true)); + +// String2.log(NcHelper.dumpString("/data/pathfinder/20001231151243-NODC-L3C_GHRSST-SSTskin-AVHRR_Pathfinder-PFV5.2_NOAA16_G_2000366_day-v02.0-fv01.0.nc", false)); +/* byte aerosol_dynamic_indicator(time=1, lat=4320, lon=8640); + :long_name = "aerosol dynamic indicator"; + :grid_mapping = "Equidistant Cylindrical"; + :units = ""; + :add_offset = 0.0; // double + :scale_factor = 1.0; // double + :valid_min = -127B; // byte + :valid_max = 127B; // byte + :_FillValue = -128B; // byte + :time_offset = 360.0; // double + :source = "CLASS_AVHRRPF_AOT"; + :reference = 
"http://www.class.ncdc.noaa.gov/saa/products/search?sub_id=0&datatype_family=AVHRRPF"; + :comment = "Aerosol optical thickness (AOT) data are taken from the CLASS Pathfinder (from AVHRR) (AVHRRP +F). The aerosol optical thickness/depth (AOT/AOD) measurements are extracted from PATMOS-A2 monthly mean and re +projected and interpolated from their original 1 degree x 1 degree resolution to the 4km Pathfinder Version 5.2 + grid."; + :_ChunkSizes = 1, 540, 540; // int +*/ +// String2.log(NcHelper.dumpString("/data/pathfinder/20010101163014-NODC-L3C_GHRSST-SSTskin-AVHRR_Pathfinder-PFV5.2_NOAA16_G_2001001_day-v02.0-fv01.0.nc", false)); +/* byte aerosol_dynamic_indicator(time=1, lat=4320, lon=8640); + :long_name = "aerosol dynamic indicator"; + :grid_mapping = "Equidistant Cylindrical"; + :units = ""; + :add_offset = 1.1; // double + :scale_factor = 0.01; // double + :valid_min = -127B; // byte + :valid_max = 127B; // byte + :_FillValue = -128B; // byte + :time_offset = 81.0; // double + :source = "CLASS_AERO100_AOT"; + :reference = "http://www.class.ncdc.noaa.gov/saa/products/search?sub_id=0&datatype_family=AERO100&submit. +x=25&submit.y=12"; + :comment = "Aerosol optical thickness (100 KM) data are taken from the CLASS AERO100 products, which are +created from AVHRR channel 1 optical thickness retrievals from AVHRR global area coverage (GAC) data. The aeros +ol optical thickness measurements are interpolated from their original 1 degree x 1 degree resolution to the 4k +m Pathfinder Version 5.2 grid."; + :_ChunkSizes = 1, 540, 540; // int +*/ // StringWriter sw = new StringWriter(); // NCdumpW.print(String2.unitTestDataDir + "nccf/ncei/ind199105_ctd.nc", @@ -239,7 +289,7 @@ public static void main(String args[]) throws Throwable { //http://www.validome.org/xml/validate/ /*{ String dirName = "c:/downloads/test.xml"; - Writer writer = new OutputStreamWriter(new FileOutputStream(dirName, false), "UTF-8"); + Writer writer = new OutputStreamWriter(new FileOutputStream(dirName, false), String2.UTF_8); //EDD.oneFromDatasetsXml(null, "erdMHchla8day").writeFGDC(writer, null); EDD.oneFromDatasetsXml(null, "erdMHchla8day").writeISO19115(writer, null); //EDD.oneFromDatasetsXml(null, "pmelTaoDyAirt").writeFGDC(writer, null); @@ -271,9 +321,9 @@ public static void main(String args[]) throws Throwable { // Calendar2.getCompactCurrentISODateTimeStringLocal() + ".xml"; // EDDGridFromDap.generateDatasetsXmlFromThreddsCatalog(ftcName, // //one catalog.xml URL: -// "http://oceanwatch.pfeg.noaa.gov/thredds/catalog/catalog.xml", -// "http://opendap-uat.jpl.nasa.gov/thredds/catalog.xml", //a test catalog -// "http://thredds.jpl.nasa.gov/thredds/catalog.xml", +// "https://oceanwatch.pfeg.noaa.gov/thredds/catalog/catalog.xml", +// "https://opendap-uat.jpl.nasa.gov/thredds/catalog.xml", //a test catalog +// "https://thredds.jpl.nasa.gov/thredds/catalog.xml", // ".*", -1); // String2.log(String2.readFromFile(ftcName)[1]); // @@ -284,15 +334,17 @@ public static void main(String args[]) throws Throwable { // // EDD.debugMode = true; // String2.log("\n" + EDDGridFromDap.generateDatasetsXml(false, //directions -// "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MPOC/1day", +// "https://thredds.jpl.nasa.gov/thredds/dodsC/ncml_aggregation/SalinityDensity/smap/aggregate__SMAP_JPL_L3_SSS_CAP_MONTHLY_V3.ncml.html", // String2.readLinesFromFile("/temp/urlsFromUAF.txt", "", 1)[2342], //one URL // String2.readLinesFromFile("/temp/urlsFromPodaac.txt", "", 1)[161], //one URL -// 
"http://thredds.jpl.nasa.gov/thredds/dodsC/ncml_aggregation/OceanTemperature/avhrr/pathfinder_v51/daily/day/aggregate__avhrr_AVHRR_PATHFINDER_L3_BSST_DAILY_DAYTIME_V51.ncml", -// null, null, null, -1, null)); +// "http://cwcgom.aoml.noaa.gov/thredds/dodsC/AFAI/USFAFAI3D.nc", +// null, null, null, //new String[]{"time","altitude","lat","lon"}, //dimensions (or null) +// -1, null)); +// DasDds.main(new String[]{"nceiSMOSsssMonthly20170317", "-verbose"}); // // Crawl UAF clean catalog: // done 2012-10-17, 2012-12-09, 2013-11-06, 2014-03-31, 2014-12-18, -// 2015-10-22, 2016-04-19, 2016-08-26 +// 2015-10-22, 2016-04-19, 2016-08-26, 2017-04-16 (fail) // Results file is /temp/datasetsUAF{uafi}_{dateTime}.xml // Log file is /u00/cwatch/erddap2/logs/TestAll.log / // Look at problems, creator_, title, .... Make improvements. @@ -317,6 +369,8 @@ public static void main(String args[]) throws Throwable { // Arrays.sort(ar); // String2.log(String2.toNewlineString(ar)); +// EDDGridFromDap.testScale1Offset0(); +// EDDGridFromDap.testFromJson(); // EDDGridFromDap.testUInt16Dap(); // EDDGridFromErddap.testDataVarOrder(); // String gx = ""; @@ -331,15 +385,18 @@ public static void main(String args[]) throws Throwable { // //create an invalid .nc file // byte tb[] = SSR.getFileBytes("/u00/satellite/MW/cdom/1day/MW2012072_2012072_cdom.nc"); -// FileOutputStream fos = new FileOutputStream("/git/erddapTest/nc/invalidShortened2.nc"); +// FileOutputStream fos = new FileOutputStream("/erddapTest/nc/invalidShortened2.nc"); // fos.write(tb, 0, tb.length / 10000); // fos.close(); // // EDDGridFromNcFiles.testBadNcFile(false); //runIncrediblySlowTest +// EDDGridFromNcFiles.testGenerateDatasetsXml3(); // EDDGridFromNcFiles.testIgor(); +// EDDGridFromNcFiles.testInvalidShortenedNcFile(); // EDDGridFromNcFiles.testSpecialAxis0Time(); // EDDGridFromNcFiles.testSpecialAxis0FileNameInt(); // EDDGridFromNcFiles.testSpecialAxis0GlobalDouble(); +// EDDGridFromNcFiles.testTimePrecisionMillis(); // EDDGridFromNcFiles.testUInt16File(); // EDDGridFromNcFilesUnpacked.testUInt16File(); // EDDGridFromNcFilesUnpacked.testMissingValue(); @@ -380,8 +437,6 @@ public static void main(String args[]) throws Throwable { File2.delete("/u00/cwatch/erddap2/dataset/ay/erd" + ds + vn + cl + "day/fileTable.nc"); String2.log(EDD.testDasDds("erd" + ds + vn + cl + "day")); /* */ -// String2.log(NcHelper.dumpString( -// "/u00/satellite/MH1/sst/8day/A20030012003008.L3m_8D_SST_sst_4km.ncml", false)); // String2.log(String2.noLongLines(NcHelper.dumpString( // "/u00/satellite/SW1/1day/S1998002.L3m_DAY_CHL_chlor_a_9km.nc", // false), 80, "")); @@ -391,14 +446,19 @@ public static void main(String args[]) throws Throwable { // ds = String2.getStringFromSystemIn("datasetID?"); // String2.log(EDD.testDasDds(ds)); // } -// if (true) { -// String s = EDDGridFromNcFiles.generateDatasetsXml( -// "/u00/satellite/MUR41/ssta/mday/", ".*\\.nc", "", 1440, null); -// String2.setClipboardString(s); -// String2.log(s); -// } else { -// String2.log(EDD.testDasDds("jplMURSST41mday")); -// } +// Projects.makeSoda331Ncml("ocean", 1980, 2025); +// Projects.makePH2Ncml("sstn"); +// Projects.testLongInNc3(); +// String2.log(NcHelper.dumpString( +// "/u00/satellite/PH2/sstd/1day/19811101145206-NODC-L3C_GHRSST-SSTskin-AVHRR_Pathfinder-PFV5.2_NOAA07_G_1981305_day-v02.0-fv01.0.nc.ncml", "time")); + if (false) { + String s = EDDGridFromNcFiles.generateDatasetsXml( + "/u00/satellite/PH3/sstn/", ".*\\.nc", "", -1, null); + String2.setClipboardString(s); + String2.log(s); + } +// 
String2.log(EDD.testDasDds("nceiOisst2agg")); +// // for (int di4 = 0; di4 < 6; di4++) // for (int dd = 0; dd < 3; dd++) // EDD.deleteCachedDatasetInfo("erdVH2" + @@ -430,7 +490,7 @@ public static void main(String args[]) throws Throwable { "NOAA NMFS SWFSC ERD (erd.data@noaa.gov) uses ERDDAP to add the time variable and slightly modify the metadata.\n" + "Direct read of HDF4 file through CDM library."); addAtts.set("license", -"http://science.nasa.gov/earth-science/earth-science-data/data-information-policy/\n" + +"https://science.nasa.gov/earth-science/earth-science-data/data-information-policy/\n" + "[standard]"); addAtts.remove("Northernmost_Northing"); addAtts.remove("processing_control_input_parameters_ifile"); @@ -641,7 +701,7 @@ public static void main(String args[]) throws Throwable { //EDDTableCopyPost.run(-1); //-1=allTests, 0..6 // String2.log(EDDTableFromAsciiFiles.generateDatasetsXml( -// "/u00/data/points/austin/", "EOS\\.csv", "", +// "/u00/data/points/austin/", "EOS20161130\\.csv", "", // "", 1, 2, -1, //colNamesRow, firstDataRow, reloadEvery // "", "", "", "", "", // "", // tSortFilesBySourceNames, @@ -705,10 +765,10 @@ public static void main(String args[]) throws Throwable { // EDDTableFromCassandra.testMaxRequestFraction(false); // EDDTableFromCassandra.testCass1Device(false); //pauseBetweenTests to read the stats // String2.log(EDDTableFromDapSequence.generateDatasetsXml( -// "http://www.ifremer.fr/oceanotron/OPENDAP/INS_CORIOLIS_GLO_TS_NRT_OBS_PROFILE_LATEST", -// "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/Raw_Water_Level", +// "http://localhost:8080/cwexperimental/tabledap/testNccsvScalar.html", +// "https://opendap.co-ops.nos.noaa.gov/dods/IOOS/Raw_Water_Level", // "http://gisweb.wh.whoi.edu:8080/dods/whoi/drift_data", -// 180, null)); +// 10040, null)); /* //tallyXml String tfn = EDStatic.fullLogsDirectory + "tallyLterSbsStorageUnitsMV.log"; @@ -760,7 +820,7 @@ public static void main(String args[]) throws Throwable { // EDDTableFromColumnarAsciiFiles.testGlerl2(); /* String s9 = EDDTableFromColumnarAsciiFiles.generateDatasetsXml( "/u00/data/points/lterWiscAoss/", ".*\\.dat", "", - "ISO-8859-1", 1, 2, 10080, + String2.ISO_8859_1, 1, 2, 10080, "", "", "", "", "EPOCH_TIME", "https://lter.limnology.wisc.edu/datafile/greenhouse-gas-emissions-gas-fluxes", @@ -791,9 +851,9 @@ public static void main(String args[]) throws Throwable { // ".*", true, 10080, "","","","",null)); // EDDTableFromHttpGet.testStatic(); // String2.log(EDDTableFromHyraxFiles.generateDatasetsXml( -// "http://data.nodc.noaa.gov/opendap/wod/monthly/APB/201103-201103/", +// "https://data.nodc.noaa.gov/opendap/wod/monthly/APB/201103-201103/", // "wod_01345934.O\\.nc", -// "http://data.nodc.noaa.gov/opendap/wod/monthly/APB/201103-201103/wod_013459340O.nc", +// "https://data.nodc.noaa.gov/opendap/wod/monthly/APB/201103-201103/wod_013459340O.nc", // 10080, // "", "", "", "", //columnFromFileName // "time", //String tSortedColumnSourceName, @@ -802,12 +862,13 @@ public static void main(String args[]) throws Throwable { // EDDTableFromHyraxFiles.testJpl(true); //deleteCachedInfoAndOneFile // String s = EDDTableFromNcCFFiles.generateDatasetsXml( -// "/data/ncei/", ".*\\.nc", -// "", 10080, //sample file +// "/data/joe/", ".*\\.nc", +// "", 1440, //sample file // "", "", "", // "", "", // "", "", "", "", new Attributes()); // String2.setClipboardString(s); String2.log(s); +// EDDTableFromNcFiles.testGenerateDatasetsXmlNcdump(); // EDDTableFromNcCFFiles.testNoAttName(); // 
EDDTableFromNcCFFiles.testKevin20160519(); // EDDTableFromNcCFFiles.testJP14323(); @@ -822,9 +883,12 @@ public static void main(String args[]) throws Throwable { //String2.log(tTable.toCSVString()); // +// EDDTableFromNcFiles.testGlobec(); // EDDTableFromNcFiles.testTimeSince19000101(); // EDDTableFromNcFiles.testIgor(); -// EDDTableFromNcFiles.testOrderBy(); +// EDDTableFromNcFiles.testOrderByClosest(); +// EDDTableFromNcFiles.testOrderByLimit(); +// EDDTableFromNcFiles.testSpeed(13); // NOT FINISHED EDDTableFromNcFiles.bobConsolidateWOD("APB", "1960-01-01"); // EDDTableFromNcFiles.getAllSourceVariableNames( // "c:/data/wod/monthly/APB/", ".*\\.nc"); //201103-201103/ @@ -839,7 +903,7 @@ public static void main(String args[]) throws Throwable { // "PL_WDIR3", "SPD3", "PL_WSPD3", "DIR3", "P3", "T3", "TS3", "RH3", "PRECIP3", "RRATE3"}, // "long_name"); // String2.log(EDDTableFromNcFiles.generateDatasetsXml( -// "/u00/data/points/sonar/", "nceiNmfsSonar.*\\.nc", +// "/data/joe/", ".*\\.nc", // "", // "", 1440, // "", "", "", @@ -856,12 +920,16 @@ public static void main(String args[]) throws Throwable { // EDDTableFromNcFiles.testHardFlag(); // String s = EDDTableFromMultidimNcFiles.generateDatasetsXml( -// "/u00/data/points/trinidadCTD/", "CoralSea_.*\\.nc", "", //sample file -// "", 10080, //dimensions +// "/data/briand/W1M3A/", ".*\\.nc", "", //sample file +// "TIME,DEPTH", -1, //dimensions // "", "", "", "", true, //removeMVRows // "", "", "", "", "", new Attributes()); // String2.setClipboardString(s); String2.log(s); +// Table.debugMode = true; DasDds.main(new String[]{"rich", "-verbose"}); // EDDTableFromMultidimNcFiles.testBasic(); +// EDDTableFromMultidimNcFiles.testGenerateDatasetsXmlSeaDataNet(); +// EDDTableFromMultidimNcFiles.testLongAndNetcdf4(); +// EDDTableFromMultidimNcFiles.testW1M3A(true); // *** To update GTSPP (~10th of every month): //Don't add source_id or stream_ident: they are usually (always?) empty @@ -873,12 +941,13 @@ public static void main(String args[]) throws Throwable { // 2) Overnight (still! because it's still sluggish and programming interrupts the log file), // unzip and consolidate the profiles // (full run takes 36 hours(?) on Dell M4700, was 2 days 14 hours on old Dell Opti). -// !!! Close all other windows, even EditPlus. +// !!! CLOSE all other windows, even EditPlus. +// !!! EMPTY Recycle Bin // Great speed up, but no longer under my control: // Temporarily switching off parts of McAfee : Virus Scan Console (2X speedup!) // On Access Scanner : All Processes // Scan Items: check: specified file types only (instead of usual All Files) -// EDDTableFromNcFiles.bobConsolidateGtsppTgz(2016, 5, 2016, 9, false); //first/last year(1990..)/month(1..), testMode +// EDDTableFromNcFiles.bobConsolidateGtsppTgz(2015, 6, 2017, 4, false); //first/last year(1985..)/month(1..), testMode // log file is c:/data/gtspp/logYYYYMMDD.txt // 2b) Email the "good" but "impossible" stations to Charles Sun // [was Melanie Hamilton, now retired] @@ -892,7 +961,7 @@ public static void main(String args[]) throws Throwable { // (If processed in chunks, use date of start of last chunk.) // 4) * In [tomcat]/content/erddap/subset/ // delete erdGtsppBestNc.json and erdGtsppBest.json -// * Load erdGtsppBestNc in localHost ERDDAP. (~20 minutes) +// * Load erdGtsppBestNc in localHost ERDDAP. 
(~20 minutes) // * Generate .json file from // http://localhost:8080/cwexperimental/tabledap/erdGtsppBestNc.json?trajectory,org,type,platform,cruise&distinct() // and save it as [tomcat]/content/erddap/subset/erdGtsppBestNc.json @@ -903,7 +972,7 @@ public static void main(String args[]) throws Throwable { // EDDTableFromNcFiles.testErdGtsppBest("erdGtsppBestNc"); // 6) Create ncCF files with the same date range as 2a) above: // It takes ~2 minutes per month processed. -// EDDTableFromNcFiles.bobCreateGtsppNcCFFiles(2016, 5, 2016, 9); //e.g., first/last year(1990..)/month(1..) +// EDDTableFromNcFiles.bobCreateGtsppNcCFFiles(2015, 6, 2017, 4); //e.g., first/last year(1990..)/month(1..) // String2.log(NcHelper.dumpString("/u00/data/points/gtsppNcCF/201406a.nc", false)); // 7) * Load erdGtsppBest in localHost ERDDAP. (long time if lots of files changed) // * Generate .json file from @@ -1008,13 +1077,14 @@ public static void main(String args[]) throws Throwable { // "http://coaps.fsu.edu/thredds/catalog/samos/data/research/WTEP/2012/catalog.xml", // "WTEP_20120215.*", // "http://coaps.fsu.edu/thredds/dodsC/samos/data/quick/WTEP/2012/WTEP_20120215v10002.nc", -// "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/catalog.xml", +// "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/catalog.xml", // "BodegaMarineLabBuoyCombined.nc", -// "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/BodegaMarineLabBuoyCombined.nc", +// "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/BodegaMarineLabBuoyCombined.nc", // 60, // "", "", "", "", "", // "time", null)); +// Erddap.testBasic(); // Erddap.makeErddapContentZip("c:/programs/_tomcat/samples/", "c:/backup/"); // Erddap.testHammerGetDatasets(); // File2.touch("c:/u00/cwatch/erddap2/copy/nmspWcosTemp/ANO001/2005/ANO001_021MTBD020R00_20051105.nc"); @@ -1023,6 +1093,14 @@ public static void main(String args[]) throws Throwable { // FileVisitorDNLS.testSync(); // FileVisitorDNLS.testMakeTgz(); // FileVisitorDNLS.testOneStepToString(); +// FileVisitorDNLS.testWAF(); +// FileVisitorDNLS.verbose = true; +// FileVisitorDNLS.reallyVerbose = true; +// FileVisitorDNLS.debugMode = true; +// FileVisitorDNLS.sync( +// "https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/", +// "/u00/data/points/gpcp/", +// ".*", false, ".*", false); // FileVisitorDNLS.findFileWith("/Temp/access_logs/", ".*", //dir, fileNameRegex // true, ".*", //recursive, pathRegex //lines below: //lineRegex, tallyWhich, interactiveNLines @@ -1089,7 +1167,19 @@ public static void main(String args[]) throws Throwable { // if (matcher.find(1)) String2.log("matched at start=" + matcher.start()); // else String2.log("didn't match"); // post.TestJdbc.test(); -// PrimitiveArray.testBasic(); +// +// TestUtil.testCalendar2(); //has some nccsv testing +// TestUtil.testMath2(); //has some nccsv testing +// TestUtil.testString2(); //has some nccsv testing +// PrimitiveArray.testNccsv(); +// Table.testNccsv(); +// EDDTableFromNccsvFiles.test(); +// EDDTableFromNccsvFiles.testBasic(true); +// EDDTableFromNccsvFiles.testChar(); +// EDDGridFromNcFiles.testNccsv(); +// EDDTableFromDapSequence.testGenerateDatasetsXml2(); +// EDDTableFromErddap.test(); +// // DasDds.main(new String[]{"erdMWpp3day", "-verbose"}); // Projects.dapToNc("http://coastwatch.pfeg.noaa.gov/erddap/griddap/erdBAsstamday", // new String[]{"sst"}, "[112:112][0:0][750:760][1800:1820]", 
@@ -1103,8 +1193,14 @@ public static void main(String args[]) throws Throwable { // "/u00/data/points/tao/daily/", "airt.*_dy\\.cdf"); // Projects.lastTime("http://coastwatch.pfeg.noaa.gov/erddap/griddap/", // StringArray.fromFile("c:/content/scripts/erdGridDatasets.csv")); +// Projects.convertRockfish(2015); +// Table.debugMode = true; DasDds.main(new String[]{"erdFedRockfishStation", "-verbose"}); +// Table.debugMode = true; DasDds.main(new String[]{"erdFedRockfishCtd", "-verbose"}); // Projects.downloadInPort(); +// SimpleXMLReader.testValidity( +// "/programs/_tomcat/content/erddap/datasetsFED31UAF.xml", "erddapDatasets"); + /* //Run to update jplG1SST String2.log("\n*** jplG1SST update"); @@ -1153,6 +1249,7 @@ public static void main(String args[]) throws Throwable { String2.log(" sleeping for 30 minutes"); Math2.sleep(30 * Calendar2.MILLIS_PER_MINUTE); } + /* */ //NOT WORKING ftp the file to upwell //String commands = @@ -1161,6 +1258,9 @@ public static void main(String args[]) throws Throwable { // "put " + jplFiles[jf]; //SSR.sftp("upwell.pfeg.noaa.gov", "ERDadmin", password, commands); + //SSR.downloadFile( + // "https://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?service=SOS&request=GetCapabilities", + // "/downloads/nosSosATempGetCap.xml", false); //tryToUseCompression // set jplG1SST flags !!!!! //SSR.touchUrl( @@ -1184,7 +1284,7 @@ public static void main(String args[]) throws Throwable { // "http://upwell.pfeg.noaa.gov/erddap/"); // "http://75.101.155.155/erddap/"); // :8081 led to out-of-date oceanwatch dataset!! but now disabled -// Projects.testOpendapAvailability("http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/CM/usfc/hday", +// Projects.testOpendapAvailability("https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/CM/usfc/hday", // "CMusfc", 5, 1, true); //nIter, maxSec // Projects.touchUrls(); @@ -1197,7 +1297,7 @@ public static void main(String args[]) throws Throwable { // String dirs[] = {"APB", "CTD", "DRB", "GLD", "MBT", "MRB", "OSD", "PFL", "UOR", "XBT"}; // for (int i = 9; i < 10; i++) // Projects2.copyHyraxFiles( -// "http://data.nodc.noaa.gov/opendap/wod/" + dirs[i] + "/", +// "https://data.nodc.noaa.gov/opendap/wod/" + dirs[i] + "/", // ".*\\.nc", true/false?, "", // "c:/data/wod/monthly/" + dirs[i] + "/", // "c:/data/wod/copyHyraxFiles20110713b.log"); @@ -1216,9 +1316,9 @@ public static void main(String args[]) throws Throwable { // for (int pe = 0; pe < 1000000; pe++) { // long peTime = System.currentTimeMillis(); // SSR.downloadFile( //throws Exception -// //"http://coastwatch.pfeg.noaa.gov/erddap/tabledap/cwwcNDBCMet.pngInfo", -// "http://oceanwatch.pfeg.noaa.gov/thredds/catalog/Satellite/aggregsatMY/k490/catalog.xml", -// //"http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/vekm/7day.das", +// //"https://coastwatch.pfeg.noaa.gov/erddap/tabledap/cwwcNDBCMet.pngInfo", +// "https://oceanwatch.pfeg.noaa.gov/thredds/catalog/Satellite/aggregsatMY/k490/catalog.xml", +// //"https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/vekm/7day.das", // "c:/downloads/peTest", true); // String2.log("Attempt #" + pe + " time=" + (System.currentTimeMillis() - peTime)); // } @@ -1226,7 +1326,7 @@ public static void main(String args[]) throws Throwable { // String fullFileName, boolean tryToUseCompression); //throws Exception // String2.log(SSR.getUrlResponseString( // "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/pmelTao.csv?&time>=2008-11-13T00:00:00Z")); -// "http://localhost/cwexperimental/index.html")); +// 
"https://oceandata.sci.gsfc.nasa.gov/VIIRS/Mapped/Daily/4km/CHL_chlor_a/2016/")); // "http://localhost/cwexperimental/tabledap/rPmelTao.csv?&time>=2008-11-13T00:00:00Z")); // String2.log(SSR.getUrlResponseString("https://coastwatch.pfeg.noaa.gov:8443/erddap2/griddap/etopo180.htmlTable?altitude[(-90.0):1000:(90.0)][(-180.0):1000:(180.0)]")); // String2.log(SSR.minimalPercentEncode("sst[(1870-01-01):1:(2011-07-01T00:00:00Z)][(29.5):1:(29.5)][(-179.5):1:(179.5)]")); @@ -1241,6 +1341,8 @@ public static void main(String args[]) throws Throwable { // String2.log(String2.annotatedString(SSR.getFileString( // "/Temp/test.md5"))); // SSR.zipEach("c:/temp/codarsf/"); +// SSR.zipADirectory("/u00/cwatch/erddap2/ArchiveADataset/cwwcNDBCMet_20170308160247Z", 600); + // String.matches dddd_ddd_dddd_add // String regex="[0-9]{4}_[0-9]{3}_[0-9]{4}_[a-zA-Z][0-9]{2}"; // String2.log("match=" + ("1234_567_1234_k00".matches(regex))); @@ -1259,7 +1361,7 @@ public static void main(String args[]) throws Throwable { // Table table = new Table(); // table.readASCII("/u00/data/points/lterSbc/arroyoquemado_mooring_arq_20150416.txt"); //large // String2.log("nColumns=" + table.nColumns()); -// String2.log(table.dataToCSVString(3)); +// String2.log(table.dataToString(3)); // Table taoTable = new Table(); // taoTable.readJson("tao", SSR.getUrlResponseString( // EDStatic.erddapUrl + "/tabledap/pmel_dapper/tao.json?longitude,latitude,altitude,time,station_id,sea_surface_temperature,sea_surface_temperature_quality,air_temperature,air_temperature_quality,relative_humidity,relative_humidity_quality,relative_humidity_source,wind_to_direction,wind_direction_quality,wind_direction_source,wind_speed,wind_speed_quality,wind_speed_source,eastward_wind,northward_wind&time>=2007-08-01&time<=2007-10-01")); @@ -1271,7 +1373,8 @@ public static void main(String args[]) throws Throwable { // null, null, null); // table.leftToRightSort(5); // String2.log(table.toCSVString(10)); -// Table.testParseDapQuery(); +// Table.testOrderByClosest(); +// Table.testOrderByLimit(); // Table.testReadNcCF7SampleDims(); // Table.testReadVlenNc(); // Table.testSubsetViaDapQuery(); @@ -1318,6 +1421,10 @@ public static void main(String args[]) throws Throwable { CWBrowserWW360 cwBrowserWW360; CWDataBrowser cwDataBrowser; dods.dap.DConnect dConnect; +dods.dap.DFloat64 dFloat64; +dods.dap.DInt16 dInt16; +dods.dap.DString dString; +dods.dap.parser.DASParser dasParser; DataHelper dh; DigirHelper dh2; dods.dap.DSequence dseq; @@ -1355,6 +1462,7 @@ public static void main(String args[]) throws Throwable { NcHelper ncHelper; NetCheck netCheck; OneOf oneOf; +OpendapHelper opendapHelper; ParseJSON parseJSON; PauseTest pt; PlainAxis2 sgtpa2; @@ -1471,6 +1579,7 @@ public static void main(String args[]) throws Throwable { //EDDTableFromMWFS eddtfm; EDDTableFromMultidimNcFiles eddtfmdnf; EDDTableFromNcFiles eddtfnf; +EDDTableFromNccsvFiles eddtfnccsvf; EDDTableFromNWISDV eddtfnwisdv; EDDTableFromOBIS eddtfo; //EDDTableFromPostDatabase eddtfpdb; @@ -1500,6 +1609,7 @@ public static void main(String args[]) throws Throwable { GridDataRandomAccessor gdracc; HtmlWidgets hw; LoadDatasets ld; +OpendapHelper oh; OutputStreamSource oss; OutputStreamFromHttpResponse osfhr; PersistentTable pert; @@ -1516,7 +1626,11 @@ public static void main(String args[]) throws Throwable { TableWriterGeoJson twgj; TableWriterHtmlTable twht; TableWriterJson twj; +TableWriterJsonl twjl; +TableWriterNccsv twn; TableWriterOrderBy twob; +TableWriterOrderByClosest twobc; +TableWriterOrderByLimit 
twobl; TableWriterOrderByMax twobm; TableWriterSeparatedValue twsv; TableWriterUnits twu; @@ -1552,7 +1666,7 @@ public static void main(String args[]) throws Throwable { //test that THREDDS is up (use ip name here, not numeric ip) try { OneOf.ensureDataServerIsUp( - "http://oceanwatch.pfeg.noaa.gov/thredds/catalog.html", + "https://oceanwatch.pfeg.noaa.gov/thredds/catalog.html", String2.split("ERD THREDDS Data Server`Satellite Datasets`HF Radio-derived Currents Datasets", '`'), true); @@ -1710,6 +1824,7 @@ public static void main(String args[]) throws Throwable { EDDTableFromNcFiles.test(true); //doGraphicsTests); //the best table graphics tests are always done EDDTableFromNcCFFiles.test(); EDDTableFromMultidimNcFiles.test(); + EDDTableFromNccsvFiles.test(); EDDTableFromHyraxFiles.test(); EDDTableFromAsciiFiles.test(false); //rarely: true=delete cached info EDDTableFromColumnarAsciiFiles.test(); @@ -1724,7 +1839,8 @@ public static void main(String args[]) throws Throwable { //EDDTableFromPostDatabase.test(); //INACTIVE. very slow? EDDTableFromCassandra.test(); //UPDATE nosCoops every 3 months: true, true, - // then copy /subset/nosCoops*.json files to coastwatch and UAF + // then copy /subset/nosCoops*.json files to coastwatch and UAF, + // and flag all the nosCoops datasets on coastwatch EDDTableFromAsciiServiceNOS.test(false, false); //makeSubsetFiles, reloadStationFiles EDDTableFromErddap.test(); //EDDTableFromMWFS.test(false); //doLongTest); //as of 2009-01-14 INACTIVE @@ -1761,7 +1877,7 @@ public static void main(String args[]) throws Throwable { } //don't run often //Opendap.threddsTunnelTest(10, //200 for a good test - // "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/CM/usfc/hday", + // "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/CM/usfc/hday", // "CMusfc"); } catch (Exception e) { String2.pressEnterToContinue(MustBe.throwableToString(e) + diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/TimePeriods.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/TimePeriods.java index 63cbcd6b9..39bdbe077 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/TimePeriods.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/TimePeriods.java @@ -471,7 +471,7 @@ public static GregorianCalendar getEndCalendar(String timePeriod, * so use before calling this: *

          //fix old-style 25 and 33 hour end times so they end at 59:59
-         if (timePeriodNHours > 1 && timePeriodNHours % 24 != 0)
+         if (timePeriodNHours > 1 && timePeriodNHours % 24 != 0)
              cal.add(Calendar2.SECOND, -1);
      * 
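     As a standalone illustration of that adjustment (not part of the patch; it
     uses java.util.Calendar directly, on the assumption that Calendar2.SECOND
     mirrors it, as elsewhere in this codebase):

         import java.util.Calendar;
         import java.util.GregorianCalendar;

         public class EndTimeSketch {
             public static void main(String[] args) {
                 //an old-style 33-hour composite's end time falls on the hour...
                 GregorianCalendar cal =
                     new GregorianCalendar(2005, Calendar.JANUARY, 2, 10, 0, 0);
                 int timePeriodNHours = 33;  //not a multiple of 24
                 if (timePeriodNHours > 1 && timePeriodNHours % 24 != 0)
                     cal.add(Calendar.SECOND, -1);
                 //...and is pulled back to the conventional hh:59:59
                 System.out.println(cal.getTime());  //prints ... 09:59:59 ... 2005
             }
         }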
* diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/TrajectoryScreen.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/TrajectoryScreen.java index 8c7645fd5..23350d706 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/TrajectoryScreen.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/TrajectoryScreen.java @@ -337,7 +337,7 @@ public GraphDataLayer getMapGDL() { "0001-01-01", "3000-01-01", //data from all time new String[]{individualValue}, tDataVariables.toArray()); - //String2.log(table.toString("row", 10)); + //String2.log(table.toString(10)); //make the colorMap Color color = null; @@ -466,7 +466,7 @@ public GraphDataLayer getGraphGDL() { if (yColumnNumber < 0) Test.error("yVariableName=" + yVariableName + " not in table:" + String2.toCSSVString(table.getColumnNames())); - //String2.log(table.toString("row", 10)); + //String2.log(table.toString(10)); //make colorMap Color color = defaultLineColor; diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/DataHelper.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/DataHelper.java index 8020e49c2..e4f958a6a 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/DataHelper.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/DataHelper.java @@ -18,14 +18,9 @@ import java.util.List; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. 
*/ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/FileNameUtility.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/FileNameUtility.java index db5acff1c..ba5189b4c 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/FileNameUtility.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/FileNameUtility.java @@ -308,7 +308,7 @@ public static String convertDaveNameToCWBrowserName(String daveName) //must be 1 month; ensure start date is 1 and rawEnd date is last in same month GregorianCalendar rawEndGC = Calendar2.parseYYYYDDDZulu(daveName.substring(10, 17)); //throws Exception if trouble timePeriodIndex = TimePeriods.exactTimePeriod(TimePeriods.MONTHLY_OPTION); - Test.ensureEqual(startGC.get(Calendar2.YEAR), rawEndGC.get(Calendar2.YEAR), "Monthly file: Begin and end year not the same."); + Test.ensureEqual(Calendar2.getYear(startGC), Calendar2.getYear(rawEndGC), "Monthly file: Begin and end year not the same."); Test.ensureEqual(startGC.get(Calendar2.MONTH), rawEndGC.get(Calendar2.MONTH),"Monthly file: Begin and end month not the same."); Test.ensureEqual(startGC.get(Calendar2.DATE), 1, "Monthly file: Begin date isn't 1."); Test.ensureEqual(rawEndGC.get(Calendar2.DATE), rawEndGC.getActualMaximum(Calendar2.DATE), "Monthly file: End date isn't last date in month."); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GenerateThreddsXml.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GenerateThreddsXml.java index 9112d06a7..0b625b259 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GenerateThreddsXml.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GenerateThreddsXml.java @@ -89,7 +89,7 @@ public class GenerateThreddsXml { * * *

Very terse catalog.xml documentation is at - * http://www.unidata.ucar.edu/software/thredds/current/tds/catalog/InvCatalogSpec.html + * https://www.unidata.ucar.edu/software/thredds/current/tds/catalog/InvCatalogSpec.html *

The finished files are in, e.g., on otter * /opt/tomcat1/content/thredds/Satellite/aggregsatAG/ssta/catalog.xml *

Bob has some of Jerome/Yao's handmade files @@ -268,9 +268,9 @@ public static StringArray generateThreddsXml(String dataMainDir, String dataSubD //it is good if maxShow=MAX_TITLE_LENGTH, but more important that info fit without wrapping int maxShow = Math.min(60, MAX_TITLE_LENGTH); String2.log( - " boldTitle =" + (boldTitle == null || boldTitle.length() <= maxShow? boldTitle : boldTitle.substring( 0, maxShow) + " ...") + - "\n shortTitle=" + (shortTitle == null || shortTitle.length() <= maxShow? shortTitle : shortTitle.substring(0, maxShow) + " ...") + - "\n summary=" + (summary == null || summary.length() <= maxShow? summary : summary.substring( 0, maxShow) + " ...") + + " boldTitle =" + boldTitle + + "\n shortTitle=" + shortTitle + + "\n summary=" + summary + "\n units=" + units + "\n standardName=" + standardName + "\n category=" + category); @@ -296,7 +296,7 @@ public static StringArray generateThreddsXml(String dataMainDir, String dataSubD " \n" + //" \n" + + //" -->\n" + " \n" + "\n" + " \n"); @@ -475,13 +475,13 @@ public static StringArray generateThreddsXml(String dataMainDir, String dataSubD "urlPath=\"" + dataSubDir + twoFourTime + "\">\n" + " all\n" + //2006/10/11 I added netcdf tag: - //documentation: http://oceanwatch.pfeg.noaa.gov/thredds/docs/NcML.htm + //documentation: https://oceanwatch.pfeg.noaa.gov/thredds/docs/NcML.htm " \n" + " \n" + " \n" + //Eeek! Is dateFormatMark correct? it catches start date and treats composite end date as HHmmss //Is it even relevant? (I think it is for joinNew, not joinExisting) - //see http://www.unidata.ucar.edu/software/netcdf/ncml/v2.2/Aggregation.html + //see https://www.unidata.ucar.edu/software/netcdf/ncml/v2.2/Aggregation.html //10/11/06 let's try not having it " \n" + - " \n" + //add link to CWBrowsers? "\n" + @@ -558,7 +558,7 @@ public static StringArray generateThreddsXml(String dataMainDir, String dataSubD //DataSet.properties) often doesn't have an appropriate standardName! //Yao had non-standard name, e.g., Wind Stress. //Examples in - //http://www.unidata.ucar.edu/software/thredds/current/tds/catalog/InvCatalogSpec.html + //https://www.unidata.ucar.edu/software/thredds/current/tds/catalog/InvCatalogSpec.html // are like Yao's -- not strict CF, but closer -- just humanized versions. //Tests with thredds show that value = "" is fine with Thredds. "vocabulary_name=\"" + standardName + "\" " + diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Grid.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Grid.java index 080574cf9..f76750a11 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Grid.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Grid.java @@ -39,14 +39,9 @@ import java.util.GregorianCalendar; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.ma2.*; import ucar.nc2.*; @@ -927,14 +922,10 @@ public void readGrd(String fullFileName) throws Exception { * *

.grd (GMT-style NetCDF) files are read with code in * netcdf-X.X.XX.jar which is part of the - * NetCDF Java Library * renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. * * @param fullFileName * @param desiredMinLon the minimum desired longitude. @@ -1919,13 +1910,9 @@ public void readNetCDF(String fullFileName, String dataName) throws Exception { * *

.nc files are read with code in * netcdf-X.X.XX.jar which is part of the - * NetCDF Java Library + * NetCDF Java Library * renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. * *

This sets globalAttributes, latAttributes, lonAttributes, * and dataAttributes. @@ -3192,6 +3179,7 @@ public void saveAsGrd(String directory, String name) throws Exception { //items determined by looking at a .grd file; items written in that order NetcdfFileWriter grd = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, directory + randomInt); + boolean nc3Mode = true; boolean success = false; try { Group rootGroup = grd.addGroup(null, ""); @@ -4188,6 +4176,8 @@ public void saveAsNetCDF(String directory, String name, String dataName) throws //items determined by looking at a .nc file; items written in that order NetcdfFileWriter nc = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, directory + randomInt); + boolean nc3Mode = true; + try { Group rootGroup = nc.addGroup(null, ""); nc.setFill(false); @@ -4292,7 +4282,7 @@ public void saveAsNetCDF(String directory, String name, String dataName) throws rootGroup.addAttribute(new Attribute("et_affine", NcHelper.get1DArray(matrix))); //float64[] {a, b, c, d, e, f} } else { - rootGroup.addAttribute(NcHelper.createAttribute(names[i], globalAttributes.get(names[i]))); + rootGroup.addAttribute(NcHelper.createAttribute(nc3Mode, names[i], globalAttributes.get(names[i]))); } } @@ -4322,15 +4312,15 @@ public void saveAsNetCDF(String directory, String name, String dataName) throws altitudeVar.addAttribute(new Attribute("_CoordinateZisPositive", "up")); //lat - NcHelper.setAttributes(latVar, latAttributes); + NcHelper.setAttributes(nc3Mode, latVar, latAttributes); latVar.addAttribute(new Attribute("axis", "Y")); //lon - NcHelper.setAttributes(lonVar, lonAttributes); + NcHelper.setAttributes(nc3Mode, lonVar, lonAttributes); lonVar.addAttribute(new Attribute("axis", "X")); //data - NcHelper.setAttributes(dataVar, dataAttributes); + NcHelper.setAttributes(nc3Mode, dataVar, dataAttributes); //leave "define" mode nc.create(); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSet.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSet.java index a891515a2..b8437ebba 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSet.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSet.java @@ -71,7 +71,7 @@ public abstract class GridDataSet { public int[] activeTimePeriodNHours; /** The active time period opendapUrl (or array of nulls if none available). - * E.g., http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MO/k490/hday + * E.g., https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MO/k490/hday * which just needs .html suffix to make link for user to click on. 
*/ public String[] activeTimePeriodOpendapUrls; //set by the constructor diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetAnomaly.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetAnomaly.java index 2df53139d..119cc83d1 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetAnomaly.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetAnomaly.java @@ -353,7 +353,7 @@ public static void test() throws Exception { //make the regular, climatology and anomaly datasets GridDataSet gridDataSet = new GridDataSetThredds(fnu, "TQSux10", - //was "http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatQS/ux10/", //was :8081 + //was "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatQS/ux10/", //was :8081 "http://thredds1.pfeg.noaa.gov/thredds/Satellite/aggregsatQS/ux10/", "BlueWhiteRed", "Linear", "-10", "10", -1, "", null, null, "S", 1, 1, "", 1, 1); GridDataSetCWLocalClimatology climatologyDataSet = new GridDataSetCWLocalClimatology( diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocal.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocal.java index 63d8fac87..65bdedc3f 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocal.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocal.java @@ -23,14 +23,9 @@ import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocalClimatology.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocalClimatology.java index 0d866ab98..6f2920b71 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocalClimatology.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetCWLocalClimatology.java @@ -25,14 +25,9 @@ import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. 
*/ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetOpendap.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetOpendap.java index 71b0b5653..d105cc494 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetOpendap.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetOpendap.java @@ -32,14 +32,9 @@ import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -85,7 +80,7 @@ public class GridDataSetOpendap extends GridDataSet { * @param internalName TMOk490 * @param gridName e.g., Ok490; may be null or "" to use the first one in the datasource * @param title the title for the dataset (may be null or "" to use the title in the datasource metadata) - * @param baseUrl e.g., http://oceanwatch.pfeg.noaa.gov:8081/thredds/dodsC/satellite/QS/ux10/ + * @param baseUrl e.g., https://oceanwatch.pfeg.noaa.gov:8081/thredds/dodsC/satellite/QS/ux10/ * The dividing line between baseUrl and timePeriodUrls is up to you. * The time periodUrls are added to baseUrl to make the individual dataset urls. * @param timePeriodUrls e.g., 1day, 5day @@ -779,7 +774,7 @@ private Table getSuperTimeSeries(String newDir, double x, double y, */ public static void testGetTimeSeries() throws Exception { String2.log("\n*** start TestBrowsers.testGetTimeSeries"); - String url = "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/GA/ssta/"; //oceanwatch + String url = "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/GA/ssta/"; //oceanwatch DataHelper.verbose = true; GridDataSetOpendap.verbose = true; @@ -949,7 +944,7 @@ public static void test() throws Exception { //********************************************************************* internalName = "OQSux10"; sixName = internalName.substring(1); - String url = "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/ux10/"; //oceanwatch + String url = "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/ux10/"; //oceanwatch gridDataSet = new GridDataSetOpendap( internalName, "QSux10", null, url, new String[]{"1day", "3day", "4day", "8day", "14day", "mday"}, diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetThredds.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetThredds.java index 948175226..fe168d945 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetThredds.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/GridDataSetThredds.java @@ -39,14 +39,9 @@ import org.w3c.dom.NodeList; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. 
- * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -145,9 +140,9 @@ public GridDataSetThredds(FileNameUtility fileNameUtility, String internalName, String baseBaseUrl = baseUrl.substring(0, threddsPo); /* -top level catalog is at http://oceanwatch.pfeg.noaa.gov:8081/thredds/catalog.xml -excerpt from http://oceanwatch.pfeg.noaa.gov:8081/thredds/Satellite/aggregsatMO/k490/catalog.xml -to see opendap server e.g., http://oceanwatch.pfeg.noaa.gov:8081/thredds/dodsC/satellite/MO/k490/1day.html +top level catalog is at https://oceanwatch.pfeg.noaa.gov/thredds/catalog.xml +excerpt from https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/catalog.xml +to see opendap server e.g., https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MO/k490/1day.html oceanwatch numerical ip from coastwatch computer is 192.168.31.13) Most datasets have 2 levels: @@ -234,8 +229,8 @@ public GridDataSetThredds(FileNameUtility fileNameUtility, String internalName, long tTime = System.currentTimeMillis(); //need to convert - // baseUrl "http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/" + "catalog.xml", - // to "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/" + "satellite/MO/k490/hday", + // baseUrl "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/" + "catalog.xml", + // to "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/" + "satellite/MO/k490/hday", // just add ".html" to make user-friendly link dataSetUrl = baseBaseUrl + opendapServiceBase + urlPath; @@ -882,7 +877,7 @@ private Table getSuperTimeSeries(String newDir, double x, double y, public static void testGetTimeSeries() throws Exception { String2.log("\n*** start TestBrowsers.testGetTimeSeries"); String url = - //was "http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatGA/ssta/"; //oceanwatch + //was "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatGA/ssta/"; //oceanwatch "http://thredds1.pfeg.noaa.gov/thredds/Satellite/aggregsatGA/ssta/"; DataHelper.verbose = true; @@ -1038,14 +1033,14 @@ public static void test() throws Exception { FileNameUtility.verbose = true; FileNameUtility fnu = new FileNameUtility("gov.noaa.pfel.coastwatch.CWBrowser"); -// String baseUrl = "http://oceanwatch.pfeg.noaa.gov:8081/thredds/Satellite/aggregsatMO/k490/"; +// String baseUrl = "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/"; // String2.log("getUrlString for " + baseUrl + "catalog.xml"); // String2.log(SSR.getUrlResponseString(baseUrl + "catalog.xml")); // Opendap opendap = new Opendap( -// //"http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/hday", -// //"http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/hday", -// "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MO/k490/hday.das", +// //"https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/hday", +// //"https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMO/k490/hday", +// "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MO/k490/hday.das", // acceptDeflate); //throws Exception if trouble // public GridDataSetThredds(FileNameUtility fileNameUtility, String internalName, String baseUrl, @@ -1073,7 +1068,7 @@ public static void test() throws Exception { internalName = "TJ1ugeo"; sixName = internalName.substring(1); gridDataSet = new GridDataSetThredds(fnu, 
internalName, - //was "http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatJ1/ugeo/", //was :8081 + //was "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatJ1/ugeo/", //was :8081 "http://thredds1.pfeg.noaa.gov/thredds/Satellite/aggregsatJ1/ugeo/", "BlueWhiteRed", "Linear", "-10", "10", -1, "", null, null, "S", 1, 0, "", 1, 1); @@ -1217,7 +1212,7 @@ public static void test() throws Exception { //These mimic tests in Grid.testReadGrdSubset(). //Comment out this line with /* to comment out this test. gridDataSet = new GridDataSetThredds(fnu, "TMBchla", - //was "http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMB/chla/", //was :8081 + //was "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMB/chla/", //was :8081 "http://thredds1.pfeg.noaa.gov/thredds/Satellite/aggregsatMB/chla/", "Rainbow", "Log", ".001", "30", -1, "", null, null, "S", 1, 0, "", 1, 1); fileName = "temp"; @@ -1307,7 +1302,7 @@ public static void test() throws Exception { //one time only: get one of these files for testing readGrd //actual dataset minX=120.0 maxX=320.0 minY=-45.0 maxY=65.0 xInc=0.025 yInc=0.025 gridDataSet = new GridDataSetThredds(fnu, "TMBchla", - "http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMB/chla/", //was :8081 + "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMB/chla/", //was :8081 "Rainbow", "Log", ".001", "30", -1, "", null, null); fileName = "TestReadGrgTMBchla"; @@ -1345,7 +1340,7 @@ public static void quickTest(String twoName, String fourName) throws Exception { "gov/noaa/pfel/coastwatch/griddata/"; GridDataSetThredds gridDataSet = new GridDataSetThredds(fnu, internalName, - //was "http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsat" + + //was "https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsat" + "http://thredds1.pfeg.noaa.gov/thredds/Satellite/aggregsat" + twoName + "/" + fourName + "/", "BlueWhiteRed", "Linear", "-10", "10", -1, "", null, null, diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/NcHelper.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/NcHelper.java index 4dd942f5c..fbaa627c8 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/NcHelper.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/NcHelper.java @@ -21,14 +21,9 @@ import java.util.List; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -75,8 +70,8 @@ public class NcHelper { */ public final static String StringLengthSuffix = "_strlen"; //pre 2012-06-05 was StringLength="StringLength"; - /** Since .nc files can store 16bit char or longs, those data types are stored as - * shorts or Strings and these messages are added to the attributes. + /** Since .nc files can't store 16bit char or longs, those data types are stored as + * shorts or doubles and these messages are added to the attributes. 
*/ public final static String originally_a_CharArray = "originally a CharArray"; public final static String originally_a_LongArray = "originally a LongArray"; @@ -86,6 +81,15 @@ public class NcHelper { */ public final static int LONG_MAXSTRINGLENGTH = 20; + /** + * Tell netcdf-java to object if a file is truncated + * (e.g., didn't get completely copied over) + * See email from Christian Ward-Garrison Nov 2, 2016 + */ + static { + ucar.nc2.iosp.netcdf3.N3header.disallowFileTruncation = true; + } + /** * This generates a String with a dump of the contents of a netcdf file. * WARNING: if the file is big, this can be very slow. @@ -241,7 +245,9 @@ public static int get1DArrayLength(Array array) { long n; if (array instanceof ArrayChar.D2) { n = ((ArrayChar.D2)array).getShape()[0]; - } else n = array.getSize(); + } else { + n = array.getSize(); + } Test.ensureTrue(n < Integer.MAX_VALUE, String2.ERROR + " in NcHelper.getSize; n = " + n); return (int)n; //safe since checked above @@ -255,19 +261,39 @@ public static int get1DArrayLength(Array array) { * @param pa * @return an Attribute */ - public static Attribute createAttribute(String name, PrimitiveArray pa) { + public static Attribute createAttribute(boolean nc3Mode, String name, PrimitiveArray pa) { if (pa instanceof StringArray) { - //String2.log("***getAttribute nStrings=" + pa.size()); - String ts = Attributes.valueToNcString(pa); - //int maxLength = 32000; //pre 2010-10-13 8000 ok; 9000 not; now >32K ok; unclear what new limit is - //if (ts.length() > maxLength) - // ts = ts.substring(0, maxLength - 3) + "..."; - //String2.log("***getAttribute string=\"" + ts + "\""); - return new Attribute(name, ts); + if (nc3Mode) { + //String2.log("***getAttribute nStrings=" + pa.size()); + String ts = Attributes.valueToNcString(pa); + //int maxLength = 32000; //pre 2010-10-13 8000 ok; 9000 not; now >32K ok; unclear what new limit is + //if (ts.length() > maxLength) + // ts = ts.substring(0, maxLength - 3) + "..."; + //String2.log("***getAttribute string=\"" + ts + "\""); + return new Attribute(name, ts); + } else { + String s = ((StringArray)pa).toNewlineString(); + return new Attribute(name, s.length() == 0? "" : s.substring(0, s.length() - 1)); + } } - return new Attribute(name, get1DArray(pa.toObjectArray())); + return new Attribute(name, pa instanceof CharArray? + //pass all (Unicode) chars through unchanged + Array.factory(char.class, new int[]{pa.size()}, pa.toObjectArray()) : + get1DArray(pa.toObjectArray())); //this would convert chars to ISO_8859_1 } + /** + * This makes an ArrayString.D1 for use with netcdf-4. + */ + public static ArrayString.D1 getStringArrayD1(StringArray sa) { + int n = sa.size(); + ArrayString.D1 asd1 = new ArrayString.D1(n); + for (int i = 0; i < n; i++) + asd1.set(i, sa.get(i)); + return asd1; + } + + /** * This converts a String or array of primitives into a * ucar.nc2.ArrayXxx.D1. 
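Before the get1DArray changes below, a hedged sketch of the nc3Mode long-attribute
path end to end (NcHelper, LongArray, and createAttribute(nc3Mode, ...) are from
this patch; the demo class, attribute name, and values are illustrative only):

    import com.cohort.array.LongArray;
    import com.cohort.array.PrimitiveArray;
    import gov.noaa.pfel.coastwatch.griddata.NcHelper;
    import ucar.nc2.Attribute;

    public class Nc3LongAttSketch {
        public static void main(String[] args) {
            //nc3 files have no 64-bit integer type, so with nc3Mode=true a
            //LongArray attribute is written as Float64 values (exact only up to 2^53)
            PrimitiveArray pa = new LongArray(new long[]{1L, 20170518L});
            Attribute att = NcHelper.createAttribute(true, "myLongs", pa);
            System.out.println(att);  //the values are now stored as doubles
            //elsewhere, the originally_a_LongArray message is added to the
            //attributes so readers know to convert back: pa = new LongArray(pa)
        }
    }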
@@ -283,13 +309,24 @@ public static Array get1DArray(Object o) { //will be handled below } - if (o instanceof char[]) return Array.factory(char.class, new int[]{((char[])o).length}, o); + if (o instanceof char[]) { + //netcdf-java just writes low byte, so use String2.toIso88591Chars() + char[] car1 = (char[])o; + int n = car1.length; + char[] car2 = new char[n]; + for (int i = 0; i < n; i++) + car2[i] = String2.toIso88591Char(car1[i]); + return Array.factory(char.class, new int[]{n}, car2); + } + + if (o instanceof byte[]) return Array.factory(byte.class, new int[]{((byte[])o).length}, o); if (o instanceof short[]) return Array.factory(short.class, new int[]{((short[])o).length}, o); if (o instanceof int[]) return Array.factory(int.class, new int[]{((int[])o).length}, o); if (o instanceof long[]) { - o = (new LongArray((long[])o)).toStringArray(); - //then falls through to String[] handling + //String2.log("\n>> long values=" + String2.toCSSVString((long[])o)); + o = (new DoubleArray(new LongArray((long[])o))).toArray(); //then falls through to Double handling + //String2.log(">> as doubles=" + String2.toCSSVString((double[])o)); } if (o instanceof float[]) return Array.factory(float.class, new int[]{((float[])o).length}, o); if (o instanceof double[]) return Array.factory(double.class, new int[]{((double[])o).length}, o); @@ -305,7 +342,8 @@ public static Array get1DArray(Object o) { //String2.log("NcHelper.get1DArray String[] max=" + max); ArrayChar.D2 ac = new ArrayChar.D2(sar.length, max); for (int i = 0; i < sar.length; i++) { - ac.setString(i, sar[i]); + //setString just does low byte, so use String2.toIso88591String() + ac.setString(i, String2.toIso88591String(sar[i])); //String s = sar[i]; //int sLength = s.length(); //for (int po = 0; po < sLength; po++) @@ -400,7 +438,7 @@ public static Object getArray(Array nc2Array, boolean buildStringsFromChars) { ArrayObject ao = ((ArrayChar)nc2Array).make1DStringArray(); String sa[] = String2.toStringArray((Object[])ao.copyTo1DJavaArray()); for (int i = 0; i < sa.length; i++) - sa[i] = String2.canonical(sa[i]); + sa[i] = String2.canonical(String2.trimEnd(sa[i])); return sa; } @@ -446,22 +484,25 @@ public static Class getElementClass(DataType dataType) { /** * This converts an ElementType (e.g., int.class for integer primitives) - * into an netcdf DataType. + * into an netcdf-3 DataType. * BEWARE: .nc files store strings as char arrays, so * if variable.getRank()==1 it is a char variable, but - * if variable.getRang()==2 it is a String variable. + * if variable.getRang()==2 it is a String variable. [It isn't that simple!] * This throws Exception if elementClass not found. * - * @param elementClass the PrimitiveArray elementClass (e.g., int.class for integer primitives) + * @param elementClass the PrimitiveArray elementClass + * (e.g., int.class for integer primitives). + * longs are converted to doubles. * @return the corresponding netcdf dataType */ public static DataType getDataType(Class elementClass) { - if (elementClass == boolean.class) return DataType.BOOLEAN; + if (elementClass == boolean.class) return DataType.BOOLEAN; //? 
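        //note: the nc3 classic format has no boolean type (its atomic types are
        //byte, char, short, int, float, and double), hence the "?" just above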
if (elementClass == byte.class) return DataType.BYTE; if (elementClass == char.class) return DataType.CHAR; if (elementClass == double.class) return DataType.DOUBLE; if (elementClass == float.class) return DataType.FLOAT; if (elementClass == int.class) return DataType.INT; + if (elementClass == long.class) return DataType.DOUBLE; // long -> double if (elementClass == short.class) return DataType.SHORT; if (elementClass == String.class) return DataType.STRING; //STRUCTURE not converted @@ -888,19 +929,20 @@ public static Variable findVariable(NetcdfFile netcdfFile, String variableName) /** * This adds global (group) attributes to a netcdf file's group. * - * @param rootGroup + * @param group usually the rootGroup * @param attributes the Attributes that will be set */ - public static void setAttributes(Group group, Attributes attributes) { + public static void setAttributes(boolean nc3Mode, Group group, Attributes attributes) { String names[] = attributes.getNames(); for (int ni = 0; ni < names.length; ni++) { String tName = names[ni]; - if (!String2.isSomething(tName)) + if (!String2.isSomething(tName) || + tName.equals("_NCProperties")) //If I write this, netcdf nc4 code later throws Exception when it writes its own version continue; PrimitiveArray tValue = attributes.get(tName); if (tValue == null || tValue.size() == 0 || tValue.toString().length() == 0) continue; //do nothing - group.addAttribute(createAttribute(tName, tValue)); + group.addAttribute(createAttribute(nc3Mode, tName, tValue)); } } @@ -910,7 +952,7 @@ public static void setAttributes(Group group, Attributes attributes) { * @param var e.g., from findVariable(netcdfFile, varName) * @param attributes the Attributes that will be set */ - public static void setAttributes(Variable var, Attributes attributes) { + public static void setAttributes(boolean nc3Mode, Variable var, Attributes attributes) { String names[] = attributes.getNames(); for (int ni = 0; ni < names.length; ni++) { String tName = names[ni]; @@ -919,7 +961,7 @@ public static void setAttributes(Variable var, Attributes attributes) { PrimitiveArray tValue = attributes.get(tName); if (tValue == null || tValue.size() == 0 || tValue.toString().length() == 0) continue; //do nothing - var.addAttribute(createAttribute(tName, tValue)); + var.addAttribute(createAttribute(nc3Mode, tName, tValue)); } } @@ -1147,8 +1189,8 @@ public static void unpackAttributes(Variable var, Attributes sAtts) { addPA != null? addPA.elementClass() : unsigned && oClass == byte.class? short.class : //similar code below unsigned && oClass == short.class? int.class : - unsigned && oClass == int.class? double.class : //longs are trouble - unsigned && oClass == long.class? double.class : + unsigned && oClass == int.class? double.class : //longs are converted to double + unsigned && oClass == long.class? 
double.class : //longs are converted to double oClass; if (sAtts.remove("_FillValue") != null) sAtts.set( "_FillValue", PrimitiveArray.factory(destClass, 1, "")); @@ -1264,8 +1306,8 @@ public static PrimitiveArray unpackPA(Variable var, PrimitiveArray dataPa, if (unsigned && tClass == dataPaClass) { if (tClass == byte.class) tClass = short.class; else if (tClass == short.class) tClass = int.class; - else if (tClass == int.class) tClass = double.class; //longs are trouble - else if (tClass == long.class) tClass = double.class; + else if (tClass == int.class) tClass = double.class; //longs are converted to doubles + else if (tClass == long.class) tClass = double.class; //longs are converted to doubles } //switch data type @@ -1342,7 +1384,7 @@ public static PrimitiveArray unpackPA(Variable var, PrimitiveArray dataPa, } //convert numeric time to epochSeconds - dataPa2.scaleAddOffset(baseFactor[1], baseFactor[0]); + dataPa2 = Calendar2.unitsSinceToEpochSeconds(baseFactor[0], baseFactor[1], dataPa2); if (debugMode) String2.log( ">> numeric time as epochSeconds: " + dataPa2.subset(0, 1, Math.min(10, dataPa2.size()-1)).toString()); @@ -1842,41 +1884,23 @@ public static String[] readColumnNames(String fullName) throws Exception { /** * This writes values to a 1D netcdf variable in a NetcdfFileWriter. - * This works with all PrimitiveArray types, but - *
LongArray is stored as a StringArray, so retrieve with + * This works with all PrimitiveArray types, but in nc3mode: + *
LongArray is stored as doubles, so retrieve with *
pa = new LongArray(pa), and - *
CharArray is stored as a ShortArray, so retrieve with - *
pa = new CharArray(((ShortArray)pa).toArray()). + *
CharArray is stored as chars (ISO-8859-1). * * @param netcdfFileWriter * @param variableName - * @param firstRow + * @param firstRow This is the origin/where to write this chunk of data + * within the complete var in the nc file. * @param pa will be converted to the appropriate numeric type * @throws Exception if trouble */ - public static void write(NetcdfFileWriter netcdfFileWriter, + public static void write(boolean nc3Mode, NetcdfFileWriter netcdfFileWriter, Variable var, int firstRow, PrimitiveArray pa) throws Exception { - write(netcdfFileWriter, + write(nc3Mode, netcdfFileWriter, var, new int[]{firstRow}, new int[]{pa.size()}, pa); - - } - - /** - * This returns a PrimitiveArray (usually the same one) that has a - * data type that is suitable for .nc files - * (LongArray becomes StringArray, CharArray becomes ShortArray). - * - * @param pa a PrimitiveArray - * @return a PrimitiveArray (usually the same one) that has a - * data type that is suitable for .nc files. - */ - public static PrimitiveArray getNcSafePA(PrimitiveArray pa) { - if (pa.elementClass() == char.class) - return new ShortArray(((CharArray)pa).toArray()); - if (pa.elementClass() == long.class) - return new StringArray(pa); - return pa; } /** @@ -1894,10 +1918,17 @@ public static PrimitiveArray getNcSafePA(PrimitiveArray pa) { * Don't include StringLength dimension. * @param pa the data to be written */ - public static void write(NetcdfFileWriter netcdfFileWriter, + public static void write(boolean nc3Mode, NetcdfFileWriter netcdfFileWriter, Variable var, int origin[], int shape[], PrimitiveArray pa) throws Exception { - pa = getNcSafePA(pa); + if (nc3Mode) { + if (pa.elementClass() == long.class) + pa = new DoubleArray(pa); + else if (pa.elementClass() == char.class) + pa = (new CharArray(pa)).toIso88591(); //netcdf-java just writes low byte + else if (pa.elementClass() == String.class) + pa = (new StringArray(pa)).toIso88591(); //netcdf-java just writes low byte + } if (pa instanceof StringArray) { netcdfFileWriter.writeStringData(var, origin, @@ -1983,7 +2014,7 @@ public static BitSet testRows(Variable testVariables[], double min[], double max /** * This writes the PrimitiveArrays into an .nc file. - * This works with all PrimitiveArray types, but some datatypes + * This works with all PrimitiveArray types, but some datatypes (chars and longs) * are specially encoded in the files and then automatically decoded when read. * * @param fullName for the file (This writes to an intermediate file then renames quickly.) @@ -2101,7 +2132,10 @@ else if (pas[var].elementClass() == String.class) /** * This reads the PAs in the .nc file. * This works with all PrimitiveArray types, but some datatypes - * are specially encoded in the files and then automatically decoded when read. + * (char, long, String) + * are specially encoded in the files and then automatically decoded when read, + * so that this fully supports 2byte chars, longs, and Unicode Strings + * (by encoding as json in file). * * @param fullName the name of the .nc file. * @param loadVarNames the names of the variables to load (null to get all) @@ -2563,7 +2597,7 @@ public static void testBasic() throws Throwable { DoubleArray da = new DoubleArray(new double[]{1.1, 2.2, 9.9, Double.NaN}); StringArray sa = new StringArray(); for (int i = 0; i < 65536; i++) - sa.add("a" + (char)i + "z"); + sa.add("a" + (i==8?" 
" : (char)i) + "z"); //backspace not saved //write to file fullName = "c:/temp/PAsInNc.nc"; File2.delete(fullName); //for test, make double sure it doesn't already exist diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OQNux10S1day_20050712_x-135_X-105_y22_Y50.nc b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OQNux10S1day_20050712_x-135_X-105_y22_Y50.nc index afcea0733879620cad76d7fa5f2e54c5d9ee0fcf..41f43348ad99d1bc8f2159c633c42c9965d9cdc5 100644 GIT binary patch delta 68 zcmccdl=;R}<_+mg?B==#rn&}}lk=El`JwD61|Sf2O`QCWPkQrKrZQ#$s6>dNg_Qvi LnQpdboBIX;tA`U9 delta 68 zcmccdl=;R}<_+mg>}I-#2D(P3lk=El`JwD61|Sf2O`QCWPkQrKrZQ#$s6>d7k(G(5 Mm4V@AYqq&>0H`Mt`v3p{ diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Opendap.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Opendap.java index 30600d34d..fd23917dd 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Opendap.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/Opendap.java @@ -1155,7 +1155,7 @@ public static int findFirstGE(PrimitiveVector pv, double x) { /** * This connects to the opendapUrl and gets the dataDds from the query. * - * @param opendapUrl e.g., "http://oceanwatch.pfeg.noaa.gov:8081/thredds/dodsC/satellite/AG/ssta/3day" + * @param opendapUrl e.g., "https://oceanwatch.pfeg.noaa.gov:8081/thredds/dodsC/satellite/AG/ssta/3day" * @param query e.g., "?CMusfc.CMusfc[0:1:0][0:1:0][0:1:20][0:1:20]", already percentEncoded as needed * @param doAsciiTestToo * @throws Exception if trouble @@ -1246,7 +1246,7 @@ private static void doSimpleSpeedTests(String satelliteUrl, boolean doDotTestToo public static void doOceanWatchSpeedTests(boolean doDotTestToo, boolean doAsciiTestToo) throws Exception { System.out.println("\nOpendap.doOceanWatchSpeedTests"); try { - doSimpleSpeedTests("http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/", + doSimpleSpeedTests("https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/", doDotTestToo, doAsciiTestToo); } catch (Exception e) { String2.log(MustBe.throwableToString(e)); @@ -1408,7 +1408,7 @@ public static void main(String args[]) throws Exception { //test THREDDS //was :8081 - opendap = new Opendap("http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/GA/ssta/3day", true, null); + opendap = new Opendap("https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/GA/ssta/3day", true, null); DConnect dConnect = new DConnect(opendap.url, opendap.acceptDeflate, 1, 1); opendap.getGridInfo(dConnect.getDAS(OpendapHelper.DEFAULT_TIMEOUT), dConnect.getDDS(OpendapHelper.DEFAULT_TIMEOUT), "GAssta", "-1.0e34"); @@ -1421,7 +1421,7 @@ public static void main(String args[]) throws Exception { //test THREDDS opendap = new Opendap( //was :8081 - "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/AG/ssta/3day", + "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/AG/ssta/3day", true, null); dConnect = new DConnect(opendap.url, opendap.acceptDeflate, 1, 1); opendap.getGridInfo(dConnect.getDAS(60000), dConnect.getDDS(60000), "AGssta", "-1.0e34"); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapDump.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapDump.java index 442f5de9f..79df9b0a0 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapDump.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapDump.java @@ -34,8 +34,8 @@ public static void main(String args[]) throws Exception { boolean verbose = true; boolean acceptDeflate = true; String urlName = - 
//"http://data.nodc.noaa.gov/cgi-bin/nph-dods/pathfinder/Version5.0/5day/1990/1990001-1990005.s0451pfv50-sst-16b.hdf"; - //"http://data.nodc.noaa.gov/cgi-bin/nph-dods/pathfinder/Version5.0/Monthly/1985/198501.m04m1pfv50-qual.hdf"; + //"https://data.nodc.noaa.gov/cgi-bin/nph-dods/pathfinder/Version5.0/5day/1990/1990001-1990005.s0451pfv50-sst-16b.hdf"; + //"https://data.nodc.noaa.gov/cgi-bin/nph-dods/pathfinder/Version5.0/Monthly/1985/198501.m04m1pfv50-qual.hdf"; "http://las.pfeg.noaa.gov/cgi-bin/nph-dods/data/oceanwatch/nrt/gac/AG1day.nc"; String expr = //"?qual[0:2:20][0:2:20]"; diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapHelper.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapHelper.java index 293acbc2c..16e6071b7 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapHelper.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/OpendapHelper.java @@ -255,6 +255,7 @@ else if (type == Attribute.INT16 || attributes.set(name.trim(), pa); } } + attributes.fromNccsvStrings(); } /** @@ -486,7 +487,7 @@ public static PrimitiveArray[] getPrimitiveArrays(DConnect dConnect, String quer * @throws Exception if trouble */ public static PrimitiveArray[] getPrimitiveArrays(BaseType baseType) throws Exception { - //String2.log(" baseType=" + baseType.getTypeName()); + //String2.log(">> baseType=" + baseType.getTypeName()); if (baseType instanceof DGrid) { ArrayList al = String2.toArrayList( ((DGrid)baseType).getVariables() ); //enumeration -> arraylist PrimitiveArray paAr[] = new PrimitiveArray[al.size()]; @@ -517,7 +518,7 @@ public static PrimitiveArray[] getPrimitiveArrays(BaseType baseType) throws Exce return new PrimitiveArray[]{ new ByteArray( new byte[] {(byte)(((DBoolean)baseType).getValue()? 1 : 0)})}; } else if (baseType instanceof DString) { -String2.log(" baseType is DString=" + String2.toJson(((DString)baseType).getValue())); + //String2.log(">> baseType is DString=" + String2.toJson(((DString)baseType).getValue())); return new PrimitiveArray[]{ new StringArray(new String[]{((DString)baseType).getValue()})}; } else { @@ -692,21 +693,21 @@ else throw new Exception(String2.ERROR + "in OpendapHelper.getPrimitiveVector: T * PrimitiveArray's type. * *

Some Java types don't have exact matches. The closest match is returned, - * e.g., short and char become int, long becomes double + * e.g., char becomes String, long becomes double * * @param c the Java type class e.g., float.class * @return the corresponding atomic-type String * @throws Exception if trouble */ public static String getAtomicType(Class c) throws Exception { - if (c == long.class || //imperfect; there will be loss of precision + if (c == long.class || // DAP has no long. This is imperfect; there will be loss of precision c == double.class) return "Float64"; if (c == float.class) return "Float32"; if (c == int.class) return "Int32"; - if (c == short.class || - c == char.class) return "Int16"; + if (c == short.class) return "Int16"; if (c == byte.class) return "Byte"; - if (c == String.class) return "String"; + if (c == char.class || // DAP has no char, so represent it as a String + c == String.class) return "String"; throw new Exception(String2.ERROR + "in OpendapHelper.getAtomicType: The classType=" + PrimitiveArray.elementClassToString(c) + " is not supported."); } @@ -754,6 +755,7 @@ public static StringBuilder dasToStringBuilder(String varName, Attributes attrib StringBuilder sb = new StringBuilder(); //see EOL definition for comments about it + int firstUEncodedChar = encodeAsHTML? 65536 : 127; sb.append(" " + XML.encodeAsHTML(varName, encodeAsHTML) + " {" + EOL); String names[] = attributes.getNames(); for (int ni = 0; ni < names.length; ni++) { @@ -761,24 +763,28 @@ public static StringBuilder dasToStringBuilder(String varName, Attributes attrib Class et = pa.elementClass(); sb.append(XML.encodeAsHTML(" " + getAtomicType(et) + " " + names[ni] + " ", encodeAsHTML)); int paSize = pa.size(); - if (et == String.class) { + if (et == char.class || et == String.class) { //enquote, and replace internal quotes with \" - for (int pai = 0; pai < paSize; pai++) { - String ts = pa.getString(pai); - if (encodeAsHTML) { - ts = String2.noLongLinesAtSpace(ts, 78, ""); - if (ts.indexOf('\n') >= 0) - sb.append('\n'); //start on new line, so first line isn't super long - } - sb.append(XML.encodeAsHTML( - "\"" + String2.replaceAll(ts, "\"", "\\\"") + "\"", encodeAsHTML)); - sb.append(pai < paSize - 1 ? ", " : ""); + String ts = String2.toSVString(pa.toStringArray(), "\n", false); + if (encodeAsHTML) { + ts = String2.noLongLinesAtSpace(ts, 78, ""); + if (ts.indexOf('\n') >= 0) + sb.append('\n'); //start on new line, so first line isn't super long } - } else if (et == double.class) { + //DAP 2.0 appendix A says \ becomes \\ and " becomes \" + //2017-05-05 I considered toJson, but DASParser doesn't like e.g., \\uhhhh + //ts = String2.toJson(ts, firstUEncodedChar, false), + // encodeAsHTML)); + ts = String2.replaceAll(ts, "\\", "\\\\"); + ts = "\"" + String2.replaceAll(ts, "\"", "\\\"") + "\""; + //String2.log(">> ts=" + ts); + sb.append(XML.encodeAsHTML(ts, encodeAsHTML)); + } else if (et == double.class || + et == long.class) { //the spec says must be like Ansi C printf, %g format, precision=6 - //I couldn't get Jikes to compile String.format. for (int pai = 0; pai < paSize; pai++) { String ts = "" + pa.getDouble(pai); + //if (et==long.class) String2.log(">> Opendap long att #" + pai + " = " + pa.getString(pai) + " => " + ts); ts = String2.replaceAll(ts, "E-", "e-"); //do first ts = String2.replaceAll(ts, "E", "e+"); sb.append(ts + @@ -786,9 +792,13 @@ public static StringBuilder dasToStringBuilder(String varName, Attributes attrib (pai < paSize - 1 ? 
", " : "")); } } else if (et == float.class) { - for (int pai = 0; pai < paSize; pai++) - sb.append(pa.getFloat(pai) + - (pai < paSize - 1 ? ", " : "")); + for (int pai = 0; pai < paSize; pai++) { + String ts = "" + pa.getFloat(pai); + ts = String2.replaceAll(ts, "E-", "e-"); //do first + ts = String2.replaceAll(ts, "E", "e+"); + sb.append(ts + + (pai < paSize - 1 ? ", " : "")); + } } else { sb.append(pa.toString()); } @@ -1190,7 +1200,7 @@ public static void testFindAllScalarOrMultiDimVars() throws Throwable { //***** test of NODC template dataset String2.log("\n*** test of NODC template dataset"); - url = "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/BodegaMarineLabBuoyCombined.nc"; + url = "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/BodegaMarineLabBuoyCombined.nc"; dConnect = new DConnect(url, true, 1, 1); dds = dConnect.getDDS(DEFAULT_TIMEOUT); results = String2.toCSSVString(findAllScalarOrMultiDimVars(dds)); @@ -1270,6 +1280,7 @@ public static void allDapToNc(String dapUrl, String fullFileName) throws Throwab NetcdfFileWriter ncOut = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, fullFileName + randomInt); + boolean nc3Mode = true; try { Group rootGroup = ncOut.addGroup(null, ""); @@ -1411,13 +1422,13 @@ public static void allDapToNc(String dapUrl, String fullFileName) throws Throwab } //write data variable attributes in ncOut - NcHelper.setAttributes(newVars[v], varAtts); + NcHelper.setAttributes(nc3Mode, newVars[v], varAtts); } //write global attributes in ncOut Attributes gAtts = new Attributes(); getAttributes(das, "GLOBAL", gAtts); - NcHelper.setAttributes(rootGroup, gAtts); + NcHelper.setAttributes(nc3Mode, rootGroup, gAtts); //leave "define" mode in ncOut ncOut.create(); @@ -1483,7 +1494,7 @@ public static void allDapToNc(String dapUrl, String fullFileName) throws Throwab * @param whichTests -1 for all, or 0.. for specific ones */ public static void testAllDapToNc(int whichTests) throws Throwable { - //tests from nodc template examples http://www.nodc.noaa.gov/data/formats/netcdf/ + //tests from nodc template examples https://www.nodc.noaa.gov/data/formats/netcdf/ String2.log("\n*** OpendapHelper.testAllDapToNc(" + whichTests + ")"); String dir = "c:/data/nodcTemplates/"; String fileName; @@ -1493,15 +1504,14 @@ public static void testAllDapToNc(int whichTests) throws Throwable { try { //this tests numeric scalars, and numeric and String 1D arrays fileName = "pointKachemakBay.nc"; - url = "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/point/KachemakBay.nc"; + url = "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/point/KachemakBay.nc"; allDapToNc(url, dir + fileName); results = NcHelper.dds(dir + fileName); String2.log(results); //expected = "zztop"; //Test.ensureEqual(results, expected, ""); } catch (Throwable t) { - String2.pressEnterToContinue(MustBe.throwableToString(t) + - "\n2016-09-15 This dataset is gone! 
Fix this!"); + String2.pressEnterToContinue(MustBe.throwableToString(t)); } } @@ -1509,7 +1519,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { try { //this tests numeric and String scalars, and numeric 1D arrays fileName = "timeSeriesBodegaMarineLabBuoy.nc"; - url = "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/BodegaMarineLabBuoy.nc"; + url = "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeries/BodegaMarineLabBuoy.nc"; allDapToNc(url, dir + fileName); results = NcHelper.dds(dir + fileName); expected = @@ -1542,8 +1552,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { "}\n"; Test.ensureEqual(results, expected, "results=\n" + results); } catch (Throwable t) { - String2.pressEnterToContinue(MustBe.throwableToString(t) + - "\n2016-09-15 This dataset is gone! Fix this!"); + String2.pressEnterToContinue(MustBe.throwableToString(t)); } } @@ -1551,7 +1560,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { try { //this tests numeric scalars, and grids fileName = "trajectoryAoml_tsg.nc"; - url = "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/trajectory/aoml_tsg.nc"; + url = "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/trajectory/aoml_tsg.nc"; allDapToNc(url, dir + fileName); results = NcHelper.dds(dir + fileName); String2.log(results); @@ -1591,8 +1600,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { "}\n"; Test.ensureEqual(results, expected, ""); } catch (Throwable t) { - String2.pressEnterToContinue(MustBe.throwableToString(t) + - "\n2016-09-15 This dataset is gone! Fix this!"); + String2.pressEnterToContinue(MustBe.throwableToString(t)); } } @@ -1601,7 +1609,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { try { //this tests numeric scalars, and byte/numeric arrays fileName = "trajectoryJason2_satelliteAltimeter.nc"; - url = "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/trajectory/jason2_satelliteAltimeter.nc"; + url = "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/trajectory/jason2_satelliteAltimeter.nc"; allDapToNc(url, dir + fileName); results = NcHelper.dds(dir + fileName); String2.log(results); @@ -1631,14 +1639,13 @@ public static void testAllDapToNc(int whichTests) throws Throwable { "}\n"; Test.ensureEqual(results, expected, ""); } catch (Throwable t) { - String2.pressEnterToContinue(MustBe.throwableToString(t) + - "\n2016-09-15 This dataset is gone! Fix this!"); + String2.pressEnterToContinue(MustBe.throwableToString(t)); } } /* if (whichTests == -1 || whichTests == 4) { //JDAP fails to read/parse the .dds: -//Exception in thread "main" com.cohort.util.SimpleException: Error while getting DDS from http://data.nodc.noaa.gov/thredds/dodsC/testdata/ne +//Exception in thread "main" com.cohort.util.SimpleException: Error while getting DDS from https://data.nodc.noaa.gov/thredds/dodsC/testdata/ne //tCDFTemplateExamples/profile/wodObservedLevels.nc.dds . 
// //Parse Error on token: String @@ -1649,7 +1656,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { // at gov.noaa.pfel.coastwatch.TestAll.main(TestAll.java:741) //this tests numeric scalars, and numeric and string arrays fileName = "profileWodObservedLevels.nc"; - url = "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/profile/wodObservedLevels.nc"; + url = "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/profile/wodObservedLevels.nc"; allDapToNc(url, dir + fileName); results = NcHelper.dumpString(dir + fileName, false); String2.log(results); @@ -1661,7 +1668,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { try { //this tests numeric scalars, and numeric arrays fileName = "timeSeriesProfileUsgs_internal_wave_timeSeries.nc"; - url = "http://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeriesProfile/usgs_internal_wave_timeSeries.nc"; + url = "https://data.nodc.noaa.gov/thredds/dodsC/testdata/netCDFTemplateExamples/timeSeriesProfile/usgs_internal_wave_timeSeries.nc"; allDapToNc(url, dir + fileName); results = NcHelper.dds(dir + fileName); String2.log(results); @@ -1689,8 +1696,7 @@ public static void testAllDapToNc(int whichTests) throws Throwable { "}\n"; Test.ensureEqual(results, expected, ""); } catch (Throwable t) { - String2.pressEnterToContinue(MustBe.throwableToString(t) + - "\n2016-09-15 This dataset is gone! Fix this!"); + String2.pressEnterToContinue(MustBe.throwableToString(t)); } } @@ -1855,6 +1861,7 @@ public static void dapToNc(String dapUrl, String varNames[], String projection, File2.makeDirectory(File2.getDirectory(fullFileName)); NetcdfFileWriter ncOut = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, fullFileName + randomInt); + boolean nc3Mode = true; try { Group rootGroup = ncOut.addGroup(null, ""); @@ -1996,7 +2003,7 @@ public static void dapToNc(String dapUrl, String varNames[], String projection, //write global attributes in ncOut Attributes tAtts = new Attributes(); getAttributes(das, "GLOBAL", tAtts); - NcHelper.setAttributes(rootGroup, tAtts); + NcHelper.setAttributes(nc3Mode, rootGroup, tAtts); //write dimension attributes in ncOut if (isDGrid) { @@ -2004,7 +2011,7 @@ public static void dapToNc(String dapUrl, String varNames[], String projection, String dimName = dims.get(d).getName(); tAtts.clear(); getAttributes(das, dimName, tAtts); - NcHelper.setAttributes(newDimVars[d], tAtts); + NcHelper.setAttributes(nc3Mode, newDimVars[d], tAtts); } } @@ -2012,7 +2019,7 @@ public static void dapToNc(String dapUrl, String varNames[], String projection, for (int v = 0; v < nVars; v++) { if (varNames[v] == null) continue; - NcHelper.setAttributes(newVars[v], varAtts[v]); + NcHelper.setAttributes(nc3Mode, newVars[v], varAtts[v]); } //leave "define" mode in ncOut @@ -2555,7 +2562,8 @@ public static void testDapToNcDGrid() throws Throwable { " :Conventions = \"COARDS, CF-1.6, ACDD-1.3\";\n" + " :creator_email = \"erd.data@noaa.gov\";\n" + " :creator_name = \"NOAA NMFS SWFSC ERD\";\n" + -" :creator_url = \"http://www.pfeg.noaa.gov\";\n" + +" :creator_type = \"institution\";\n" + +" :creator_url = \"https://www.pfeg.noaa.gov\";\n" + " :date_created = \"2010-07-02Z\";\n" + " :date_issued = \"2010-07-02Z\";\n" + " :defaultGraphQuery = \"&.draw=vectors\";\n" + @@ -2574,10 +2582,10 @@ public static void testDapToNcDGrid() throws Throwable { " :geospatial_vertical_units = \"m\";\n" + " :history = \"Remote Sensing Systems, Inc.\n" + 
"2010-07-02T15:36:22Z NOAA CoastWatch (West Coast Node) and NOAA SFSC ERD\n" + -today + "T"; // + time " http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/ux10/mday\n" + +today + "T"; // + time " https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/ux10/mday\n" + //today + " http://coastwatch.pfeg.noaa.gov/erddap/griddap/erdQSwindmday.das\";\n" + String expected2 = -" :infoUrl = \"http://coastwatch.pfeg.noaa.gov/infog/QS_ux10_las.html\";\n" + +" :infoUrl = \"https://coastwatch.pfeg.noaa.gov/infog/QS_ux10_las.html\";\n" + " :institution = \"NOAA NMFS SWFSC ERD\";\n" + " :keywords = \"altitude, atmosphere,\n" + "Atmosphere > Atmospheric Winds > Surface Winds,\n" + @@ -2601,7 +2609,8 @@ public static void testDapToNcDGrid() throws Throwable { " :projection_type = \"mapped\";\n" + " :publisher_email = \"erd.data@noaa.gov\";\n" + " :publisher_name = \"NOAA NMFS SWFSC ERD\";\n" + -" :publisher_url = \"http://www.pfeg.noaa.gov\";\n" + +" :publisher_type = \"institution\";\n" + +" :publisher_url = \"https://www.pfeg.noaa.gov\";\n" + " :references = \"RSS Inc. Winds: http://www.remss.com/ .\";\n" + " :satellite = \"QuikSCAT\";\n" + " :sensor = \"SeaWinds\";\n" + @@ -2772,7 +2781,8 @@ public static void testDapToNcDGrid() throws Throwable { " :Conventions = \"COARDS, CF-1.6, ACDD-1.3\";\n" + " :creator_email = \"erd.data@noaa.gov\";\n" + " :creator_name = \"NOAA NMFS SWFSC ERD\";\n" + -" :creator_url = \"http://www.pfeg.noaa.gov\";\n" + +" :creator_type = \"institution\";\n" + +" :creator_url = \"https://www.pfeg.noaa.gov\";\n" + " :date_created = \"2010-07-02Z\";\n" + " :date_issued = \"2010-07-02Z\";\n" + " :defaultGraphQuery = \"&.draw=vectors\";\n" + @@ -2791,10 +2801,10 @@ public static void testDapToNcDGrid() throws Throwable { " :geospatial_vertical_units = \"m\";\n" + " :history = \"Remote Sensing Systems, Inc.\n" + "2010-07-02T15:36:22Z NOAA CoastWatch (West Coast Node) and NOAA SFSC ERD\n" + -today + "T"; //time http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/ux10/mday\n" + +today + "T"; //time https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/ux10/mday\n" + //today + time " http://coastwatch.pfeg.noaa.gov/erddap/griddap/erdQSwindmday.das\";\n" + expected2 = -" :infoUrl = \"http://coastwatch.pfeg.noaa.gov/infog/QS_ux10_las.html\";\n" + +" :infoUrl = \"https://coastwatch.pfeg.noaa.gov/infog/QS_ux10_las.html\";\n" + " :institution = \"NOAA NMFS SWFSC ERD\";\n" + " :keywords = \"altitude, atmosphere,\n" + "Atmosphere > Atmospheric Winds > Surface Winds,\n" + @@ -2818,7 +2828,8 @@ public static void testDapToNcDGrid() throws Throwable { " :projection_type = \"mapped\";\n" + " :publisher_email = \"erd.data@noaa.gov\";\n" + " :publisher_name = \"NOAA NMFS SWFSC ERD\";\n" + -" :publisher_url = \"http://www.pfeg.noaa.gov\";\n" + +" :publisher_type = \"institution\";\n" + +" :publisher_url = \"https://www.pfeg.noaa.gov\";\n" + " :references = \"RSS Inc. Winds: http://www.remss.com/ .\";\n" + " :satellite = \"QuikSCAT\";\n" + " :sensor = \"SeaWinds\";\n" + @@ -2949,13 +2960,13 @@ public static void testParseStartStrideStop() { public static void test() throws Throwable{ String2.log("\n*** OpendapHelper.test..."); -/* +/* */ testGetAttributes(); testParseStartStrideStop(); testFindVarsWithSharedDimensions(); testFindAllScalarOrMultiDimVars(); testDapToNcDArray(); - */ testDapToNcDGrid(); + testDapToNcDGrid(); testAllDapToNc(-1); //-1 for all tests, or 0.. 
for specific test String2.log("\n***** OpendapHelper.test finished successfully"); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/SaveOpendap.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/SaveOpendap.java index a888c78e3..c72e6edb9 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/SaveOpendap.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/SaveOpendap.java @@ -19,14 +19,9 @@ import java.util.List; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -418,13 +413,13 @@ public static void test() throws Exception { /* doesn't work yet //test an opendap sequence (see Table.testConvert) name = "sequence.nc"; - asNc("http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month=\"5\"", + asNc("https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month=\"5\"", dir + name); //String outName = testDir + "convert.nc"; //convert(inName, READ_OPENDAP_SEQUENCE, outName, SAVE_AS_NC, "row", false); //Table table = new Table(); //table.readFlatNc(outName, null, 0); //should be already unpacked - //String2.log(table.toString("row", 3)); + //String2.log(table.toString(3)); //Test.ensureEqual(table.nColumns(), 2, ""); //Test.ensureEqual(table.nRows(), 190, ""); //Test.ensureEqual(table.getColumnName(0), "t0", ""); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/TwoGrids.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/TwoGrids.java index 5afa8404e..4f457e8a5 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/TwoGrids.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/griddata/TwoGrids.java @@ -39,14 +39,9 @@ import java.util.GregorianCalendar; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. 
*/ import ucar.ma2.*; import ucar.nc2.*; @@ -346,6 +341,7 @@ public static void saveAsNetCDF(Grid grid1, Grid grid2, NetcdfFileWriter nc = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, directory + randomInt + ".nc"); + boolean nc3Mode = true; try { Group rootGroup = nc.addGroup(null, ""); nc.setFill(false); @@ -410,7 +406,7 @@ public static void saveAsNetCDF(Grid grid1, Grid grid2, NcHelper.get1DArray(matrix))); //float64[] {a, b, c, d, e, f} } else { rootGroup.addAttribute( - NcHelper.createAttribute(names[i], grid1.globalAttributes().get(names[i]))); + NcHelper.createAttribute(nc3Mode, names[i], grid1.globalAttributes().get(names[i]))); } } @@ -435,18 +431,18 @@ public static void saveAsNetCDF(Grid grid1, Grid grid2, altitudeVar.addAttribute(new Attribute("_CoordinateZisPositive", "up")); //lat - NcHelper.setAttributes(latVar, grid1.latAttributes()); + NcHelper.setAttributes(nc3Mode, latVar, grid1.latAttributes()); latVar.addAttribute(new Attribute("axis", "Y")); //lon - NcHelper.setAttributes(lonVar, grid1.lonAttributes()); + NcHelper.setAttributes(nc3Mode, lonVar, grid1.lonAttributes()); lonVar.addAttribute(new Attribute("axis", "X")); //data1 - NcHelper.setAttributes(data1Var, grid1.dataAttributes()); + NcHelper.setAttributes(nc3Mode, data1Var, grid1.dataAttributes()); //data2 - NcHelper.setAttributes(data2Var, grid2.dataAttributes()); + NcHelper.setAttributes(nc3Mode, data2Var, grid2.dataAttributes()); //leave "define" mode nc.create(); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/HttpTest.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/HttpTest.java index 7d1e3683f..7d9437a0d 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/HttpTest.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/HttpTest.java @@ -228,7 +228,7 @@ public static void unitTest() throws Exception { String2.log("\n*** netcheck.HttpTest"); long time = System.currentTimeMillis(); HttpTest httpTest = new HttpTest("OceanWatch LAS", - //"http://oceanwatch.pfeg.noaa.gov/"); //old + //"https://oceanwatch.pfeg.noaa.gov/"); //old "http://las.pfeg.noaa.gov/oceanWatch/oceanwatch_safari.php"); //new (8/16/06) httpTest.addResponseMustInclude("AVHRR"); httpTest.addResponseMustNotInclude("ZZTop"); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/NetCheck.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/NetCheck.java index cfaaede6d..c39af433b 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/NetCheck.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/NetCheck.java @@ -37,18 +37,13 @@ *

A log file will be created with the name of the xml file + ".log". * *

For Opendap, this program uses Java DAP 1.1.7. - * See http://www.opendap.org/download/index.html for more information. + * See https://www.opendap.org/download/index.html for more information. * The .java and .class files for this are in the classes/dods directory. * *

For reading/writing .nc files, - * get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. * *

To send emails, this program uses the JavaMail API * and the JavaBeans Activation Framework extension or JAF (javax.activation). diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/OpendapTest.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/OpendapTest.java index 25a64725a..08127a71a 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/OpendapTest.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/OpendapTest.java @@ -307,7 +307,7 @@ public static void unitTest() throws Exception { time = System.currentTimeMillis(); opendapTest = new OpendapTest( "THREDDS OPeNDAP AGssta", // - "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/AG/ssta/3day", // + "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/AG/ssta/3day", // "AGssta", // "-1.0e32", // "1970-01-01", // @@ -327,7 +327,7 @@ public static void unitTest() throws Exception { time = System.currentTimeMillis(); opendapTest = new OpendapTest( "THREDDS OPeNDAP CMusfc", // - "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/CM/usfc/hday", // + "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/CM/usfc/hday", // "CMusfc", // "-1.0e32", // "1970-01-01", // @@ -347,7 +347,7 @@ public static void unitTest() throws Exception { time = System.currentTimeMillis(); opendapTest = new OpendapTest( "THREDDS OPeNDAP GAssta", // - "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/GA/ssta/hday", // + "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/GA/ssta/hday", // "GAssta", // "-1.0e32", // "1970-01-01", // @@ -367,7 +367,7 @@ public static void unitTest() throws Exception { time = System.currentTimeMillis(); opendapTest = new OpendapTest( "THREDDS OPeNDAP MBchla", // - "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MB/chla/1day", // + "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/MB/chla/1day", // "MBchla", // "-1.0e32", // "1970-01-01", // @@ -388,7 +388,7 @@ public static void unitTest() throws Exception { time = System.currentTimeMillis(); opendapTest = new OpendapTest( "THREDDS OPeNDAP QScurl", // - "http://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/curl/8day", // + "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/QS/curl/8day", // "QScurl", // "-1.0e32", // "1970-01-01", // diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/Touch.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/Touch.java index 02be6944e..7dd8e3e06 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/Touch.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/netcheck/Touch.java @@ -32,7 +32,7 @@ public class Touch { * @param urlEnd the end of the url (already percentEncoded as needed) */ private static void tThredds(String urlEnd) throws Exception { - String base = "http://oceanwatch.pfeg.noaa.gov:8081/thredds/dodsC/"; + String base = "https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/"; String find = "Dataset {"; String2.log(" touching " + base + urlEnd); String response = SSR.getUrlResponseString(base + urlEnd); @@ -232,7 +232,7 @@ public static void thredds() throws Exception { */ public static void getPiscoUrls() throws Exception { //get the main catlog - String cat = SSR.getUrlResponseString("http://oceanwatch.pfeg.noaa.gov/thredds/catalog.html"); + String cat = SSR.getUrlResponseString("https://oceanwatch.pfeg.noaa.gov/thredds/catalog.html"); //String2.log(cat); //extract all pisco urls diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/CacheOpendapStation.java 
b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/CacheOpendapStation.java index 30df4ac06..e6e5e07f4 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/CacheOpendapStation.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/CacheOpendapStation.java @@ -28,14 +28,9 @@ import java.util.Random; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -391,7 +386,7 @@ public boolean createNewCache() { String2.log(" post read table nRows=" + table.nRows() + " nCols=" + table.nColumns()); - String2.log(table.toString("row", 3)); + String2.log(table.toString(3)); //print column data ranges for (int col = 0; col < table.nColumns(); col++) String2.log("col=" + col + " " + table.getColumn(col).statsString()); @@ -684,7 +679,7 @@ public boolean createNewCache() { String2.log(" post read table nRows=" + table.nRows() + " nCols=" + table.nColumns()); - String2.log(table.toString("row", 3)); + String2.log(table.toString(3)); //print column data ranges for (int col = 0; col < table.nColumns(); col++) String2.log("col=" + col + " " + table.getColumn(col).statsString()); @@ -893,7 +888,7 @@ public boolean updateCache() { table.saveAs4DNc(fullStationFileName, 0, 1, 2, 3, null, null, null); nNewRows = table.nRows() - oldNRows; if (verbose) { - //String2.log(table.toString("row", 3)); + //String2.log(table.toString(3)); //print column data ranges //for (int col = 0; col < table.nColumns(); col++) // String2.log("col=" + col + " " + table.getColumn(col).statsString()); @@ -1123,7 +1118,7 @@ public static void test() throws Exception { //***THE TEST WILL CHANGE IF THEY THROW OUT OLD NRT DATA. 
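//A minimal sketch of the Table API rename this patch applies throughout:
//toString("row", n) becomes toString(n), and dataToCSVString(...) becomes
//dataToString(...). The table and file name below are hypothetical stand-ins,
//not from the source; toString(n) now returns the NC-style header plus the
//first n rows as CSV, with no row numbers.
Table tExample = new Table();
tExample.readASCII("smallSample.csv");   //hypothetical small ASCII data file
String2.log(tExample.toString(3));       //was tExample.toString("row", 3)
String2.log(tExample.dataToString(3));   //was tExample.dataToCSVString(3); data only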
table.clear(); table.read4DNc(fileName, null, 1, null, -1); - //String2.log(table.toString("row", 10)); + //String2.log(table.toString(10)); Test.ensureEqual(table.nColumns(), 6, ""); Test.ensureEqual(table.getColumnName(0), "longitude", ""); //was adcp_longitude Test.ensureEqual(table.getColumnName(1), "latitude", ""); //was adcp_latitude @@ -1192,7 +1187,7 @@ public static void test() throws Exception { //M1: compare first part of cache file to ascii response table.clear(); table.read4DNc(fileName, null, 1); - //String2.log(table.toString("row", 10)); + //String2.log(table.toString(10)); Test.ensureEqual(table.nColumns(), 6, ""); Test.ensureEqual(table.getColumnName(0), "adcp_longitude", ""); Test.ensureEqual(table.getColumnName(1), "adcp_latitude", ""); @@ -1257,7 +1252,7 @@ public static void test() throws Exception { //M2: compare first part of cache file to ascii response table.clear(); table.read4DNc(fileName, null, 1); - //String2.log(table.toString("row", 10)); + //String2.log(table.toString(10)); Test.ensureEqual(table.nColumns(), 6, ""); Test.ensureEqual(table.getColumnName(0), "adcp_longitude", ""); Test.ensureEqual(table.getColumnName(1), "adcp_latitude", ""); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/DigirHelper.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/DigirHelper.java index 4325007b4..6167f8461 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/DigirHelper.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/DigirHelper.java @@ -454,7 +454,7 @@ public static Table getMetadataTable(String url, String version) throws Exceptio validate, "/response/content/metadata/provider/resource", null, true); //simplify if (reallyVerbose) String2.log("DigirHelper.getMetadataTable, first 3 rows:\n" + - table.toString("row", 3)); + table.toString(3)); return table; @@ -477,7 +477,7 @@ public static void testGetMetadata() throws Exception { if (false) { //this used to work and probably still does; but I have stopped testing rutgers because it is often down. table = getMetadataTable(RUTGERS_OBIS_URL, OBIS_VERSION); - String2.log("metadata table=" + table.toString("row", 10)); + String2.log("metadata table=" + table.toString(10)); Test.ensureTrue(table.nRows() >= 142, "nRows=" + table.nRows()); Test.ensureEqual(table.getColumnName(0), "name", ""); Test.ensureEqual(table.getColumnName(1), "code", ""); @@ -507,7 +507,7 @@ public static void testGetMetadata() throws Exception { // 0 IndOBIS, India indobis Vishwas Chavan Scientist vs.chavan@ncl. 
91 20 2590 248 Asavari Navlak Technical Offi ar.navlakhe@nc 91 20 2590 248 IndOBIS (India Indian Ocean, Chavan, VIshwa http://digir.n sciname 41880 2007-06-21T02: 3 100 10000 // 1 Biological Col NIOCOLLECTION Achuthankutty, Coordinator, B achu@nio.org http://digir.n sciname 803 2006-11-03 3 10000 10000 table = getMetadataTable(IND_OBIS_URL, OBIS_VERSION); - String2.log("metadata table=" + table.toString("row", 10)); + String2.log("metadata table=" + table.toString(10)); Test.ensureTrue(table.nRows() >= 2, "nRows=" + table.nRows()); Test.ensureEqual(table.getColumnName(0), "name", ""); Test.ensureEqual(table.getColumnName(1), "code", ""); @@ -527,7 +527,7 @@ public static void testGetMetadata() throws Exception { // 1 Benthic fauna pechorasea http://www.mar Dahle, Salve Data owner sd@akvaplan.ni +47-(0)77-75 0 Cochrane, Sabi Coordinator Bi sc@akvaplan.ni+47-777-50327 Quantitative s Release with p http://www.iob http://digir.n PechoraSea O 1324 2004-09-02 18: 0 1000 10000 Denisenko, Sta dest@unitel.sp Benthic fauna // 2 N3 data of Kie n3data http://www.mar Rumohr, Heye hrumohr@ifm-ge +49-(0)431-600 Release with p http://www.iob http://digir.n N3Data O 8944 2005-11-22 17: 0 1000 10000 Benthic fauna, table = getMetadataTable(FLANDERS_OBIS_URL, OBIS_VERSION); - String2.log("metadata table=" + table.toString("row", 10)); + String2.log("metadata table=" + table.toString(10)); Test.ensureTrue(table.nRows() >= 37, "nRows=" + table.nRows()); Test.ensureEqual(table.getColumnName(0), "name", ""); Test.ensureEqual(table.getColumnName(1), "code", ""); @@ -1876,7 +1876,7 @@ public static void testObis() throws Exception { // 2 VLIZ Tisbe 415428 Abietinaria ab 1.95 51.23 NaN // 3 VLIZ Tisbe 562956 Abietinaria ab 1.62 50.77 NaN - String results = table.dataToCSVString(); + String results = table.dataToString(); String expected = "darwin:InstitutionCode,darwin:CollectionCode,darwin:CatalogNumber,darwin:ScientificName,darwin:Longitude,darwin:Latitude,obis:Temperature\n" + "VLIZ,Tisbe,405183,Abietinaria abietina,-20.0,46.0,\n" + @@ -2186,8 +2186,8 @@ public static void testOpendapStyleObis() throws Exception { } } catch (Exception e) { - String2.pressEnterToContinue( - "UNEXPECTED ERROR: " + MustBe.throwableToString(e)); + String2.pressEnterToContinue(MustBe.throwableToString(e) + + "\nUnexpected error."); } } @@ -2204,7 +2204,7 @@ public static void testObisAbietinariaTable(Table table) { "darwin:ScientificName, obis:Temperature", ""); - String results = table.dataToCSVString(); + String results = table.dataToString(); String expected = //pre 2010-07-27 was 7 rows "LON,LAT,DEPTH,TIME,ID,darwin:InstitutionCode,darwin:CollectionCode,darwin:ScientificName,obis:Temperature\n" + diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/NdbcMetStation.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/NdbcMetStation.java index c0863ec41..39f270439 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/NdbcMetStation.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/NdbcMetStation.java @@ -33,14 +33,9 @@ import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. 
- * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -195,8 +190,8 @@ * *

The .nc files created by this class NO LONGER EXACTLY follow the * Unidata Observation Dataset Convention (see - * http://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html). - * Observation Dataset Conventions + * https://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html). + * Observation Dataset Conventions because the Conventions have been abandoned. * * @author Bob Simons (bob.simons@noaa.gov) 2005-12-22 */ @@ -212,9 +207,9 @@ public class NdbcMetStation { * and near real time data (less quality controlled). * This changes every month when I get the latest historical data. */ - public static String firstNearRealTimeData = "2016-10-01T00:00:00"; + public static String firstNearRealTimeData = "2017-04-01T00:00:00"; /** Change current year ~Feb 28 when Jan historical files become available. */ - public static String HISTORICAL_FILES_CURRENT_YEAR = "2016"; + public static String HISTORICAL_FILES_CURRENT_YEAR = "2017"; public final static String ID_NAME = "ID"; @@ -513,7 +508,8 @@ public static Table readStationTxt(String fileName, String lines[], //read the data into the table Table table = new Table(); table.allowRaggedRightInReadASCII = true; - table.readASCII(fileName, lines, columnNamesLine, dataStartLine, null, null, null, null, true); + table.readASCII(fileName, lines, columnNamesLine, dataStartLine, "", + null, null, null, null, true); int nRows = table.nRows(); //convert YY (byte) (in some files) to YYYY (short) @@ -738,7 +734,20 @@ public static Table readStationTxt(String fileName, String lines[], } } - //if (verbose) String2.log("table at end of readStationText:\n" + table.toString("row", 5)); + //remove rows where time is NaN (station 42361, minutes=MM, should be 30): +//2016 12 03 01 MM MM MM MM MM MM MM MM 1015.2 MM MM MM MM -0.4 MM +//2016 12 03 00 MM MM MM MM MM MM MM MM 1015.2 MM MM MM MM -0.0 MM +//2016 12 02 23 MM MM MM MM MM MM MM MM 1013.9 MM MM MM MM -1.7 MM +//2016 12 02 22 MM MM MM MM MM MM MM MM 1015.6 MM MM MM MM -0.3 MM +//2016 12 02 21 MM MM MM MM MM MM MM MM 1015.2 MM MM MM MM -1.4 MM + int on = table.nRows(); + int tn = table.oneStepApplyConstraint(0, metColumnNames[timeIndex], "!=", "NaN"); + if (on != tn) { + String2.log("\n*** removed " + (on-tn) + " rows with time=NaN!"); + Math2.sleep(5000); + } + + //if (verbose) String2.log("table at end of readStationText:\n" + table.toString(5)); //return the table return table; @@ -1415,7 +1424,8 @@ public static void makeStationNcFile(String ndbcStationHtmlDir, double lat = Double.NaN; double lon = Double.NaN; String location = lines[ownerLine + 3]; - if (stationName.equals("46108")) { lat = 59.760; lon = -152.090; + if (stationName.equals("41117")) { lat = 30.000; lon = -81.080; + } else if (stationName.equals("46108")) { lat = 59.760; lon = -152.090; } else if (stationName.equals("42097")) { lat = 25.7; lon = -83.65; } else if (stationName.equals("44089")) { lat = 37.756; lon = -75.334; } else if (stationName.equals("44090")) { lat = 41.840; lon = -70.329; @@ -1665,7 +1675,7 @@ public static void makeStationNcFile(String ndbcStationHtmlDir, //save as UPPERCASE-name .nc file String id0 = cumulative.getColumn(idIndex).getString(0); - //String2.log("\nbefore save\n" + cumulative.toString("row", 5)); + //String2.log("\nbefore save\n" + cumulative.toString(5)); Test.ensureEqual(id0.length(), 5, "ID length should be 5: " + id0); Test.ensureTrue(((StringArray)cumulative.getColumn(idIndex)).maxStringLength() <= 5, "ID maxlength should be <= 5");
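//A minimal sketch of the time=NaN guard added to readStationTxt above, assuming
//oneStepApplyConstraint(firstRow, columnName, op, value) discards rows that fail
//the constraint and returns the new row count (as it is used there). The table
//and column name here are hypothetical stand-ins.
Table tGuard = new Table();
tGuard.readASCII("anySmallFile.csv");    //hypothetical file with a "TIME" column
int oldN = tGuard.nRows();
int newN = tGuard.oneStepApplyConstraint(0, "TIME", "!=", "NaN");
if (oldN != newN)
    String2.log("removed " + (oldN - newN) + " rows where TIME is NaN");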
cumulative.saveAs4DNcWithStringVariable(ndbcNcDir + "NDBC_" + stdStationName + "_met.nc", @@ -1777,7 +1787,7 @@ public static void makeSeparateNcFiles(String ndbcStationHtmlDir, //!lcStationName.equals(ignoreStationsBefore) || //this following lines are standard stations to avoid - //unless otherwise marked: no station info (checked 2012-02-26) + //unless otherwise marked: no station info (checked 2016-11-26) //e.g., http://www.ndbc.noaa.gov/station_page.php?station=4h362 lcStationName.equals("32st1") || lcStationName.equals("32st2") || @@ -1787,6 +1797,8 @@ public static void makeSeparateNcFiles(String ndbcStationHtmlDir, lcStationName.equals("42a01") || lcStationName.equals("42a02") || lcStationName.equals("42a03") || + lcStationName.equals("46067") || + lcStationName.equals("46074") || lcStationName.equals("46a35") || lcStationName.equals("46a54") || lcStationName.equals("47072") || @@ -1807,6 +1819,7 @@ public static void makeSeparateNcFiles(String ndbcStationHtmlDir, lcStationName.equals("4h902") || lcStationName.equals("51wh1") || lcStationName.equals("51wh2") || + lcStationName.equals("b040z") || lcStationName.equals("misma") || lcStationName.equals("plsfa") || lcStationName.equals("mnmm4")) { //bad info in some files @@ -2006,7 +2019,7 @@ public static void download45DayTxtFiles(String ndbc45DayTxtDir) throws Exceptio public static void displayNc(String fullFileName, int showFirstNRows) throws Exception { Table table = new Table(); table.read4DNc(fullFileName, null, 0, ID_NAME, 4); //0 to force looking at what is actually there, without unpacking - String2.log(fullFileName + "=" + table.toString("row", showFirstNRows)); + String2.log(fullFileName + "=" + table.toString(showFirstNRows)); } @@ -2429,27 +2442,27 @@ public static void test46088(Table table) throws Exception { // http://www.ndbc.noaa.gov/data/realtime2/46088.txt //45 day //top line has precedence //#YY MM DD hh mm WDIR WSPD GST WVHT DPD APD MWD PRES ATMP WTMP DEWP VIS PTDY TIDE //#yr mo dy hr mn degT m/s m/s m sec sec degT hPa degC degC degC mi hPa ft - //2016 10 01 01 20 230 4.0 5.0 0.1 MM 4.7 MM 1013.8 10.6 10.9 9.5 MM MM MM - //2016 10 01 00 50 230 4.0 5.0 0.1 MM 4.5 MM 1013.9 10.7 10.9 9.5 MM -0.8 MM - seconds = Calendar2.isoStringToEpochSeconds("2016-10-01T01"); //50 min rounds to next hour; usually test 01T01 + //2017 04 01 01 20 270 1.0 1.0 0.1 MM 4.4 MM 1024.6 8.6 8.8 5.5 MM MM MM + //2017 04 01 00 50 300 2.0 2.0 0.2 MM 5.0 MM 1025.3 8.7 8.8 5.5 MM -2.3 MM + seconds = Calendar2.isoStringToEpochSeconds("2017-04-01T01"); //50 min rounds to next hour; usually test 01T01 row = table.getColumn(timeIndex).indexOf("" + seconds, 0); Test.ensureEqual(table.getStringData(idIndex, row), "46088", ""); Test.ensureEqual(table.getFloatData(latIndex, row), 48.333f, ""); Test.ensureEqual(table.getFloatData(lonIndex, row), -123.167f, ""); Test.ensureEqual(table.getDoubleData(depthIndex, row), 0, ""); - Test.ensureEqual(table.getDoubleData(wdIndex, row), 230, ""); - Test.ensureEqual(table.getFloatData(wspdIndex, row), 4f, ""); - Test.ensureEqual(table.getFloatData(gstIndex, row), 5f, ""); + Test.ensureEqual(table.getDoubleData(wdIndex, row), 270, ""); + Test.ensureEqual(table.getFloatData(wspdIndex, row), 1f, ""); + Test.ensureEqual(table.getFloatData(gstIndex, row), 1f, ""); Test.ensureEqual(table.getFloatData(wvhtIndex, row), .1f, ""); Test.ensureEqual(table.getFloatData(dpdIndex, row), Float.NaN, ""); - Test.ensureEqual(table.getFloatData(apdIndex, row), 4.7f, ""); + Test.ensureEqual(table.getFloatData(apdIndex, row), 4.4f, 
""); Test.ensureEqual(table.getFloatData(mwdIndex, row), Float.NaN, ""); - Test.ensureEqual(table.getFloatData(aprsIndex, row), 1013.8f, ""); - Test.ensureEqual(table.getFloatData(atmpIndex, row), 10.6f, ""); - Test.ensureEqual(table.getFloatData(wtmpIndex, row), 10.9f, ""); - Test.ensureEqual(table.getFloatData(dewpIndex, row), 9.5f, ""); + Test.ensureEqual(table.getFloatData(aprsIndex, row), 1024.6f, ""); + Test.ensureEqual(table.getFloatData(atmpIndex, row), 8.6f, ""); + Test.ensureEqual(table.getFloatData(wtmpIndex, row), 8.8f, ""); + Test.ensureEqual(table.getFloatData(dewpIndex, row), 5.5f, ""); Test.ensureEqual(table.getFloatData(visIndex, row), Float.NaN, ""); //(float)Math2.roundTo(18.5 * Math2.kmPerMile, decimalDigits[visIndex]), ""); - Test.ensureEqual(table.getFloatData(ptdyIndex, row), -.8f, ""); + Test.ensureEqual(table.getFloatData(ptdyIndex, row), -2.3f, ""); Test.ensureEqual(table.getFloatData(tideIndex, row), Float.NaN, ""); //(float)Math2.roundTo(3.0 * Math2.meterPerFoot, decimalDigits[tideIndex]), ""); String2.log("test46088 was successful"); @@ -2484,28 +2497,28 @@ public static void test46088AddLastNDays(Table table) throws Exception { //top row has precedence, but not if file already had lower row of data //#YY MM DD hh mm WDIR WSPD GST WVHT DPD APD MWD PRES ATMP WTMP DEWP VIS PTDY TIDE //#yr mo dy hr mn degT m/s m/s m sec sec degT hPa degC degC degC mi hPa ft - //2016 10 25 12 20 130 12.0 14.0 1.1 5 3.9 115 1005.5 11.5 10.4 8.6 MM MM MM - //2016 10 25 11 50 140 13.0 16.0 1.2 5 4.0 104 1005.2 11.9 10.4 8.5 MM +0.5 MM - double seconds = Calendar2.isoStringToEpochSeconds("2016-10-25T12"); //rounded + //2017 04 29 02 20 240 9.0 11.0 0.3 2 2.7 87 1026.1 10.2 9.5 6.3 MM MM MM + //2017 04 29 01 50 230 9.0 10.0 0.3 11 3.2 358 1026.1 10.2 9.5 6.2 MM -0.9 MM + double seconds = Calendar2.isoStringToEpochSeconds("2017-04-29T02"); //rounded int row = table.getColumn(timeIndex).indexOf("" + seconds, 0); Test.ensureTrue(row >= 0, "row=" + row); Test.ensureEqual(table.getStringData(idIndex, row), "46088", ""); Test.ensureEqual(table.getFloatData(latIndex, row), 48.333f, ""); Test.ensureEqual(table.getFloatData(lonIndex, row), -123.167f, ""); Test.ensureEqual(table.getDoubleData(depthIndex, row), 0, ""); - Test.ensureEqual(table.getFloatData(wdIndex, row), 130, ""); - Test.ensureEqual(table.getFloatData(wspdIndex, row), 12f, ""); - Test.ensureEqual(table.getFloatData(gstIndex, row), 14f, ""); - Test.ensureEqual(table.getFloatData(wvhtIndex, row), 1.1f, ""); - Test.ensureEqual(table.getFloatData(dpdIndex, row), 5f, ""); - Test.ensureEqual(table.getFloatData(apdIndex, row), 3.9f, ""); - Test.ensureEqual(table.getFloatData(mwdIndex, row), 115, ""); - Test.ensureEqual(table.getFloatData(aprsIndex, row), 1005.5f, ""); - Test.ensureEqual(table.getFloatData(atmpIndex, row), 11.5f, ""); - Test.ensureEqual(table.getFloatData(wtmpIndex, row), 10.4f, ""); - Test.ensureEqual(table.getFloatData(dewpIndex, row), 8.6f, ""); + Test.ensureEqual(table.getFloatData(wdIndex, row), 240, ""); + Test.ensureEqual(table.getFloatData(wspdIndex, row), 9f, ""); + Test.ensureEqual(table.getFloatData(gstIndex, row), 11f, ""); + Test.ensureEqual(table.getFloatData(wvhtIndex, row), 0.3f, ""); + Test.ensureEqual(table.getFloatData(dpdIndex, row), 2f, ""); + Test.ensureEqual(table.getFloatData(apdIndex, row), 2.7f, ""); + Test.ensureEqual(table.getFloatData(mwdIndex, row), 87f, ""); + Test.ensureEqual(table.getFloatData(aprsIndex, row), 1026.1f, ""); + Test.ensureEqual(table.getFloatData(atmpIndex, row), 10.2f, ""); 
+ Test.ensureEqual(table.getFloatData(wtmpIndex, row), 9.5f, ""); + Test.ensureEqual(table.getFloatData(dewpIndex, row), 6.3f, ""); Test.ensureEqual(table.getFloatData(visIndex, row), Float.NaN, ""); //(float)Math2.roundTo(18.5 * Math2.kmPerMile, decimalDigits[visIndex]), ""); - Test.ensureEqual(table.getFloatData(ptdyIndex, row), 0.5f, ""); + Test.ensureEqual(table.getFloatData(ptdyIndex, row), -.9f, ""); Test.ensureEqual(table.getFloatData(tideIndex, row), Float.NaN, "");//(float)Math2.roundTo(3.0 * Math2.meterPerFoot, decimalDigits[tideIndex]), ""); String2.log("test46088AddLastNDays was successful"); @@ -2762,9 +2775,9 @@ public static void main(String args[]) throws Exception { // (Once a year ~March 1, the new yearly files appear. // copy last year's monthly files // cd \data\ndbc - // md ndbcMetHistoricalTxt2015 (last year) - // copy ndbcMetHistoricalTxt\*2015.txt ndbcMetHistoricalTxt2015 - // del ndbcMetHistoricalTxt\*2015.txt + // md ndbcMetHistoricalTxt2016 (last year) + // copy ndbcMetHistoricalTxt\*2016.txt ndbcMetHistoricalTxt2016 + // del ndbcMetHistoricalTxt\*2016.txt // change HISTORICAL_FILES_CURRENT_YEAR at top of file to the current year, // then follow normal update procedure.) //2011-02-28 I re-downloaded ALL of the files (since previous years had been modified). @@ -2773,20 +2786,20 @@ public static void main(String args[]) throws Exception { //historical monthly files are from: http://www.ndbc.noaa.gov/data/stdmet// e.g., Jan //!!!!**** Windows GUI My Computer doesn't show all the files in the directory! // Use DOS window "dir" or Linux ls instead of the GUI. - //downloadNewHistoricalTxtFiles(ndbcHistoricalTxtDir); //time varies, last done 2016-10-24 + //downloadNewHistoricalTxtFiles(ndbcHistoricalTxtDir); //time varies, last done 2017-04-28 // 3) *** get latest 45 day files //DON'T download45DayTextFiles after 45 days after last historicalTxt date. - //download45DayTxtFiles(ndbc45DayTxtDir); //15-30 minutes, last done 2016-10-24 + //download45DayTxtFiles(ndbc45DayTxtDir); //15-30 minutes, last done 2017-04-28 // 4) *** Make the nc files //!!!!**** EACH MONTH, SOME TESTS NEED UPDATING: SEE "UPDATE_EACH_MONTH" //no station info for a station? search for "no station info" above // or lat lon available? 
search for "get the lat and lon" above - boolean testMode = false; //used to: always run 'true' then run 'false' + boolean testMode = false; //I used to always run 'true' then run 'false', but not usually now String ignoreStationsBefore = " "; //use " " to process all stations or lowercase characters to start in middle //makeSeparateNcFiles(ndbcStationHtmlDir, ndbcHistoricalTxtDir, ndbc45DayTxtDir, - // ndbcNcDir, ignoreStationsBefore, testMode); //M4700 ~1 hr, was ~3 hrs //last done 2016-09-26 + // ndbcNcDir, ignoreStationsBefore, testMode); //M4700 ~2 hrs, was ~3 hrs //last done 2017-04-28 test31201Nc(ndbcNcDir); test41009Nc(ndbcNcDir); test41015Nc(ndbcNcDir); @@ -2804,14 +2817,14 @@ public static void main(String args[]) throws Exception { //(5 days takes 12 minutes) //but 45 is more likely to get more information (if needed and if available) //(45 days takes 25 minutes) - testMode = true; //always run 'true' then run 'false' - //addLastNDaysInfo(ndbcNcDir, 5, testMode); //usually 5 + testMode = false; //always run 'true' then run 'false' + addLastNDaysInfo(ndbcNcDir, 5, testMode); //usually 5 //!!!!**** EACH MONTH, THIS TEST NEED UPDATING test46088AddLastNDaysNc(ndbcNcDir); /* 7) *** On laptop: rename ndbcMet ndbcMett - cd \data\ndbc, use git bash: tar zcvf ndbcMett.tgz ndbcMett + use git bash: cd \data\ndbc, tar zcvf ndbcMett.tgz ndbcMett * ftp ndbcMett.tgz to coastwatch's /u00/data/points cd /u00/data/points tar zxvf ndbcMett.tgz diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/PointDataSetFromStationVariables.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/PointDataSetFromStationVariables.java index 94a7738c5..ca3c1c952 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/PointDataSetFromStationVariables.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/PointDataSetFromStationVariables.java @@ -31,14 +31,9 @@ import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; @@ -152,7 +147,7 @@ public static void makeGroupsOfPointDataSets( * but variableFactor may be not 1 or the file may have non-udUnits units. * Also, useful if the units exactly match the udUnits for other similar * pointDatasets (e.g, NDBC). - * See http://www.unidata.ucar.edu/software/udunits/udunits.txt . + * See https://www.unidata.ucar.edu/software/udunits/udunits.txt . * * @param courtesy the group to credit for this data (usually 25 char or less) * @param minStationX the minimum acceptable station lon (may be 0 - 360 or -180 - 180). 
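//A condensed sketch of the monthly NDBC update sequence spelled out in the
//NdbcMetStation.main comments above, using only method names that appear there,
//with the directory variables main already defines; argument values mirror the
//commented-out calls, not a verified run:
downloadNewHistoricalTxtFiles(ndbcHistoricalTxtDir);   //new historical monthly files
download45DayTxtFiles(ndbc45DayTxtDir);                //latest 45 day files
makeSeparateNcFiles(ndbcStationHtmlDir, ndbcHistoricalTxtDir, ndbc45DayTxtDir,
    ndbcNcDir, " ", false);                            //" " = process all stations; testMode=false
addLastNDaysInfo(ndbcNcDir, 5, false);                 //usually 5 days; testMode=false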
@@ -1189,7 +1184,7 @@ public static void makeCachesAndDataSets(String pointFilesDir, public final static String[][] mbariNrtVariableInfo = { { //metsys //name, palette info and suggested range should match ndbc when possible - //units must be from http://www.unidata.ucar.edu/software/udunits/udunits.txt + //units must be from https://www.unidata.ucar.edu/software/udunits/udunits.txt //unofficial system: 4th letter n=near real time s=science quality //inFileVarName varName title palette paletteScale factor min max udUnits "AirPressure` PMBaprn` Air Pressure, Near Real Time` Rainbow` Linear` 1` 960` 1040` hPa", //metsys files have metadata @@ -1210,7 +1205,7 @@ public static void makeCachesAndDataSets(String pointFilesDir, (each group uses different dimensions for the variables).*/ public final static String[][] mbariSqVariableInfo = { { //name, palette info and suggested range should match ndbc when possible - //units must be from http://www.unidata.ucar.edu/software/udunits/udunits.txt + //units must be from https://www.unidata.ucar.edu/software/udunits/udunits.txt //unofficial system: 4th letter n=near real time s=science quality //inFileVarName varName title palette paletteScale factor min max udUnits "AIR_PRESS_HR` PMBaprs` Air Pressure, Science Quality` Rainbow` Linear` 1` 960` 1040` hPa", @@ -1219,7 +1214,7 @@ public static void makeCachesAndDataSets(String pointFilesDir, "WIND_U_COMPONENT_HR` PMBwsus` Wind Speed, Science Quality, Zonal` BlueWhiteRed` Linear` 1` -20` 20` m s^-1", "WIND_V_COMPONENT_HR` PMBwsvs` Wind Speed, Science Quality, Meridional` BlueWhiteRed` Linear` 1` -20` 20` m s^-1"}, { //name, palette info and suggested range should match ndbc when possible - //units must be from http://www.unidata.ucar.edu/software/udunits/udunits.txt + //units must be from https://www.unidata.ucar.edu/software/udunits/udunits.txt "U_COMPONENT_UNCORR_HR` PMBcrus` Current, Science Quality, Zonal` BlueWhiteRed` Linear` .01` -.5` .5` m s-1", //.01 = convert cm/s to m/s "V_COMPONENT_UNCORR_HR` PMBcrvs` Current, Science Quality, Meridional` BlueWhiteRed` Linear` .01` -.5` .5` m s-1"}, //.01 = convert cm/s to m/s //"ECHO_INTENSITY_BEAM1_HR` PMBei1s` Echo Intensity, Science Quality, Beam 1` Rainbow` Linear` 1` 0` 100` counts", //range? @@ -1228,7 +1223,7 @@ public static void makeCachesAndDataSets(String pointFilesDir, //"ECHO_INTENSITY_BEAM4_HR` PMBei4s` Echo Intensity, Science Quality, Beam 4` Rainbow` Linear` 1` 0` 100` counts"}, //range? { //the lat/lon/depth MET variables //name, palette info and suggested range should match ndbc when possible - //units must be from http://www.unidata.ucar.edu/software/udunits/udunits.txt + //units must be from https://www.unidata.ucar.edu/software/udunits/udunits.txt "CONDUCTIVITY_HR` PMBcnds` Conductivity, Science Quality` Rainbow` Linear` 1` 0` 100` Siemens m-1", //range? "PRESSURE_HR` PMBsprs` Sea Pressure, Science Quality` Rainbow` Linear` 1` 0` 100` db", //range? "SALINITY_HR` PMBsals` Salinity, Science Quality` Rainbow` Linear` 1` 0` 100` PSU", //range? 
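//A minimal sketch of how one of the backtick-delimited variableInfo rows above
//could be unpacked; the nine fields (inFileVarName, varName, title, palette,
//paletteScale, factor, min, max, udUnits) follow the column comment in the
//source. Plain String.split is illustrative, not necessarily what the class uses.
String tRow = "AIR_PRESS_HR` PMBaprs` Air Pressure, Science Quality` Rainbow` Linear` 1` 960` 1040` hPa";
String[] tFields = tRow.split("`");
for (int i = 0; i < tFields.length; i++)
    tFields[i] = tFields[i].trim();
String tInFileVarName = tFields[0];                //"AIR_PRESS_HR"
double tFactor = Double.parseDouble(tFields[5]);   //1 here; rows with .01 convert cm/s to m/s
String tUnits = tFields[8];                        //"hPa", a udunits string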
@@ -1735,7 +1730,7 @@ public static void testNc4DMakeSubset() throws Exception { String2.log("\n*** whole world: all stations, 1 time, lonPM180"); table = pointDataSet.makeSubset( -180, 180, -90, 90, 0, 0, "2004-01-07", "2004-01-07"); - //String2.log(tTable.toString(Integer.MAX_VALUE)); + //String2.log(tTable.toString()); int tn = table.nRows(); //this changes, but it is good to keep the test in case the number changes badly (e.g., smaller) Test.ensureEqual(table.nRows(), 658, "all sta, 1 time, nRows"); @@ -1782,7 +1777,7 @@ public static void testNc4DMakeSubset() throws Exception { -180, 180, -90, 90, 0, 0, "2004-01-01", "2004-02-01"); //String2.log(table.toString(1000)); table.convertToFakeMissingValues(); //so I see what file will look like - String2.log(table.toString("row", 1)); + String2.log(table.toString(1)); tn = table.nRows(); for (int i = 0; i < tn; i++) { double tLon = table.getDoubleData(0, i); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/StationVariableNc4D.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/StationVariableNc4D.java index 4434b2cba..baa5785ef 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/StationVariableNc4D.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/StationVariableNc4D.java @@ -27,14 +27,9 @@ //import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * 2013-02-21 new netcdfAll uses Java logging, not slf4j. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java index 35b5fdda9..7f3f90f2e 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/Table.java @@ -29,6 +29,7 @@ import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; +import java.io.InputStreamReader; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; @@ -86,13 +87,9 @@ //import org.xml.sax.helpers.DefaultHandler; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. 
*/ import ucar.nc2.*; import ucar.nc2.constants.FeatureType; @@ -172,6 +169,10 @@ public class Table { public static String BGCOLOR = "#ffffcc"; + //this will only change if changes are made that aren't backwards and forwards compatible + public final static int ENHANCED_VERSION = 3; + + //related to ERDDAP /** * This is a list of all operator symbols @@ -191,6 +192,17 @@ public class Table { "=", "<", ">"}; public final static String SEQUENCE_NAME = "s"; public static String QUERY_ERROR = "Query error: "; + //EDStatic replaces this with queryErrorOrderByClosest from messages.xml + public static String ORDER_BY_CLOSEST_ERROR = + "For orderByClosest, you must specify a CSV list of orderBy column names " + + "(each of which must be in the list of results variables) plus the " + + "interval for the last orderBy variable (e.g., \"stationID,time,10 minutes\")."; + //EDStatic replaces this with queryErrorOrderByLimit from messages.xml + public static String ORDER_BY_LIMIT_ERROR = + "For orderByLimit, you must specify a CSV list of orderBy column names " + + "(each of which must be in the list of results variables, but 0 names is okay) plus the " + + "maximum number of rows for each group (e.g., \"stationID,100\")."; + public static String NOT_FOUND_EOF = " not found before end-of-file."; /** * Igor Text File File reference: in Bob's /programs/igor/ or @@ -198,7 +210,7 @@ public class Table { *
Command reference: in Bob's /programs/igor/ or * http://www.wavemetrics.net/doc/igorman/V-01%20Reference.pdf */ - public final static String IgorCharset = "ISO-8859-1"; //they are vague, but it is 1-byte, not UTF variant + public final static String IgorCharset = String2.ISO_8859_1; //they are vague, but it is 1-byte, not UTF variant public final static String IgorNanString = "NaN"; //Igor Text File Format: use "NaN" public final static String IgorEndOfLine = "\r"; //Igor Text File Format: "a carriage return at the end of the line" //Igor Text File Format: "use the standard Igor date format (number of seconds since 1/1/1904)" @@ -379,7 +391,7 @@ public Table subset(int startRow, int stride, int endRow) { int n = columns.size(); for (int i = 0; i < n; i++) - tTable.columns.add(getColumn(i).subset(startRow, stride, endRow)); + tTable.columns.add(columns.get(i).subset(startRow, stride, endRow)); tTable.columnNames = (StringArray)columnNames.clone(); @@ -834,7 +846,7 @@ public void insertBlankRow(int index) throws Exception { throw new Exception("index=" + index + " must be between 0 and " + nRows() + "."); int nCols = nColumns(); for (int col = 0; col < nCols; col++) - getColumn(col).atInsertString(index, ""); + columns.get(col).atInsertString(index, ""); } /** @@ -861,7 +873,7 @@ public void removeRow(int row) { public void removeRows(int from, int to) { int nCols = nColumns(); for (int col = 0; col < nCols; col++) { - PrimitiveArray pa = getColumn(col); + PrimitiveArray pa = columns.get(col); int nRows = pa.size(); if (from < nRows) pa.removeRange(from, Math.min(nRows, to)); @@ -875,7 +887,7 @@ public void removeRows(int from, int to) { public void removeAllRows() { int nCols = nColumns(); for (int col = 0; col < nCols; col++) - getColumn(col).clear(); + columns.get(col).clear(); } /** @@ -895,7 +907,7 @@ public BitSet rowsWithData() { int keepN = 0; for (int col = 0; col < tnCols; col++) { //this is very similar to lastRowWithData - PrimitiveArray pa = getColumn(col); + PrimitiveArray pa = columns.get(col); Attributes atts = columnAttributes(col); if (pa.elementClass() == String.class) { String mv = atts.getString("missing_value"); //may be null @@ -983,7 +995,7 @@ public int lastRowWithData() { int lastRowWithData = -1; for (int col = 0; col < tnCols; col++) { //this is very similar to rowsWithData - PrimitiveArray pa = getColumn(col); + PrimitiveArray pa = columns.get(col); Attributes atts = columnAttributes(col); if (pa.elementClass() == String.class) { String mv = atts.getString("missing_value"); //may be null @@ -1355,9 +1367,10 @@ public String getNCHeader(String dimensionName) { "\t" + dimensionName + " = " + nRows() + " ;\n"); int nColumns = nColumns(); for (int col = 0; col < nColumns; col++) { - PrimitiveArray pa = getColumn(col); + PrimitiveArray pa = columns.get(col); if (pa instanceof StringArray) { StringArray sa = (StringArray)pa; + //String2.log(">>getNcHeader sa=" + sa.toNccsvAttString()); sb.append("\t" + getColumnName(col) + NcHelper.StringLengthSuffix + " = " + sa.maxStringLength() + " ;\n"); } @@ -1366,7 +1379,7 @@ public String getNCHeader(String dimensionName) { //variables sb.append("variables:\n"); for (int col = 0; col < nColumns; col++) { - PrimitiveArray pa = getColumn(col); + PrimitiveArray pa = columns.get(col); String columnName = getColumnName(col); if (pa instanceof StringArray) { StringArray sa = (StringArray)pa; @@ -1385,105 +1398,43 @@ public String getNCHeader(String dimensionName) { return sb.toString(); } - /** - * This makes a string representation 
of this data. - * - * @param dimensionName the name for the rows (e.g., "time", "row", "station", "observation") - * @param showFirstNRows use Integer.MAX_VALUE for all rows. - * @return a string representation of this point data - */ - public String toString(String dimensionName, int showFirstNRows) { - ensureValid(); //throws Exception if not - String result = getNCHeader(dimensionName); - - /* - int nRows = nRows(); - int nColumns = nColumns(); - StringBuilder sb = new StringBuilder( - "Table[nRows=" + nRows + " nColumns=" + nColumns + "\n"); - - //print global attributes - sb.append("\tglobal attributes:\n"); - sb.append(globalAttributes.toNcString("\t\t", " ;") - - //print data attributes - for (int col = 0; col < nColumns; col++) { - sb.append(" Column " + col + " = " + getColumnName(col) + " (" + - getColumn(col).elementClassString() + ")\n"); - sb.append(columnAttributes(col).toNcString("\t\t" + getColumnName(col) + ":", " ;")); - } - } -*/ - - return result + dataToString(showFirstNRows); - } - - /** - * This prints the data to a crude table. - * - * @param showFirstNRows use Integer.MAX_VALUE for all rows. - */ - public String dataToString(int showFirstNRows) { - if (showFirstNRows <= 0) - return ""; - StringBuilder sb = new StringBuilder(); - showFirstNRows = Math.min(showFirstNRows, nRows()); - sb.append(" Row " + getColumnarColumnNamesString()); - for (int row = 0; row < showFirstNRows; row++) - sb.append("\n" + String2.right("" + row, 7) + " " + - getRowToColumnarString(row)); - sb.append('\n'); - return sb.toString(); - } - /** * This prints the metadata and the data to a CSV table. * This shows row numbers. */ - public String toCSVString() { - return toCSVString(Integer.MAX_VALUE); + public String toString() { + return toString(Integer.MAX_VALUE); } - /** - * This prints the metadata and the data to a CSV table. - * This shows row numbers. - * - * @param showFirstNRows use Integer.MAX_VALUE for all rows. - * If not all rows are shown, this adds a "..." line to the output. - */ - public String toCSVString(int showFirstNRows) { - if (showFirstNRows < 0) - return ""; - return getNCHeader("row") + dataToCSVString(showFirstNRows); - } /** - * This is convenience for dataToCSVString(showAllRows, don't showRowNumbers). - * This doesn't show row numbers. + * This returns a string CSV representation of this data. + * + * @param showFirstNRows use Integer.MAX_VALUE for all rows. + * @return a string representation of this point data */ - public String dataToCSVString() { - return dataToCSVString(Integer.MAX_VALUE, false); + public String toString(int showFirstNRows) { + ensureValid(); //throws Exception if not + return getNCHeader("row") + dataToString(showFirstNRows); } /** - * This is convenience for dataToCSVString(int showFirstNRows, showRowNumber=true). - * This shows row numbers. + * This is convenience for dataToString(Integer.MAX_VALUE). * - * @param showFirstNRows use Integer.MAX_VALUE for all rows. - * If not all rows are shown, this adds a "..." line to the output. */ - public String dataToCSVString(int showFirstNRows) { - return dataToCSVString(showFirstNRows, true); + public String dataToString() { + return dataToString(Integer.MAX_VALUE); } /** - * This prints the data to a CSV table. + * This is convenience for dataToString(0, showFirstNRows). + * This doesn't show row numbers. * * @param showFirstNRows use Integer.MAX_VALUE for all rows. * If not all rows are shown, this adds a "..." line to the output.
*/ - public String dataToCSVString(int showFirstNRows, boolean showRowNumber) { - return dataToCSVString(0, showFirstNRows, showRowNumber); + public String dataToString(int showFirstNRows) { + return dataToString(0, showFirstNRows); } /** @@ -1493,23 +1444,16 @@ public String dataToCSVString(int showFirstNRows, boolean showRowNumber) { * @param stop one past the last row to be included. * If not all rows are shown, this adds a "..." line to the output. */ - public String dataToCSVString(int start, int stop, boolean showRowNumber) { + public String dataToString(int start, int stop) { ensureValid(); start = Math.max(start, 0); stop = Math.min(stop, nRows()); StringBuilder sb = new StringBuilder(); int nCols = nColumns(); - sb.append((showRowNumber? "row," : "") + - getColumnNamesCSVString() + "\n"); + sb.append(getColumnNamesCSVString() + "\n"); for (int row = start; row < stop; row++) { - if (showRowNumber) - sb.append(row + ","); for (int col = 0; col < nCols; col++) { - String s = getStringData(col, row); - if (s.indexOf(',') >= 0 || s.indexOf('"') >= 0 || - s.indexOf('\n') >= 0 || s.indexOf('\t') >= 0) - s = String2.toJson(s); - sb.append(s); + sb.append(columns.get(col).getNccsvDataString(row)); if (col == nCols - 1) sb.append('\n'); else @@ -1521,37 +1465,6 @@ public String dataToCSVString(int start, int stop, boolean showRowNumber) { return sb.toString(); } - - /** - * This makes a string representation (toString(true)) of this point data. - * - * @return a string representation of this table - */ - public String toString() { - //observationDimension is from deprecated "Unidata Observation Dataset v1.0" conventions, - //but if it exists, use it. - String observationDimension = globalAttributes.getString("observationDimension"); - if (observationDimension == null) - observationDimension = "row"; - return toString(observationDimension, Integer.MAX_VALUE); - } - - /** - * For diagnostic purposes: this returns a string with the names of the - * columns. - * Note that columns names will be truncated at 14 characters. - * - * @return a string with the column names. - */ - public String getColumnarColumnNamesString() { - StringBuilder sb = new StringBuilder(); - for (int col = 0; col < nColumns(); col++) { - sb.append(' ' + String2.right(String2.noLongerThan( - columnNames.get(col), 14), 14)); - } - return sb.toString(); - } - /** * This returns a Comma Separated Value (CSV) string with the names of the columns. * @@ -1570,32 +1483,6 @@ public String getColumnNamesCSSVString() { return columnNames.toString(); } - /** - * For diagnostic purposes: this returns a string with the values for one - * row in a columnar format String. - * Note that strings values will be truncated at 14 characters. - * - * @param row the index of the desired row (0..length-1) - * @return a string with the data values. - */ - public String getRowToColumnarString(int row) { - StringBuilder sb = new StringBuilder(); - for (int col = 0; col < nColumns(); col++) { - PrimitiveArray array = getColumn(col); - sb.append(' '); - if (array instanceof StringArray) { - sb.append(String2.right(String2.noLongerThan(array.getString(row), 14), 14)); - } else { - double d = array.getDouble(row); - long tl = Math.round(d); - if (d == tl && Math.abs(d) < 1e14) - sb.append(String2.right("" + tl, 14)); - else sb.append(String2.right(String2.genEFormat6(d), 14)); - } - } - return sb.toString(); - } - /** * This returns the Attributes with the global attributes. 
* @@ -1649,7 +1536,7 @@ public Attributes columnAttributes(String columnName) { * *

NOW OBSOLETE [This sets most of the metadata needed to comply with * Unidata Observation Dataset Conventions - * (http://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html) + * (https://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html) [GONE!] * To fully comply, you may need to add the global attribute * observationDimension (see saveAsFlatNc). ] * @@ -1734,7 +1621,7 @@ public void setAttributes(int lonIndex, int latIndex, int depthIndex, int timeIn * This also sets column attributes for the lon, lat, depth, and time variables * (if the index isn't -1). *

OBSOLETE [For Unidata Observation Dataset Conventions (e.g., _Coordinate), - * see http://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html .] + * see https://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html .] [GONE!] * * @param lonIndex identifies the longitude column (or -1 if none) * @param latIndex identifies the latitude column (or -1 if none) @@ -1886,7 +1773,7 @@ public void setActualRangeAndBoundingBox( * global attributes, and the Google Earth-style * Southernmost_Northing, ... Easternmost_Easting. *

OBSOLETE [For Unidata Observation Dataset Conventions (e.g., _Coordinate), - * see http://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html .] + * see https://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html .] [GONE!] * */ public void unsetActualRangeAndBoundingBox() { @@ -1971,10 +1858,10 @@ public void ensureValid() { //check that all columns have the same size int nRows = nRows(); //from column[0] for (int col = 1; col < nColumns(); col++) - if (getColumn(col).size() != nRows) + if (columns.get(col).size() != nRows) throw new SimpleException( "Invalid Table: " + - "column[" + col + "=" + getColumnName(col) + "].size=" + getColumn(col).size() + + "column[" + col + "=" + getColumnName(col) + "].size=" + columns.get(col).size() + " != column[0=" + getColumnName(0) + "].size=" + nRows); } @@ -1989,12 +1876,12 @@ public void makeColumnsSameSize() { int maxNRows = 0; int tNCol = nColumns(); for (int col = 0; col < tNCol; col++) - maxNRows = Math.max(getColumn(col).size(), maxNRows); + maxNRows = Math.max(columns.get(col).size(), maxNRows); //ensure all columns have correct maxNRows if (maxNRows > 0) { for (int col = 0; col < tNCol; col++) { - PrimitiveArray pa = getColumn(col); + PrimitiveArray pa = columns.get(col); pa.addNDoubles(maxNRows - pa.size(), Double.NaN); } } @@ -2011,12 +1898,12 @@ public void ensureColumnsAreSameSize_LastValue() { int maxNRows = 0; int tNCol = nColumns(); for (int col = 0; col < tNCol; col++) - maxNRows = Math.max(getColumn(col).size(), maxNRows); + maxNRows = Math.max(columns.get(col).size(), maxNRows); //ensure all columns have correct maxNRows if (maxNRows > 0) { for (int col = 0; col < tNCol; col++) { - PrimitiveArray pa = getColumn(col); + PrimitiveArray pa = columns.get(col); String s = pa.size() == 0? "" : pa.getString(pa.size() - 1); pa.addNStrings(maxNRows - pa.size(), s); } @@ -2065,39 +1952,29 @@ public boolean equals(Object o, boolean ensureColumnTypesEqual) { for (int col = 0; col < nColumns; col++) { Test.ensureEqual(getColumnName(col), table2.getColumnName(col), errorInMethod + "column=" + col + " names."); - PrimitiveArray array1 = getColumn(col); + PrimitiveArray array1 = columns.get(col); PrimitiveArray array2 = table2.getColumn(col); if (ensureColumnTypesEqual) Test.ensureEqual(array1.elementClassString(), array2.elementClassString(), errorInMethod + "column=" + col + " types."); boolean a1String = array1 instanceof StringArray; boolean a2String = array2 instanceof StringArray; - boolean stringTest = a1String || a2String; for (int row = 0; row < nRows; row++) { - if (stringTest) { - //avoid generating error strings unless needed - String s1 = array1.getString(row); - String s2 = array2.getString(row); - if (!s1.equals(s2)) { - //deal with NaN in long column not simplified to LongArray - //so left as NaN in String column - if (a1String && "NaN".equals(s1)) - s1 = ""; - if (a2String && "NaN".equals(s2)) - s2 = ""; - if (!s1.equals(s2)) - Test.ensureEqual(s1, s2, - errorInMethod + - "data(col=" + col + " (" + array1.elementClassString() + - " vs. 
" + array2.elementClassString() + "), row=" + row + ")."); - } - } else { - if (array1.getDouble(row) != array2.getDouble(row)) - //avoid generating error strings unless needed - Test.ensureEqual(array1.getDouble(row), array2.getDouble(row), + String s1 = array1.getString(row); + String s2 = array2.getString(row); + if (!s1.equals(s2)) { + //deal with NaN in long column not simplified to LongArray + // so left as NaN in String column + //or char array missing value ? + if (a1String && ("NaN".equals(s1))) + s1 = ""; + if (a2String && ("NaN".equals(s2))) + s2 = ""; + if (!s1.equals(s2)) + Test.ensureEqual(s1, s2, errorInMethod + - "data(col=" + col + " (" + array1.elementClassString() + - " vs. " + array2.elementClassString() + "), row=" + row + ")."); + "data(col=" + col + " (" + array1.elementClassString() + + " vs. " + array2.elementClassString() + "), row=" + row + ")."); } } } @@ -2136,11 +2013,12 @@ public void readMap(Map map, String keysName, String valuesName) { } /** This also reads from a file, but uses the ISO-8859-1 charset. */ - public void readASCII(String fullFileName, int columnNamesLine, int dataStartLine, + public void readASCII(String fullFileName, int columnNamesLine, + int dataStartLine, String tColSeparator, String testColumns[], double testMin[], double testMax[], String loadColumns[], boolean simplify) throws Exception { - readASCII(fullFileName, "ISO-8859-1", columnNamesLine, dataStartLine, + readASCII(fullFileName, String2.ISO_8859_1, columnNamesLine, dataStartLine, tColSeparator, testColumns, testMin, testMax, loadColumns, simplify); } @@ -2167,11 +2045,12 @@ public void readASCII(String fullFileName, int columnNamesLine, int dataStartLin * @throws Exception if trouble */ public void readASCII(String fullFileName, String charset, int columnNamesLine, - int dataStartLine, String testColumns[], double testMin[], double testMax[], + int dataStartLine, String tColSeparator, + String testColumns[], double testMin[], double testMax[], String loadColumns[], boolean simplify) throws Exception { readASCII(fullFileName, String2.readLinesFromFile(fullFileName, charset, 2), - columnNamesLine, dataStartLine, + columnNamesLine, dataStartLine, tColSeparator, testColumns, testMin, testMax, loadColumns, simplify); } @@ -2180,11 +2059,12 @@ public void readASCII(String fullFileName, String charset, int columnNamesLine, * * @throws Exception if trouble */ - public void readASCII(String fullFileName, int columnNamesLine, int dataStartLine, + public void readASCII(String fullFileName, int columnNamesLine, + int dataStartLine, String tColSeparator, String testColumns[], double testMin[], double testMax[], String loadColumns[]) throws Exception { - readASCII(fullFileName, columnNamesLine, dataStartLine, + readASCII(fullFileName, columnNamesLine, dataStartLine, null, testColumns, testMin, testMax, loadColumns, true); } @@ -2197,7 +2077,7 @@ public void readASCII(String fullFileName, int columnNamesLine, int dataStartLin throws Exception { readASCII(fullFileName, columnNamesLine, dataStartLine, - null, null, null, null, true); + null, null, null, null, null, true); } @@ -2208,7 +2088,7 @@ public void readASCII(String fullFileName, int columnNamesLine, int dataStartLin */ public void readASCII(String fullFileName) throws Exception { - readASCII(fullFileName, 0, 1, null, null, null, null, true); + readASCII(fullFileName, 0, 1, null, null, null, null, null, true); } /** @@ -2229,6 +2109,9 @@ public void readASCII(String fullFileName) throws Exception { * If there are no columnNames, names in 
the form "Column#" * (where col is 0 .. nColumns) will be created. * @param dataStartLine (0..) + * @param tColSeparator the character that separates the columns. + * Use "" or null to have this method guess. Otherwise, + * the first character of this string will be used. * @param testColumns the names of the columns to be tested (null = no tests). * All of the test columns must use the same, one, dimension that the * loadColumns use. @@ -2248,7 +2131,8 @@ public void readASCII(String fullFileName) throws Exception { * @throws Exception if trouble * (e.g., a specified testColumn or loadColumn not found) */ - public void readASCII(String fileName, String lines[], int columnNamesLine, int dataStartLine, + public void readASCII(String fileName, String lines[], int columnNamesLine, + int dataStartLine, String tColSeparator, String testColumns[], double testMin[], double testMax[], String loadColumns[], boolean simplify) { @@ -2279,27 +2163,31 @@ public void readASCII(String fileName, String lines[], int columnNamesLine, int //determine column separator //look for separator that appears the most and on in 3 test lines - int nTab = 1; - int nComma = 1; - int nSemi = 1; - int nSpace = 1; String oneLine; - for (int row = 0; row < Math.min(3, nRows); row++) { - oneLine = lines[dataStartLine + row]; - nTab *= String2.countAll(oneLine, "\t"); - nComma *= String2.countAll(oneLine, ","); - nSemi *= String2.countAll(oneLine, ";"); - nSpace *= String2.countAll(oneLine, " "); - } - char colSeparator = - nTab >= 1 && nTab >= Math.max(nComma, nSemi)? '\t': - nComma >= 1 && nComma >= Math.max(nTab, nSemi)? ',' : - nSemi >= 1? ';' : - nSpace >= 1? ' ' : - '\u0000'; //only one datum per line; colSeparator irrelevant - if (debugMode) - String2.log(">> separator=#" + (int)colSeparator + " nTab=" + nTab + + char colSeparator = ','; + if (tColSeparator == null || tColSeparator.length() == 0) { + int nTab = 1; + int nComma = 1; + int nSemi = 1; + int nSpace = 1; + for (int row = 0; row < Math.min(3, nRows); row++) { + oneLine = lines[dataStartLine + row]; + nTab *= String2.countAll(oneLine, "\t"); + nComma *= String2.countAll(oneLine, ","); + nSemi *= String2.countAll(oneLine, ";"); + nSpace *= String2.countAll(oneLine, " "); + } + colSeparator = + nTab >= 1 && nTab >= Math.max(nComma, nSemi)? '\t': + nComma >= 1 && nComma >= Math.max(nTab, nSemi)? ',' : + nSemi >= 1? ';' : + nSpace >= 1? ' ' : + '\u0000'; //only one datum per line; colSeparator irrelevant + if (debugMode) String2.log(">> separator=#" + (int)colSeparator + " nTab=" + nTab + " nComma=" + nComma + " nSemi=" + nSemi + " nSpace=" + nSpace); + } else { + colSeparator = tColSeparator.charAt(0); + } //read the file's column names StringArray fileColumnNames = new StringArray(); @@ -2405,11 +2293,16 @@ else if (colSeparator == ',') if (which < 0 || which >= nItems) //value treated as NaN. NaN will fail any test. 
continue; double d = String2.parseDouble(items[which]); - if (d >= testMin[test] && d <= testMax[test]) //NaN will fail this test + if (d >= testMin[test] && d <= testMax[test]) { //NaN will fail this test continue; - else {ok = false; break; } + } else {ok = false; + if (debugMode) String2.log(">> deleting row=" + row + + " because it failed test #" + test); + break; + } } - if (!ok) continue; + if (!ok) + continue; //store the data items for (int col = 0; col < loadColumnNumbers.length; col++) { @@ -2418,9 +2311,10 @@ else if (colSeparator == ',') //request col is not in the file loadColumnSA[col].add(canonicalEmptyString); } else if (itemNumber < nItems) { + String s = String2.fromNccsvString(items[itemNumber]); if (simplify) - loadColumnSA[col].addNotCanonical(items[itemNumber]); - else loadColumnSA[col].add(items[itemNumber]); //canonical + loadColumnSA[col].addNotCanonical(s); + else loadColumnSA[col].add(s); //canonical } else if (allowRaggedRightInReadASCII) { //it is a bad idea to allow this (who knows which value is missing?), //but some buoy files clearly lack the last value, @@ -2488,8 +2382,8 @@ public static void testReadAsciiCsvFile() throws Exception { //read as Strings table = new Table(); table.allowRaggedRightInReadASCII = true; - table.readASCII(fileName, 0, 1, null, null, null, null, false); - results = table.dataToCSVString(); + table.readASCII(fileName, 0, 1, "", null, null, null, null, false); + results = table.dataToString(); expected = "aString,aChar,aBoolean,aByte,aShort,anInt,aLong,aFloat,aDouble\n" + "\"b,d\",Ab,t,24,24000,24000000,240000000000,2.4,2.412345678987654\n" + @@ -2514,8 +2408,8 @@ public static void testReadAsciiCsvFile() throws Exception { //test simplify table = new Table(); table.allowRaggedRightInReadASCII = true; - table.readASCII(fileName, 0, 1, null, null, null, null, true); - results = table.dataToCSVString(); + table.readASCII(fileName, 0, 1, "", null, null, null, null, true); + results = table.dataToString(); expected = "aString,aChar,aBoolean,aByte,aShort,anInt,aLong,aFloat,aDouble\n" + "\"b,d\",Ab,t,24,24000,24000000,240000000000,2.4,2.412345678987654\n" + @@ -2540,10 +2434,10 @@ public static void testReadAsciiCsvFile() throws Exception { //read subset table = new Table(); table.allowRaggedRightInReadASCII = true; - table.readASCII(fileName, 0, 1, + table.readASCII(fileName, 0, 1, "", new String[]{"aByte"}, new double[]{14}, new double[]{16}, new String[]{"aDouble","aString","aByte"}, true); //load cols - results = table.dataToCSVString(); + results = table.dataToString(); expected = "aDouble,aString,aByte\n" + ",needs,\n" + @@ -2573,11 +2467,11 @@ public static void testReadAsciiSsvFile() throws Exception { //read as Strings table = new Table(); table.allowRaggedRightInReadASCII = true; - table.readASCII(fileName, 0, 1, null, null, null, null, false); - results = table.dataToCSVString(); + table.readASCII(fileName, 0, 1, "", null, null, null, null, false); + results = table.dataToString(); expected = "aString,aChar,aBoolean,aByte,aShort,anInt,aLong,aFloat,aDouble\n" + -" b d ,Ab,t,24,24000,24000000,240000000000,2.4,2.412345678987654\n" + +"\" b d \",Ab,t,24,24000,24000000,240000000000,2.4,2.412345678987654\n" + "needs1space,E,,,,,,,\n" + "fg,F,true,11,12001,1200000,12000000000,1.21,1e200\n" + "h,H,1,12,12002,120000,1200000000,1.22,2e200\n" + @@ -2599,11 +2493,11 @@ public static void testReadAsciiSsvFile() throws Exception { //test simplify table = new Table(); table.allowRaggedRightInReadASCII = true; - table.readASCII(fileName, 0, 1, 
null, null, null, null, true); - results = table.dataToCSVString(); + table.readASCII(fileName, 0, 1, "", null, null, null, null, true); + results = table.dataToString(); expected = "aString,aChar,aBoolean,aByte,aShort,anInt,aLong,aFloat,aDouble\n" + -" b d ,Ab,t,24,24000,24000000,240000000000,2.4,2.412345678987654\n" + +"\" b d \",Ab,t,24,24000,24000000,240000000000,2.4,2.412345678987654\n" + "needs1space,E,,,,,,,\n" + "fg,F,true,11,12001,1200000,12000000000,1.21,1.0E200\n" + "h,H,1,12,12002,120000,1200000000,1.22,2.0E200\n" + @@ -2625,10 +2519,10 @@ public static void testReadAsciiSsvFile() throws Exception { //read subset table = new Table(); table.allowRaggedRightInReadASCII = true; - table.readASCII(fileName, 0, 1, + table.readASCII(fileName, 0, 1, "", new String[]{"aByte"}, new double[]{14}, new double[]{16}, new String[]{"aDouble","aString","aByte"}, true); //load cols - results = table.dataToCSVString(); + results = table.dataToString(); expected = "aDouble,aString,aByte\n" + ",needs1space,\n" + @@ -2959,7 +2853,7 @@ public static void testReadColumnarASCIIFile() throws Exception { Arrays.fill(colClass, String.class); Table table = new Table(); table.readColumnarASCIIFile(fullFileName, "", 3, colNames, start, end, colClass); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "aDouble,aString,aChar,aBoolean,aByte,aShort,anInt,aLong,aFloat\n" + "2.412345678987654,abcdef,Ab,t,24,24000,24000000,240000000000,2.4\n" + @@ -2984,7 +2878,7 @@ public static void testReadColumnarASCIIFile() throws Exception { //simplify table.readColumnarASCIIFile(fullFileName, "", 3, colNames, start, end, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "aDouble,aString,aChar,aBoolean,aByte,aShort,anInt,aLong,aFloat\n" + "2.412345678987654,abcdef,Ab,t,24,24000,24000000,240000000000,2.4\n" + @@ -3011,10 +2905,10 @@ public static void testReadColumnarASCIIFile() throws Exception { double.class, String.class, char.class, boolean.class, byte.class, short.class, int.class, long.class, float.class}; table.readColumnarASCIIFile(fullFileName, "", 3, colNames, start, end, colClass); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "aDouble,aString,aChar,aBoolean,aByte,aShort,anInt,aLong,aFloat\n" + -"2.412345678987654,abcdef,65,1,24,24000,24000000,240000000000,2.4\n" + +"2.412345678987654,abcdef,A,1,24,24000,24000000,240000000000,2.4\n" + ",short:,,,,,,,\n" + "1.0E200,fg,70,1,11,12001,1200000,12000000000,1.21\n" + "2.0E200,h,72,1,12,12002,120000,1200000000,1.22\n" + @@ -3035,6 +2929,692 @@ public static void testReadColumnarASCIIFile() throws Exception { Test.ensureEqual(results, expected, "results=\n" + results); } + + /** + * This reads an NCCSV .csv file from a URL or a file. + * See https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html . + * *SCALAR* and dataType attributes are processed and removed. + * This just calls readNccsv(fullName, true). + * + * @param fullName a URL or the name of a file + * @throws Exception if trouble + */ + public void readNccsv(String fullName) throws Exception { + readNccsv(fullName, true); + } + + /** + * This reads an NCCSV .csv file from a URL or a file. + * *SCALAR* and dataType attributes are processed and removed. + * + * @param fullName a URL or the name of a file + * @param readData If false, the PA for *SCALAR* vars will have 1 value; + * all others will have 0 values. 
+ * @throws Exception if trouble + */ + public void readNccsv(String fullName, boolean readData) throws Exception { + BufferedReader bufferedReader = new BufferedReader( + String2.isRemote(fullName)? + new InputStreamReader(SSR.getUrlInputStream(fullName), String2.ISO_8859_1) : + new FileReader(fullName)); + lowReadNccsv(fullName, readData, bufferedReader); + bufferedReader.close(); + } + + /** + * This reads an NCCSV .csv file. + * See https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html . + * *SCALAR* and *DATA_TYPE* attributes are processed and removed. + * + * @param fullName for error messages only + * @param reader from a file or URL + * @throws SimpleException if trouble (but doesn't close the reader) + */ + public void lowReadNccsv(String fullName, boolean readData, BufferedReader reader) { + long time = System.currentTimeMillis(); + long lineNumber = 0; + clear(); + String conventionsNotFound = String2.NCCSV_GLOBAL + ",Conventions,\"..., NCCSV-...\" not found on line 1."; + + try { + + //read the header + String s; + HashMap varNameAtts = new HashMap(); + HashSet expectedDCols = new HashSet(); + + while ((s = reader.readLine()) != null) { + lineNumber++; + if (s.startsWith(String2.NCCSV_END_METADATA)) //extra commas are ignored + break; + + //split the csv line + StringArray sa = StringArray.simpleFromNccsv(s); + if (sa.size() < 3) { + if (lineNumber == 1) + throw new SimpleException(conventionsNotFound); + continue; //e.g., blank line or ignore content + } + String varName = String2.fromNccsvString(sa.get(0)); + String attName = String2.fromNccsvString(sa.get(1)); + sa.removeRange(0, 2); + if (lineNumber == 1) { + //ensure first line is as expected + if (!varName.equals(String2.NCCSV_GLOBAL) || + !attName.equals("Conventions") || + sa.get(0).indexOf("NCCSV-") < 0) + throw new SimpleException(conventionsNotFound); + globalAttributes.add(attName, String2.fromNccsvString(sa.get(0))); + continue; + } + if (sa.removeEmptyAtEnd() == 0) //extra commas are ignored + continue; + + //save the attributes + PrimitiveArray pa = PrimitiveArray.parseNccsvAttributes(sa); + if (varName.equals(String2.NCCSV_GLOBAL)) { + globalAttributes.add(attName, pa); + } else { + Attributes atts = varNameAtts.get(varName); + if (atts == null) { + //create a new column with StringArray capacity=0 + //as a marker for a dummyPA which needs to be set by + //*DATA_TYPE* or *SCALAR* + atts = new Attributes(); + varNameAtts.put(varName, atts); + addColumn(nColumns(), varName, new StringArray(0, false), atts); + } + + if (String2.NCCSV_DATATYPE.equals(attName)) { + //if *SCALAR* and *DATA_TYPE* specified, ignore *DATA_TYPE* + int col = findColumnNumber(varName); //it will exist + if (columns.get(col).capacity() == 0) //i.e., the dummy pa + //new PrimitiveArray with capacity=1024 + setColumn(col, PrimitiveArray.factory( + PrimitiveArray.caseInsensitiveElementStringToClass(sa.get(0)), + 1024, false)); //active? + expectedDCols.add(varName); + } else if (String2.NCCSV_SCALAR.equals(attName)) { + if (pa.size() != 1) + throw new SimpleException( + "There must be just 1 value for a *SCALAR*. 
varName=" + varName + + " has " + pa.size() + "."); + setColumn(findColumnNumber(varName), pa); + } else { + //most common case is very fast + atts.add(attName, pa); + } + } + } + if (s == null) + throw new SimpleException(String2.NCCSV_END_METADATA + NOT_FOUND_EOF); + + //check that all *DATA_TYPE*s were set + int nc = nColumns(); + for (int c = 0; c < nc; c++) { + //if (getColumn(c) instanceof CharArray) String2.log(">> col=" + c + " is a CharArray"); + if (columns.get(c).capacity() == 0) + throw new SimpleException( + "Neither *SCALAR* nor *DATA_TYPE* were specified for column=" + getColumnName(c)); + } + + //don't readData? + if (!readData) + return; + + //read the column names in the data section + s = reader.readLine(); + lineNumber++; + if (s == null) + throw new SimpleException("Column names" + NOT_FOUND_EOF); + StringArray sa = StringArray.simpleFromNccsv(s); + if (sa.removeEmptyAtEnd() == 0) + throw new SimpleException( + "No column names found names at start of data section."); + sa.fromNccsv(); //un enquote any quoted strings + int nDataCol = sa.size(); + PrimitiveArray dpa[] = new PrimitiveArray[nDataCol]; //so fast below + boolean dpaIsLongArray[] = new boolean[nDataCol]; + boolean dpaIsCharArray[] = new boolean[nDataCol]; + boolean dpaIsStringArray[] = new boolean[nDataCol]; + for (int dcol = 0; dcol < nDataCol; dcol++) { + String varName = sa.get(dcol); + if (!String2.isVariableNameSafe(varName)) + throw new SimpleException("varName=" + varName + + " is not a valid variableName."); + int col = findColumnNumber(varName); + if (col < 0) + throw new SimpleException( + "No attributes were specified for varName=" + + varName + ". *DATA_TYPE* must be specified."); + dpa[dcol] = columns.get(col); + //is this a scalar column?! + if (dpa[dcol].size() == 1) + throw new SimpleException( + "*SCALAR* variable=" + varName + + " must not be in the data section."); + //is this column name in csv section twice? + if (!expectedDCols.remove(varName)) + throw new SimpleException( + "varName=" + varName + " occurs twice in the data section."); + dpaIsLongArray[ dcol] = dpa[dcol] instanceof LongArray; + dpaIsCharArray[ dcol] = dpa[dcol] instanceof CharArray; + //if (dpaIsCharArray[dcol]) String2.log(">> dcol=" + dcol + " is CharArray"); + dpaIsStringArray[dcol] = dpa[dcol] instanceof StringArray; + } + //all expectedDCols were found? 
+ if (expectedDCols.size() > 0) + throw new SimpleException( + "Some variables are missing in the data section: " + + String2.toCSSVString(expectedDCols.toArray())); + + //read the data + while ((s = reader.readLine()) != null) { + lineNumber++; + if (s.startsWith(String2.NCCSV_END_DATA)) //extra commas are ignored + break; + sa = StringArray.simpleFromNccsv(s); //extra commas are ignored + if (sa.size() < nDataCol) + throw new SimpleException("Too few values on data line: nObserved=" + sa.size() + + " nExpected=" + nDataCol); + for (int dcol = 0; dcol < nDataCol; dcol++) { + if (dpaIsStringArray[dcol]) { + dpa[dcol].addString(String2.fromNccsvString(sa.get(dcol))); + } else if (dpaIsCharArray[dcol]) { + ((CharArray)dpa[dcol]).add(String2.fromNccsvChar(sa.get(dcol))); + } else if (dpaIsLongArray[dcol]) { + String ts = sa.get(dcol); + if (ts.endsWith("L")) + ts = ts.substring(0, ts.length() - 1); + dpa[dcol].addString(ts); + } else { + dpa[dcol].addString(sa.get(dcol)); + } + } + } + if (s == null) + throw new SimpleException(String2.NCCSV_END_METADATA + NOT_FOUND_EOF); + + //expand scalars + ensureColumnsAreSameSize_LastValue(); + + String2.log("readNccsv(" + fullName + ") finished successfully. nColumns=" + nColumns() + + " nRows=" + nRows() + " time=" + (System.currentTimeMillis() - time)); + + } catch (Exception e) { + String2.log(MustBe.throwableToString(e)); + throw new SimpleException(String2.ERROR + " on line #" + lineNumber + + " in readNccsv(" + fullName + "): " + e.getMessage()); + } + } + + /** This is like toNccsv(true, true, Integer.MAX_VALUE) */ + public String toNccsv() throws Exception { + return toNccsv(true, true, Integer.MAX_VALUE); + } + + /** + * This saves this table in an NCCSV .csv file. + * See https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html . + * This can be a metadata table -- where scalar vars have 1 value and others have 0 values. + * This doesn't close the writer at the end. + * + * @param writeDataRows (use Integer.MAX_VALUE for all rows). + * If 0, *END_METADATA* is the last thing in the file. + * @throws Exception if trouble. No_data is not an error. + */ + public String toNccsv(boolean catchScalars, boolean writeMetadata, + int writeDataRows) throws Exception { + + StringWriter sw = new StringWriter(1024 + nColumns() * nRows() * 10); + toNccsv(catchScalars, writeMetadata, writeDataRows, sw); + return sw.toString(); + } + + + /** + * This writes this table to an nccsv file. + * This writes to a temp file, then renames it into place. + */ + public void toNccsvFile(boolean catchScalars, boolean writeMetadata, + int writeDataRows, String fullFileName) throws Exception { + + BufferedWriter bw = null; + int randomInt = Math2.random(Integer.MAX_VALUE); + + try { + bw = new BufferedWriter( + new OutputStreamWriter( + new FileOutputStream(fullFileName + randomInt), String2.ISO_8859_1)); + toNccsv(catchScalars, writeMetadata, writeDataRows, bw); + bw.close(); + bw = null; + File2.rename(fullFileName + randomInt, fullFileName); + + } catch (Throwable t) { + if (bw != null) { + try {bw.close();} catch (Throwable t2) {} + } + File2.delete(fullFileName + randomInt); + File2.delete(fullFileName); + } + } + + + + + /** + * This saves this table in an NCCSV .csv file. + * This can be a metadata table -- where scalar vars have 1 value and others have 0 values. + * This doesn't close the writer at the end. + * + * @param catchScalars If true, this looks at the data for scalars (just 1 value). + * @param writeMetadata If true, this writes the metadata section. 
+ * This adds a *DATA_TYPE* or *SCALAR* attribute to each column. + * @param writeDataRows This is the maximum number of data rows to write. + * Use Integer.MAX_VALUE to write all. + * @param writer At the end it is flushed, not closed. + * @throws Exception if trouble. No_data is not an error. + */ + public void toNccsv(boolean catchScalars, boolean writeMetadata, + int writeDataRows, Writer writer) throws Exception { + + //figure out what's what + int nc = nColumns(); + int nr = Integer.MAX_VALUE; //shortest non-scalar pa (may be scalars have 1, others 0 or many) + boolean isLong[] = new boolean[nc]; + boolean isScalar[] = new boolean[nc]; + boolean allScalar = true; + int firstNonScalar = nc; + for (int c = 0; c < nc; c++) { + PrimitiveArray pa = columns.get(c); + isLong[c] = pa.elementClass() == long.class; + isScalar[c] = catchScalars && pa.size() > 0 && pa.allSame(); + if (!isScalar[c]) { + nr = Math.min(nr, pa.size()); + allScalar = false; + if (firstNonScalar == nc) + firstNonScalar = c; + } + } + + //write metadata + if (writeMetadata) { + writer.write(globalAttributes.toNccsvString(String2.NCCSV_GLOBAL)); + + for (int c = 0; c < nc; c++) { + //scalar + if (isScalar[c]) { + writer.write( + String2.toNccsvDataString(getColumnName(c)) + "," + + String2.NCCSV_SCALAR + "," + + columns.get(c).subset(0, 1, 0).toNccsvAttString() + "\n"); + } else { + writer.write( + String2.toNccsvDataString(getColumnName(c)) + + "," + String2.NCCSV_DATATYPE + "," + + columns.get(c).elementClassString() + "\n"); + } + writer.write(columnAttributes(c).toNccsvString(getColumnName(c))); + } + writer.write("\n" + String2.NCCSV_END_METADATA + "\n"); + writer.flush(); //important + } + + if (writeDataRows <= 0) + return; + + //write the non-scalar column data + if (!allScalar) { + //column names + for (int c = 0; c < nc; c++) { + if (isScalar[c]) + continue; + if (c > firstNonScalar) + writer.write(','); + writer.write(String2.toNccsvAttString(getColumnName(c))); + } + writer.write("\n"); + + //csv data + int tnr = Math.min(nr, writeDataRows); + for (int r = 0; r < tnr; r++) { + for (int c = 0; c < nc; c++) { + if (isScalar[c]) + continue; + + if (c > firstNonScalar) + writer.write(','); + String ts = columns.get(c).getNccsvDataString(r); + writer.write(ts); + if (isLong[c] && ts.length() > 0) + writer.write('L'); //special case not handled by getNccsvDataString + } + writer.write("\n"); + } + } + writer.write(String2.NCCSV_END_DATA + "\n"); + writer.flush(); //important + } + + + /** + * This tests readNccsv(), readNccsvMetadata(), + */ + public static void testNccsv() throws Exception { + String2.log("\n**** Table.testNccsv()\n"); + String dir = "/erddapTest/nccsv/"; + + //scalar + String fileName = dir + "testScalar.csv"; + Table table = new Table(); + table.readNccsv(fileName); + for (int c = 0; c < table.nColumns(); c++) { + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_SCALAR) ==null, "col=" + c); + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_DATATYPE)==null, "col=" + c); + } + String results = table.toNccsv(); + String expected = +"*GLOBAL*,Conventions,\"COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0\"\n" + +"*GLOBAL*,cdm_trajectory_variables,ship\n" + +"*GLOBAL*,creator_email,bob.simons@noaa.gov\n" + +"*GLOBAL*,creator_name,Bob Simons\n" + +"*GLOBAL*,creator_type,person\n" + +"*GLOBAL*,creator_url,https://www.pfeg.noaa.gov\n" + +"*GLOBAL*,featureType,trajectory\n" + +"*GLOBAL*,infoUrl,https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html\n" + +"*GLOBAL*,institution,\"NOAA NMFS 
SWFSC ERD, NOAA PMEL\"\n" + +"*GLOBAL*,keywords,\"NOAA, sea, ship, sst, surface, temperature, trajectory\"\n" + +"*GLOBAL*,license,\"\"\"NCCSV Demonstration\"\" by Bob Simons and Steve Hankin is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ .\"\n" + +"*GLOBAL*,standard_name_vocabulary,CF Standard Name Table v29\n" + +"*GLOBAL*,subsetVariables,ship\n" + +"*GLOBAL*,summary,This is a paragraph or two describing the dataset.\n" + +"*GLOBAL*,title,NCCSV Demonstration\n" + +"ship,*SCALAR*,\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\"\n" + +"ship,cf_role,trajectory_id\n" + +"time,*DATA_TYPE*,String\n" + +"time,standard_name,time\n" + +"time,units,M/d/YYYY H:mm:ss\n" + +"lat,*DATA_TYPE*,double\n" + +"lat,units,degrees_north\n" + +"lon,*DATA_TYPE*,double\n" + +"lon,units,degrees_east\n" + +"status,*DATA_TYPE*,char\n" + +"status,comment,\"From http://some.url.gov/someProjectDocument , Table C\"\n" + +"testLong,*DATA_TYPE*,long\n" + +"testLong,units,\"1\"\n" + +"sst,*DATA_TYPE*,float\n" + +"sst,actual_range,0.17f,23.58f\n" + +"sst,missing_value,99.0f\n" + +"sst,standard_name,sea_surface_temperature\n" + +"sst,testBytes,-128b,0b,127b\n" + +"sst,testChars,\"','\",\"'\"\"'\",\"'\\u20ac'\"\n" + +"sst,testDoubles,-1.7976931348623157E308d,0.0d,1.7976931348623157E308d\n" + +"sst,testFloats,-3.4028235E38f,0.0f,3.4028235E38f\n" + +"sst,testInts,-2147483648i,0i,2147483647i\n" + +"sst,testLongs,-9223372036854775808L,9223372036854775806L,9223372036854775807L\n" + +"sst,testShorts,-32768s,0s,32767s\n" + +"sst,testStrings,\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\"\n" + +"sst,units,degrees_C\n" + +"\n" + +"*END_METADATA*\n" + +"time,lat,lon,status,testLong,sst\n" + +"3/23/2017 0:45:00,28.0002,-130.2576,A,-9223372036854775808L,10.9\n" + +"3/23/2017 1:45:00,28.0003,-130.3472,\\u20ac,-1234567890123456L,\n" + +"3/23/2017 2:45:00,28.0001,-130.4305,\\t,0L,10.7\n" + +"3/23/2017 12:45:00,27.9998,-131.5578,\"\"\"\",1234567890123456L,99.0\n" + +"3/23/2017 21:45:00,28.0003,-132.0014,\\u00fc,9223372036854775806L,10.0\n" + +"3/23/2017 23:45:00,28.0002,-132.1591,?,,\n" + +"*END_DATA*\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + results = table.toNccsv(false, true, Integer.MAX_VALUE); //don't catch scalar + expected = +"*GLOBAL*,Conventions,\"COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0\"\n" + +"*GLOBAL*,cdm_trajectory_variables,ship\n" + +"*GLOBAL*,creator_email,bob.simons@noaa.gov\n" + +"*GLOBAL*,creator_name,Bob Simons\n" + +"*GLOBAL*,creator_type,person\n" + +"*GLOBAL*,creator_url,https://www.pfeg.noaa.gov\n" + +"*GLOBAL*,featureType,trajectory\n" + +"*GLOBAL*,infoUrl,https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html\n" + +"*GLOBAL*,institution,\"NOAA NMFS SWFSC ERD, NOAA PMEL\"\n" + +"*GLOBAL*,keywords,\"NOAA, sea, ship, sst, surface, temperature, trajectory\"\n" + +"*GLOBAL*,license,\"\"\"NCCSV Demonstration\"\" by Bob Simons and Steve Hankin is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ .\"\n" + +"*GLOBAL*,standard_name_vocabulary,CF Standard Name Table v29\n" + +"*GLOBAL*,subsetVariables,ship\n" + +"*GLOBAL*,summary,This is a paragraph or two describing the dataset.\n" + +"*GLOBAL*,title,NCCSV Demonstration\n" + +"ship,*DATA_TYPE*,String\n" + +"ship,cf_role,trajectory_id\n" + +"time,*DATA_TYPE*,String\n" + +"time,standard_name,time\n" + +"time,units,M/d/YYYY H:mm:ss\n" + +"lat,*DATA_TYPE*,double\n" + +"lat,units,degrees_north\n" + +"lon,*DATA_TYPE*,double\n" + +"lon,units,degrees_east\n" + +"status,*DATA_TYPE*,char\n" + +"status,comment,\"From 
http://some.url.gov/someProjectDocument , Table C\"\n" + +"testLong,*DATA_TYPE*,long\n" + +"testLong,units,\"1\"\n" + +"sst,*DATA_TYPE*,float\n" + +"sst,actual_range,0.17f,23.58f\n" + +"sst,missing_value,99.0f\n" + +"sst,standard_name,sea_surface_temperature\n" + +"sst,testBytes,-128b,0b,127b\n" + +"sst,testChars,\"','\",\"'\"\"'\",\"'\\u20ac'\"\n" + +"sst,testDoubles,-1.7976931348623157E308d,0.0d,1.7976931348623157E308d\n" + +"sst,testFloats,-3.4028235E38f,0.0f,3.4028235E38f\n" + +"sst,testInts,-2147483648i,0i,2147483647i\n" + +"sst,testLongs,-9223372036854775808L,9223372036854775806L,9223372036854775807L\n" + +"sst,testShorts,-32768s,0s,32767s\n" + +"sst,testStrings,\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\"\n" + +"sst,units,degrees_C\n" + +"\n" + +"*END_METADATA*\n" + +"ship,time,lat,lon,status,testLong,sst\n" + +"\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\",3/23/2017 0:45:00,28.0002,-130.2576,A,-9223372036854775808L,10.9\n" + +"\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\",3/23/2017 1:45:00,28.0003,-130.3472,\\u20ac,-1234567890123456L,\n" + +"\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\",3/23/2017 2:45:00,28.0001,-130.4305,\\t,0L,10.7\n" + +"\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\",3/23/2017 12:45:00,27.9998,-131.5578,\"\"\"\",1234567890123456L,99.0\n" + +"\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\",3/23/2017 21:45:00,28.0003,-132.0014,\\u00fc,9223372036854775806L,10.0\n" + +"\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\",3/23/2017 23:45:00,28.0002,-132.1591,?,,\n" + +"*END_DATA*\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + + //non scalar + fileName = dir + "sample.csv"; + table.readNccsv(fileName); + for (int c = 0; c < table.nColumns(); c++) { + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_SCALAR) ==null, "col=" + c); + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_DATATYPE)==null, "col=" + c); + } + results = table.toNccsv(false, true, Integer.MAX_VALUE); //don't catch scalars + expected = +"*GLOBAL*,Conventions,\"COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0\"\n" + +"*GLOBAL*,cdm_trajectory_variables,ship\n" + +"*GLOBAL*,creator_email,bob.simons@noaa.gov\n" + +"*GLOBAL*,creator_name,Bob Simons\n" + +"*GLOBAL*,creator_type,person\n" + +"*GLOBAL*,creator_url,https://www.pfeg.noaa.gov\n" + +"*GLOBAL*,featureType,trajectory\n" + +"*GLOBAL*,infoUrl,https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html\n" + +"*GLOBAL*,institution,\"NOAA NMFS SWFSC ERD, NOAA PMEL\"\n" + +"*GLOBAL*,keywords,\"NOAA, sea, ship, sst, surface, temperature, trajectory\"\n" + +"*GLOBAL*,license,\"\"\"NCCSV Demonstration\"\" by Bob Simons and Steve Hankin is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ .\"\n" + +"*GLOBAL*,standard_name_vocabulary,CF Standard Name Table v29\n" + +"*GLOBAL*,subsetVariables,ship\n" + +"*GLOBAL*,summary,This is a paragraph or two describing the dataset.\n" + +"*GLOBAL*,title,NCCSV Demonstration\n" + +"ship,*DATA_TYPE*,String\n" + +"ship,cf_role,trajectory_id\n" + +"time,*DATA_TYPE*,String\n" + +"time,standard_name,time\n" + +"time,units,M/d/YYYY H:mm:ss\n" + +"lat,*DATA_TYPE*,double\n" + +"lat,units,degrees_north\n" + +"lon,*DATA_TYPE*,double\n" + +"lon,units,degrees_east\n" + +"status,*DATA_TYPE*,char\n" + +"status,comment,\"From http://some.url.gov/someProjectDocument , Table C\"\n" + +"testLong,*DATA_TYPE*,long\n" + +"testLong,units,\"1\"\n" + +"sst,*DATA_TYPE*,float\n" + +"sst,actual_range,0.17f,23.58f\n" + +"sst,missing_value,99.0f\n" + +"sst,standard_name,sea_surface_temperature\n" + +"sst,testBytes,-128b,0b,127b\n" + 
+"sst,testChars,\"','\",\"'\"\"'\",\"'\\u20ac'\"\n" + +"sst,testDoubles,-1.7976931348623157E308d,0.0d,1.7976931348623157E308d\n" + +"sst,testFloats,-3.4028235E38f,0.0f,3.4028235E38f\n" + +"sst,testInts,-2147483648i,0i,2147483647i\n" + +"sst,testLongs,-9223372036854775808L,0L,9223372036854775807L\n" + +"sst,testShorts,-32768s,0s,32767s\n" + +"sst,testStrings,\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\"\n" + +"sst,units,degrees_C\n" + +"\n" + +"*END_METADATA*\n" + +"ship,time,lat,lon,status,testLong,sst\n" + +"Bell M. Shimada,3/23/2017 0:45:00,28.0002,-130.2576,A,-9223372036854775808L,10.9\n" + +"Bell M. Shimada,3/23/2017 1:45:00,28.0003,-130.3472,\\u20ac,-1234567890123456L,\n" + +"Bell M. Shimada,3/23/2017 2:45:00,28.0001,-130.4305,\\t,0L,10.7\n" + +"Bell M. Shimada,3/23/2017 12:45:00,27.9998,-131.5578,\"\"\"\",1234567890123456L,99.0\n" + +"Bell M. Shimada,3/23/2017 21:45:00,28.0003,-132.0014,\\u00fc,9223372036854775806L,10.0\n" + +"Bell M. Shimada,3/23/2017 23:45:00,28.0002,-132.1591,?,,\n" + +"*END_DATA*\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + //just metadata + fileName = dir + "sampleMetadata.csv"; + table = new Table(); + table.readNccsv(fileName, false); //readData? + for (int c = 0; c < table.nColumns(); c++) { + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_SCALAR) ==null, "col=" + c); + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_DATATYPE)==null, "col=" + c); + } + results = table.toNccsv(true, true, 0); //catch scalar, writeMetadata, don't write data + expected = +"*GLOBAL*,Conventions,\"COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0\"\n" + +"*GLOBAL*,cdm_trajectory_variables,ship\n" + +"*GLOBAL*,creator_email,bob.simons@noaa.gov\n" + +"*GLOBAL*,creator_name,Bob Simons\n" + +"*GLOBAL*,creator_type,person\n" + +"*GLOBAL*,creator_url,https://www.pfeg.noaa.gov\n" + +"*GLOBAL*,featureType,trajectory\n" + +"*GLOBAL*,infoUrl,https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html\n" + +"*GLOBAL*,institution,\"NOAA NMFS SWFSC ERD, NOAA PMEL\"\n" + +"*GLOBAL*,keywords,\"NOAA, sea, ship, sst, surface, temperature, trajectory\"\n" + +"*GLOBAL*,license,\"\"\"NCCSV Demonstration\"\" by Bob Simons and Steve Hankin is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ .\"\n" + +"*GLOBAL*,standard_name_vocabulary,CF Standard Name Table v29\n" + +"*GLOBAL*,subsetVariables,ship\n" + +"*GLOBAL*,summary,This is a paragraph or two describing the dataset.\n" + +"*GLOBAL*,title,NCCSV Demonstration\n" + +"ship,*SCALAR*,Bell M. 
Shimada\n" + +"ship,cf_role,trajectory_id\n" + +"time,*DATA_TYPE*,String\n" + +"time,standard_name,time\n" + +"time,units,M/d/YYYY H:mm:ss\n" + +"lat,*DATA_TYPE*,double\n" + +"lat,units,degrees_north\n" + +"lon,*DATA_TYPE*,double\n" + +"lon,units,degrees_east\n" + +"status,*DATA_TYPE*,char\n" + +"status,comment,\"From http://some.url.gov/someProjectDocument , Table C\"\n" + +"testLong,*DATA_TYPE*,long\n" + +"testLong,units,\"1\"\n" + +"sst,*DATA_TYPE*,float\n" + +"sst,actual_range,0.17f,23.58f\n" + +"sst,missing_value,99.0f\n" + +"sst,standard_name,sea_surface_temperature\n" + +"sst,testBytes,-128b,0b,127b\n" + +"sst,testChars,\"','\",\"'\"\"'\",\"'\\u20ac'\"\n" + +"sst,testDoubles,-1.7976931348623157E308d,0.0d,1.7976931348623157E308d\n" + +"sst,testFloats,-3.4028235E38f,0.0f,3.4028235E38f\n" + +"sst,testInts,-2147483648i,0i,2147483647i\n" + +"sst,testLongs,-9223372036854775808L,0L,9223372036854775807L\n" + +"sst,testShorts,-32768s,0s,32767s\n" + +"sst,testStrings,\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\"\n" + +"sst,units,degrees_C\n" + +"\n" + +"*END_METADATA*\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + //test round trip to spreadsheet and back + //make a copy of sampleScalar + fileName = dir + "sampleExcel.csv"; + String2.writeToFile(fileName, + String2.directReadFrom88591File(dir + "testScalar.csv")); + SSR.displayInBrowser("file://" + fileName); + String2.pressEnterToContinue("\nIn Excel, use File : Save As : CSV : as sampleExcel.csv : yes : yes."); + table = new Table(); + table.readNccsv(fileName); + for (int c = 0; c < table.nColumns(); c++) { + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_SCALAR) ==null, "col=" + c); + Test.ensureTrue(table.columnAttributes(c).get(String2.NCCSV_DATATYPE)==null, "col=" + c); + } + results = table.toNccsv(); + expected = +"*GLOBAL*,Conventions,\"COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0\"\n" + +"*GLOBAL*,cdm_trajectory_variables,ship\n" + +"*GLOBAL*,creator_email,bob.simons@noaa.gov\n" + +"*GLOBAL*,creator_name,Bob Simons\n" + +"*GLOBAL*,creator_type,person\n" + +"*GLOBAL*,creator_url,https://www.pfeg.noaa.gov\n" + +"*GLOBAL*,featureType,trajectory\n" + +"*GLOBAL*,infoUrl,https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html\n" + +"*GLOBAL*,institution,\"NOAA NMFS SWFSC ERD, NOAA PMEL\"\n" + +"*GLOBAL*,keywords,\"NOAA, sea, ship, sst, surface, temperature, trajectory\"\n" + +"*GLOBAL*,license,\"\"\"NCCSV Demonstration\"\" by Bob Simons and Steve Hankin is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ .\"\n" + +"*GLOBAL*,standard_name_vocabulary,CF Standard Name Table v29\n" + +"*GLOBAL*,subsetVariables,ship\n" + +"*GLOBAL*,summary,This is a paragraph or two describing the dataset.\n" + +"*GLOBAL*,title,NCCSV Demonstration\n" + +"ship,*SCALAR*,\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\"\n" + +"ship,cf_role,trajectory_id\n" + +"time,*DATA_TYPE*,String\n" + +"time,standard_name,time\n" + +"time,units,M/d/YYYY H:mm:ss\n" + +"lat,*DATA_TYPE*,double\n" + +"lat,units,degrees_north\n" + +"lon,*DATA_TYPE*,double\n" + +"lon,units,degrees_east\n" + +"status,*DATA_TYPE*,char\n" + +"status,comment,\"From http://some.url.gov/someProjectDocument , Table C\"\n" + +"testLong,*DATA_TYPE*,long\n" + +"testLong,units,\"1\"\n" + +"sst,*DATA_TYPE*,float\n" + +"sst,actual_range,0.17f,23.58f\n" + +"sst,missing_value,99.0f\n" + +"sst,standard_name,sea_surface_temperature\n" + +"sst,testBytes,-128b,0b,127b\n" + +"sst,testChars,\"','\",\"'\"\"'\",\"'\\u20ac'\"\n" + 
+"sst,testDoubles,-1.7976931348623157E308d,0.0d,1.7976931348623157E308d\n" + +"sst,testFloats,-3.4028235E38f,0.0f,3.4028235E38f\n" + +"sst,testInts,-2147483648i,0i,2147483647i\n" + +"sst,testLongs,-9223372036854775808L,9223372036854775806L,9223372036854775807L\n" + +"sst,testShorts,-32768s,0s,32767s\n" + +"sst,testStrings,\" a\\t~\\u00fc,\\n'z\"\"\\u20ac\"\n" + +"sst,units,degrees_C\n" + +"\n" + +"*END_METADATA*\n" + +"time,lat,lon,status,testLong,sst\n" + +"3/23/2017 0:45:00,28.0002,-130.2576,A,-9223372036854775808L,10.9\n" + +"3/23/2017 1:45:00,28.0003,-130.3472,\\u20ac,-1234567890123456L,\n" + +"3/23/2017 2:45:00,28.0001,-130.4305,\\t,0L,10.7\n" + +"3/23/2017 12:45:00,27.9998,-131.5578,\"\"\"\",1234567890123456L,99.0\n" + +"3/23/2017 21:45:00,28.0003,-132.0014,\\u00fc,9223372036854775806L,10.0\n" + +"3/23/2017 23:45:00,28.0002,-132.1591,?,,\n" + +"*END_DATA*\n"; + try { + Test.ensureEqual(results, expected, "results=\n" + results); + } catch (Exception e) { + String2.pressEnterToContinue(MustBe.throwableToString(e) + + "\nKnown problem: how to keep integer in string att as a string."); + } + } + /** * This gets data from the IOBIS web site (http://www.iobis.org) * by mimicing the Advanced Search form @@ -3173,7 +3753,7 @@ public void readIobis(String url, String genus, String species, //read the data into a temporary table Table tTable = new Table(); - tTable.readASCII(url2, dataLines, 0, 1, //columnNamesLine, int dataStartLine, + tTable.readASCII(url2, dataLines, 0, 1, "", //columnNamesLine, int dataStartLine, colSeparator null, null, null, //constraints null, false); //just load all the columns, and don't simplify @@ -3571,9 +4151,9 @@ public static void testXml() throws Exception { false, //no validate since no .dtd "/wfs:FeatureCollection/gml:featureMember", null, false); //row attributes, simplify - results = table.dataToCSVString(3); + results = table.dataToString(3); expected = -"row,aasg:BoreholeTemperature/aasg:OBJECTID,aasg:BoreholeTemperature/aasg:ObservationURI,aasg:BoreholeTemperature/aasg:WellName,aasg:Bo" + +"aasg:BoreholeTemperature/aasg:OBJECTID,aasg:BoreholeTemperature/aasg:ObservationURI,aasg:BoreholeTemperature/aasg:WellName,aasg:Bo" + "reholeTemperature/aasg:APINo,aasg:BoreholeTemperature/aasg:HeaderURI,aasg:BoreholeTemperature/aasg:Label,aasg:BoreholeTemperature/aasg" + ":Operator,aasg:BoreholeTemperature/aasg:SpudDate,aasg:BoreholeTemperature/aasg:EndedDrillingDate,aasg:BoreholeTemperature/aasg:WellTyp" + "e,aasg:BoreholeTemperature/aasg:Status,aasg:BoreholeTemperature/aasg:CommodityOfInterest,aasg:BoreholeTemperature/aasg:Function,aasg:B" + @@ -3587,21 +4167,21 @@ public static void testXml() throws Exception { "eholeTemperature/aasg:MeasurementSource,aasg:BoreholeTemperature/aasg:RelatedResource,aasg:BoreholeTemperature/aasg:Shape/gml:Point/latitude" + ",aasg:BoreholeTemperature/aasg:Shape/gml:Point/longitude,aasg:BoreholeTemperature/aasg:TimeSinceCirculation,aasg:BoreholeTemperature/aasg:Oth" + "erName,aasg:BoreholeTemperature/aasg:LeaseName,aasg:BoreholeTemperature/aasg:Notes\n" + -"0,1,http://resources.usgin.org/uri-gin/wvges/bhtemp/4703501405_121/,Kaiser Exploration and Mining Co. (KEM Gas) K Donohew,4703501405," + +"1,http://resources.usgin.org/uri-gin/wvges/bhtemp/4703501405_121/,Kaiser Exploration and Mining Co. (KEM Gas) K Donohew,4703501405," + "http://resources.usgin.org/uri-gin/wvges/well/4703501405/,4703501405,Kaiser Exploration and Mining Co. 
(KEM Gas),1977-06-25T00:00:00,1" + "977-07-09T00:00:00,Gas,Missing,Missing,Missing,Missing,Missing,Providence,Jackson,West Virginia,38.796917999999998,-81.813635000000005" + ",NAD 83,Location recorded as received from official permit application converted to NAD83 if required,4840,G.L.,ft,vertical,4840,897,H" + "elderberg Group,121,F,Temperature log evaluated by WVGES staff for deepest stable log segment to extract data otherwise used given bot" + "tom hole temperature on log header if available,4176,Java Formation,Well Temperature Log,TL," + "38.796918000000062,-81.81363499999992,,,,\n" + -"1,2,http://resources.usgin.org/uri-gin/wvges/bhtemp/4703501400_97.5/,Kaiser Exploration and Mining Co. (KEM Gas) Roger Pinnell,470350" + +"2,http://resources.usgin.org/uri-gin/wvges/bhtemp/4703501400_97.5/,Kaiser Exploration and Mining Co. (KEM Gas) Roger Pinnell,470350" + "1400,http://resources.usgin.org/uri-gin/wvges/well/4703501400/,4703501400,Kaiser Exploration and Mining Co. (KEM Gas),1977-07-12T00:00" + ":00,1977-07-31T00:00:00,Gas,Missing,Missing,Missing,Missing,Missing,Providence,Jackson,West Virginia,38.850490999999998,-81.8098509999" + "99995,NAD 83,Location recorded as received from official permit application converted to NAD83 if required,4471,G.L.,ft,vertical,4471," + "612,Helderberg Group,97.5,F,Temperature log evaluated by WVGES staff for deepest stable log segment to extract data otherwise used giv" + "en bottom hole temperature on log header if available,4060,Angola: interbedded middle unit,Well Temperature Log,TL,38.850491000000034" + ",-81.809850999999924,,,,\n" + -"2,3,http://resources.usgin.org/uri-gin/wvges/bhtemp/4703501411_108/,Kaiser Exploration and Mining Co. (KEM Gas) Emma Bibbee,470350141" + +"3,http://resources.usgin.org/uri-gin/wvges/bhtemp/4703501411_108/,Kaiser Exploration and Mining Co. (KEM Gas) Emma Bibbee,470350141" + "1,http://resources.usgin.org/uri-gin/wvges/well/4703501411/,4703501411,Kaiser Exploration and Mining Co. 
(KEM Gas),1977-08-19T00:00:00" + ",1977-08-30T00:00:00,Gas,Missing,Missing,Missing,Missing,Missing,Mt Alto (Cottageville),Jackson,West Virginia,38.894705000000002,-81.7" + "99130000000005,NAD 83,Location recorded as received from official permit application converted to NAD83 if required,4301,G.L.,ft,verti" + @@ -3699,12 +4279,12 @@ public static void testXml() throws Exception { Test.ensureEqual(table.getStringData(9, 1), "BIO", ""); Test.ensureEqual(table.getStringData(9, 2), "", ""); -//a subset of http://opendap.co-ops.nos.noaa.gov/stations/stationsXML.jsp +//a subset of https://opendap.co-ops.nos.noaa.gov/stations/stationsXML.jsp String stationsXml = "\n" + -" \n" + +"xsi:schemaLocation=\"https://opendap.co-ops.nos.noaa.gov/stations/ xml_schemas/stations.xsd\"> \n" + "\n" + "\n" + "\n" + @@ -3984,7 +4564,7 @@ public static void testReadAwsXmlFile() throws Exception { String2.log("\nTable.testReadAwsXmlFile"); Table table = new Table(); table.readAwsXmlFile(String2.unitTestDataDir + "aws/xml/SNFLS-2012-11-03T20_30_01Z.xml"); - String results = table.toCSVString(); + String results = table.toString(); String expected = "{\n" + "dimensions:\n" + @@ -4112,8 +4692,8 @@ public static void testReadAwsXmlFile() throws Exception { "\n" + "// global attributes:\n" + "}\n" + -"row,ob-date,station-id,station,city-state-zip,city-state,site-url,aux-temp,aux-temp-rate,dew-point,elevation,feels-like,gust-time,gust-direction,gust-speed,humidity,humidity-high,humidity-low,humidity-rate,indoor-temp,indoor-temp-rate,light,light-rate,moon-phase-moon-phase-img,moon-phase,pressure,pressure-high,pressure-low,pressure-rate,rain-month,rain-rate,rain-rate-max,rain-today,rain-year,temp,temp-high,temp-low,temp-rate,sunrise,sunset,wet-bulb,wind-speed,wind-speed-avg,wind-direction,wind-direction-avg\n" + -"0,1.3519746E9,SNFLS,Exploratorium,94123,\"San Francisco, CA\",,32,0,54,0,67,1.3519746E9,E,8,63,100,63,-6,90,+4.6,67.9,-0.3,mphase16.gif,82,30.1,30.14,30.06,-0.01,0.21,0,0,0,1.76,66.9,67,52,3.8,1.351953497E9,1.351991286E9,59.162,0,2,ENE,E\n"; +"ob-date,station-id,station,city-state-zip,city-state,site-url,aux-temp,aux-temp-rate,dew-point,elevation,feels-like,gust-time,gust-direction,gust-speed,humidity,humidity-high,humidity-low,humidity-rate,indoor-temp,indoor-temp-rate,light,light-rate,moon-phase-moon-phase-img,moon-phase,pressure,pressure-high,pressure-low,pressure-rate,rain-month,rain-rate,rain-rate-max,rain-today,rain-year,temp,temp-high,temp-low,temp-rate,sunrise,sunset,wet-bulb,wind-speed,wind-speed-avg,wind-direction,wind-direction-avg\n" + +"1.3519746E9,SNFLS,Exploratorium,94123,\"San Francisco, CA\",,32,0,54,0,67,1.3519746E9,E,8,63,100,63,-6,90,+4.6,67.9,-0.3,mphase16.gif,82,30.1,30.14,30.06,-0.01,0.21,0,0,0,1.76,66.9,67,52,3.8,1.351953497E9,1.351991286E9,59.162,0,2,ENE,E\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(Calendar2.epochSecondsToIsoStringT(1.3519746E9), "2012-11-03T20:30:00", ""); @@ -4129,6 +4709,8 @@ public static void testReadAwsXmlFile() throws Exception { * but it doesn't need any rows of data. * See writeDAS. * + *

CharArray columns appear as String columns in DAP. + * * @param outputStream the outputStream to receive the results (will be encoded as ISO-8859-1). * Afterwards, it is flushed, not closed. * @param sequenceName e.g., "bottle_data_2002" @@ -4140,7 +4722,7 @@ public void saveAsDAS(OutputStream outputStream, String sequenceName) throws Exc long time = System.currentTimeMillis(); Writer writer = new OutputStreamWriter( //DAP 2.0 section 3.2.3 says US-ASCII (7bit), so might as well go for compatible common 8bit - outputStream, "ISO-8859-1"); + outputStream, String2.ISO_8859_1); writeDAS(writer, sequenceName, false); //diagnostic @@ -4154,7 +4736,7 @@ public void saveAsDAS(OutputStream outputStream, String sequenceName) throws Exc * to the outputStream as an DODS DAS (see www.opendap.org, DAP 2.0, 7.2.1). * Note that the table does needs columns (and their attributes), * but it doesn't need any rows of data. - * E.g. from http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle.das + * E.g. from https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle.das

 Attributes {
     bottle_data_2002 {
@@ -4182,6 +4764,8 @@ public void saveAsDAS(OutputStream outputStream, String sequenceName) throws Exc
     }
 }
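A hedged usage sketch (not from the patch) of producing such a DAS with the saveAsDAS method shown above; the table and file name are illustrative:

    //Assumes java.io.*; per the javadoc, saveAsDAS encodes as ISO-8859-1 and
    //flushes, but does not close, the stream, so the caller closes it.
    OutputStream out = new FileOutputStream("GLOBEC_bottle.das");
    table.saveAsDAS(out, "bottle_data_2002");
    out.close();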
 
+ * + *

CharArray columns appear as String columns in DAP. * * @param writer the Writer to receive the results. * Afterwards, it is flushed, not closed. @@ -4214,7 +4798,7 @@ public void writeDAS(Writer writer, String sequenceName, boolean encodeAsHtml) t * to the outputStream as an DODS DDS (see www.opendap.org, DAP 2.0, 7.2.2). * Note that the table does needs columns (and their attributes), * but it doesn't need any rows of data. - * E.g. from http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle.dds + * E.g. from https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle.dds

 Dataset {
     Sequence {
@@ -4230,6 +4814,8 @@ public void writeDAS(Writer writer, String sequenceName, boolean encodeAsHtml) t
     } bottle_data_2002;
 } bottle_data_2002; 
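To illustrate the note added just below (CharArray columns appear as String columns in DAP), a hedged sketch, not from the patch; the expected declaration is inferred from the example above, not from a run:

    //A char column should be declared as a DAP String in the DDS.
    Table t = new Table();
    t.addColumn("status", new CharArray(new char[]{'A', '?'}));
    t.saveAsDDS(System.out, "bottle_data_2002"); //expect "String status;" in the Sequence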
* + *

CharArray columns appear as String columns in DAP. + * * @param outputStream the outputStream to receive the results. * Afterwards, it is flushed, not closed. * @param sequenceName e.g., "bottle_data_2002" @@ -4263,7 +4849,7 @@ public void saveAsDDS(OutputStream outputStream, String sequenceName) throws Exc * This writes the table's data structure (as if it were a DODS Sequence) * to the outputStream as DODS ASCII data (which is not defined in DAP 2.0, * but which is very close to saveAsDODS below). - * This mimics http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle.asc?lon,ship,cast,t0,NO3&lon<-125.7 + * This mimics https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle.asc?lon,ship,cast,t0,NO3&lon<-125.7 * *
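As an aside, a hedged illustration (not part of the patch) of the DODS Appendix A quoted-string rule that this method applies to char and String values (see isCharOrString in the hunk below):

    String s = "say \"hi\"";
    s = "\"" + String2.replaceAll(s, "\"", "\\\"") + "\"";
    //s is now: "say \"hi\"" (wrapped in quotes, inner quotes backslash-escaped)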

This sends missing values as is. * This doesn't call convertToFakeMissingValues. Do it beforehand if you need to. @@ -4288,9 +4874,11 @@ public void saveAsDodsAscii(OutputStream outputStream, String sequenceName) thro //write the column names int nColumns = nColumns(); int nRows = nRows(); - boolean isStringCol[] = new boolean[nColumns]; + boolean isCharOrString[] = new boolean[nColumns]; for (int col = 0; col < nColumns; col++) { - isStringCol[col] = getColumn(col).elementClass() == String.class; + isCharOrString[col] = + getColumn(col).elementClass() == char.class || + getColumn(col).elementClass() == String.class; writer.write(getColumnName(col) + (col == nColumns - 1? OpendapHelper.EOL : ", ")); } @@ -4300,7 +4888,7 @@ public void saveAsDodsAscii(OutputStream outputStream, String sequenceName) thro for (int row = 0; row < nRows; row++) { for (int col = 0; col < nColumns; col++) { String s = getColumn(col).getString(row); - if (isStringCol[col]) //see DODS Appendix A, quoted-string + if (isCharOrString[col]) //see DODS Appendix A, quoted-string s = "\"" + String2.replaceAll(s, "\"", "\\\"") + "\""; writer.write(s + (col == nColumns - 1? OpendapHelper.EOL : ", ")); } @@ -4358,7 +4946,7 @@ public void saveAsDODS(OutputStream outputStream, String sequenceName) throws Ex /** - * This is like the other saveAsHtml, but saves to a file. + * This is like the other saveAsHtml, but saves to a UTF-8 file. * * @param fullFileName the complete file name (including directory and * extension, usually ".htm" or ".html"). @@ -4442,7 +5030,8 @@ public void saveAsHtml(OutputStream outputStream, String fileNameNoExt, long time = System.currentTimeMillis(); //write the header - BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(outputStream)); + BufferedWriter writer = new BufferedWriter( + new OutputStreamWriter(outputStream, String2.UTF_8)); writer.write( "\n" + @@ -4589,7 +5178,7 @@ public void saveAsHtmlTable(Writer writer, String otherClasses, String bgColor, public void readHtml(String fullFileName, int skipNTables, boolean secondRowHasUnits, boolean simplify) throws Exception { - String sar[] = String2.readFromFile(fullFileName, null, 2); + String sar[] = String2.readFromFile(fullFileName, String2.UTF_8, 2); Test.ensureEqual(sar[0].length(), 0, sar[0]); //check that there was no error //String2.log(String2.annotatedString(sar[1])); readHtml(fullFileName, sar[1], skipNTables, secondRowHasUnits, simplify); @@ -4688,6 +5277,9 @@ public void readHtml(String fullFileName, String html, int skipNTables, endTagPo = row.length(); String datum = XML.decodeEntities( row.substring(gtPo + 1, endTagPo).trim()); + //String2.log(">> row=" + nRows + " col=" + tCol + " datum=" + String2.annotatedString(datum)); + if ("\u00a0".equals(datum)) //nbsp + datum = ""; if (nRows == 0) { //if first row, add a column @@ -4739,6 +5331,240 @@ public void readHtml(String fullFileName, String html, int skipNTables, (System.currentTimeMillis() - time)); } + /** + * This encodes the values of an Attributes before saveAsEnhancedFlatNc. 
+ */ + void encodeEnhancedAttributes(Attributes atts) { + String names[] = atts.getNames(); + int n = names.length; + for (int i = 0; i < n; i++) { + PrimitiveArray pa = atts.get(names[i]); + if (pa instanceof CharArray) { + atts.remove(names[i]); + atts.set("_encodedCharArray_" + names[i], + ShortArray.fromCharArrayBytes((CharArray)pa)); + } else if (pa instanceof LongArray) { + atts.remove(names[i]); + atts.set("_encodedLongArray_" + names[i], + new StringArray(new String[]{pa.toString()})); + //Even nc3 saves attributes via utf-8 + //} else if (pa instanceof StringArray) { + // atts.remove(names[i]); + // atts.set("_encodedStringArray_" + names[i], + // (new StringArray(pa)).toJson()); //change a copy of pa + } + } + } + + /** + * This decodes the values of an Attributes after readEnhancedFlatNc. + */ + void decodeEnhancedAttributes(Attributes atts) { + String names[] = atts.getNames(); + int n = names.length; + for (int i = 0; i < n; i++) { + if (names[i].startsWith("_encoded")) { + PrimitiveArray pa = atts.get(names[i]); + if (pa instanceof ShortArray && + names[i].startsWith("_encodedCharArray_")) { + atts.remove(names[i]); + atts.set(names[i].substring(18), + CharArray.fromShortArrayBytes((ShortArray)pa)); + } else if (pa instanceof StringArray && + names[i].startsWith("_encodedLongArray_")) { + atts.remove(names[i]); + atts.set(names[i].substring(18), + PrimitiveArray.csvFactory(long.class, pa.getString(0))); + //Even nc3 saves attributes via utf-8 + //} else if (pa instanceof StringArray && + // names[i].startsWith("_encodedStringArray_")) { + // atts.remove(names[i]); + // atts.set(names[i].substring(20), + // ((StringArray)pa).fromJson()); //actually a newline separated string + } + } + } + } + + /** + * This writes this table as a Bob enhanced flatNc file + * (supports longs, 2-byte chars, and utf-8 strings, for data and attribute values). + * The table is temporarily modified, but all changes are undone when this is finished. + * + * @throws exception if trouble + */ + public void saveAsEnhancedFlatNc(String fullName) throws Exception { + + //Important: make a new table and make changes to it (even if temporary). + // Some other thread may be using this table. + + //encode things + Table newTable = new Table(); + newTable.globalAttributes().add(globalAttributes); + newTable.globalAttributes().add("_enhanced_version_", ENHANCED_VERSION); + encodeEnhancedAttributes(newTable.globalAttributes()); + + int nCols = nColumns(); + int nRows = nRows(); + for (int col = 0; col < nCols; col++) { + PrimitiveArray pa = getColumn(col); + Attributes atts = (Attributes)(columnAttributes(col).clone()); + newTable.addColumn(col, getColumnName(col), pa, atts); + + if (pa instanceof CharArray) { + atts.set("_Unsigned", "true"); //netcdf recommendation + newTable.setColumn(col, ShortArray.fromCharArrayBytes((CharArray)pa)); + + } else if (pa instanceof LongArray) { + atts.set("_encoded_", "fromLong"); + newTable.setColumn(col, new StringArray(pa)); + + } else if (pa instanceof StringArray) { + atts.set("_encoded_", String2.JSON); //not _Encoding because encodeEnhancedAtts will change it + newTable.setColumn(col, (new StringArray(pa)).toJson()); //change a copy of pa + } + + encodeEnhancedAttributes(atts); + } + + //save newTable + newTable.saveAsFlatNc(fullName, "row", false); //convertToStandardMissingValues + } + + + /** + * This reads Bob's enhanced flatNc file (supports longs, 2-byte chars, + * and utf-8 strings for data and attribute values) + * into this table (replacing current contents). 
+ * + * @param loadColumns a list of column names, or null for all + * @return enhancedVersion This is for informational purposes. + * Normally, all the enhanced encoding/decoding is dealt with here. + * This returns + *
< ENHANCED_VERSION (e.g., 0, i.e. out-of-date) + * if not enhanced or not up to current standards. + *
ENHANCED_VERSION if presumably all okay. + *
>ENHANCED_VERSION if from a future version + * (with significant changes that aren't caught here). + * @throws exception if trouble + */ + public int readEnhancedFlatNc(String fullName, String loadColumns[]) throws Exception { + + lowReadFlatNc(fullName, loadColumns, 0, -1); + + //decode + decodeEnhancedAttributes(globalAttributes); + int enhVersion = globalAttributes.getInt("_enhanced_version_"); + if (enhVersion == Integer.MAX_VALUE) + enhVersion = 0; + globalAttributes.remove("_enhanced_version_"); + + int nCols = nColumns(); + int nRows = nRows(); + for (int col = 0; col < nCols; col++) { + PrimitiveArray pa = getColumn(col); + Attributes atts = columnAttributes(col); + decodeEnhancedAttributes(atts); + + if (pa instanceof CharArray) { + //trouble! significant info loss. There shouldn't be any CharArray + enhVersion = 0; //trouble. must be v0 + + } else if (pa instanceof ShortArray) { + if ("true".equals(atts.getString("_Unsigned"))) { //netcdf recommendation + //convert unsigned short to char + atts.remove("_Unsigned"); + setColumn(col, CharArray.fromShortArrayBytes((ShortArray)pa)); + } + + } else if (pa instanceof StringArray) { + String enc = atts.getString("_encoded_"); + if ("fromLong".equals(enc)) { + //convert longs encoded as doubles back to longs + atts.remove("_encoded_"); + setColumn(col, new LongArray(pa)); + } else if (String2.JSON.equals(enc)) { + //convert UTF-8 back to unicode + atts.remove("_encoded_"); + ((StringArray)pa).fromJson(); + } else { + //significant info loss. + //there shouldn't be any StringArray without that encoding + enhVersion = 0; //trouble. must be v0 + } + } + } + return enhVersion; + } + + /** + * This makes a tough test table. + */ + public static Table makeToughTestTable() { + + Table table = new Table(); + + Attributes gatts = table.globalAttributes(); + gatts.add("tests", "a\u1f63b\nc\td\ufffez"); + gatts.add("testc", '\u1f63'); + gatts.add("testl", new LongArray(new long[]{Long.MIN_VALUE, Long.MAX_VALUE})); + gatts.add("testi", new IntArray(new int[]{Integer.MIN_VALUE, Integer.MAX_VALUE})); + + table.addColumn(0, "aString", + new StringArray(new String[]{"a\u1f63b\nc\td\ufffee", "ab", ""}), + (new Attributes()).add("test", "a\u1f63b\nc\td\ufffee")); + table.addColumn(1, "aChar", + new CharArray(new char[]{'\u1f63', (char)0, '\ufffe'}), + (new Attributes()).add("test", '\u1f63')); + table.addColumn(2, "aByte", + new ByteArray(new byte[]{Byte.MIN_VALUE, 0, Byte.MAX_VALUE}), + (new Attributes()).add("test", Byte.MIN_VALUE)); + table.addColumn(3, "aShort", + new ShortArray(new short[]{Short.MIN_VALUE, 0, Short.MAX_VALUE}), + (new Attributes()).add("test", Short.MIN_VALUE)); + table.addColumn(4, "anInt", + new IntArray(new int[]{Integer.MIN_VALUE, 0, Integer.MAX_VALUE}), + (new Attributes()).add("test", Integer.MIN_VALUE)); + table.addColumn(5, "aLong", + new LongArray(new long[]{Long.MIN_VALUE, 0, Long.MAX_VALUE}), + (new Attributes()).add("test", Long.MIN_VALUE)); + table.addColumn(6, "aFloat", + new FloatArray(new float[]{-Float.MAX_VALUE, Float.MIN_VALUE, Float.MAX_VALUE}), + (new Attributes()).add("test", -Float.MAX_VALUE)); + table.addColumn(7, "aDouble", + new DoubleArray(new double[]{-Double.MAX_VALUE, Double.MIN_VALUE, Double.MAX_VALUE}), + (new Attributes()).add("test", -Double.MAX_VALUE)); + return table; + + } + + + /** + * This tests saveAsEnhancedFlatNcFile and readEnhancedFlatNcFile. 
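 *
 * <p>A minimal usage sketch (the temp-file name is hypothetical):
 * <pre>
 * Table t = makeToughTestTable();
 * String tmp = File2.getSystemTempDirectory() + "sketch.nc";
 * t.saveAsEnhancedFlatNc(tmp);
 * Table t2 = new Table();
 * int v = t2.readEnhancedFlatNc(tmp, null); //null = load all columns
 * if (v < ENHANCED_VERSION)
 *     String2.log("old or non-enhanced file"); //see readEnhancedFlatNc's @return
 * </pre>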
+ */ + public static void testEnhancedFlatNcFile() throws Exception { + + String2.log("\n*** Table.testEnhancedFlatNcFile()"); + String results, expected; + String fileName = File2.getSystemTempDirectory() + "enhancedFlatNcFile.nc"; + + Table table = makeToughTestTable(); + expected = String2.annotatedString(table.toString()); + String2.log("expected=\n" + expected); + + table.saveAsEnhancedFlatNc(fileName); + results = String2.annotatedString(table.toString()); + Test.ensureEqual(results, expected, "a"); + table.clear(); + + table.readEnhancedFlatNc(fileName, null); + table.globalAttributes().remove("id"); + results = String2.annotatedString(table.toString()); + Test.ensureEqual(results, expected, "b"); + + } + + /** * This reads all rows of all of the specified columns in a flat .nc file @@ -4797,7 +5623,7 @@ public void lowReadFlatNc(String fullName, String loadColumns[], int unpack, clear(); appendNcRows(loadVariables, 0, lastRow); NcHelper.getGlobalAttributes(netcdfFile, globalAttributes()); - for (int col = 0; col < loadVariables.length; col++) + for (int col = 0; col < loadVariables.length; col++) NcHelper.getVariableAttributes(loadVariables[col], columnAttributes(col)); //I care about this exception @@ -4822,10 +5648,65 @@ public void lowReadFlatNc(String fullName, String loadColumns[], int unpack, //convert to standard MissingValues convertToStandardMissingValues(); + + decodeCharsAndStrings(); + if (verbose) String2.log(" Table.readFlatNc done. nColumns=" + nColumns() + " nRows=" + nRows() + " TIME=" + (System.currentTimeMillis() - time)); } + /** + * This is commonly used by nc readers to decode any UTF-8 encoded + * strings before returning the table. + * + *

There is similar code in GridDataAccessor and Table.decodeCharsAndStrings(). + */ + public void decodeCharsAndStrings() { + int nc = nColumns(); + for (int col = 0; col < nc; col++) { + PrimitiveArray pa = getColumn(col); + Attributes atts = columnAttributes(col); + String enc = atts.getString(String2.ENCODING); + atts.remove(String2.ENCODING); +// disabled until there is a standard +// String charset = atts.getString(String2.CHARSET); +// atts.remove(String2.CHARSET); + + //charset +// if (String2.isSomething(charset)) { +// //check that it is CharArray and 8859-1 +// if (pa.elementClass() != char.class) +// setColumn(col, new CharArray(pa)); //too bold? +// if (!charset.toLowerCase().equals(String2.ISO_8859_1_LC)) +// String2.log("col=" + getColumnName(col) + " has unexpected " + +// String2.CHARSET + "=" + charset); +// continue; +// } + + //encoding + if (pa.elementClass() != String.class || + !String2.isSomething(enc)) + continue; + enc = enc.toLowerCase(); + + //decode + if (enc.toLowerCase().equals(String2.UTF_8_LC)) { + //UTF-8 + ((StringArray)pa).fromUTF8(); + + } else if (enc.toLowerCase().equals(String2.ISO_8859_1_LC)) { + //unchanged ISO-8859-1 becomes the first page of unicode encoded strings + + } else { + String2.log("col=" + getColumnName(col) + " has unexpected " + + String2.ENCODING + "=" + enc); + } + + //currently, OTHER ENCODINGS ARE NOT HANDLED + //JUST LEAVE THE ATTRIBUTE AND VALUE + } + } + /** * This reads the 1D variables from a .nc file *and* the scalar (0D) values * (duplicated to have the same number of rows). @@ -4871,10 +5752,9 @@ public void readFlat0Nc(String fullName, String loadColumns[], int unpack, boolean isChar = var.getDataType() == DataType.CHAR; if (var.getRank() + (isChar? -1 : 0) == 0) { PrimitiveArray pa = NcHelper.getPrimitiveArray(var.read()); - //technically, shouldn't trim. - //But more likely problem is source meant to trim but didn't. + //nc allows strings to be 0-terminated or padded with spaces, so always trimEnd if (pa instanceof StringArray) - pa.setString(0, pa.getString(0).trim()); + pa.setString(0, String2.trimEnd(pa.getString(0))); if (tnRows > 1) { if (pa instanceof StringArray) pa.addNStrings(tnRows-1, pa.getString(0)); @@ -4909,6 +5789,8 @@ public void readFlat0Nc(String fullName, String loadColumns[], int unpack, //convert to standard MissingValues convertToStandardMissingValues(); + decodeCharsAndStrings(); + if (verbose) String2.log(" Table.readFlat0Nc done. nColumns=" + nColumns() + " nRows=" + nRows() + " TIME=" + (System.currentTimeMillis() - time)); } @@ -5087,6 +5969,9 @@ public void read4DNc(String fullName, String loadColumns[], int unpack, //convert to standard MissingValues convertToStandardMissingValues(); + + decodeCharsAndStrings(); + if (verbose) String2.log(" Table.read4DNc done. nColumns=" + nColumns() + " nRows=" + nRows() + " TIME=" + (System.currentTimeMillis() - time)); } @@ -5186,6 +6071,7 @@ public void readNDNc(String fullName, String loadVariableNames[], for (int i = 0; i < nDims; i++) iaa[i].add(current[i]); } + decodeCharsAndStrings(); return; } } @@ -5443,10 +6329,9 @@ public void readNDNc(String fullName, String loadVariableNames[], //read it PrimitiveArray pa = NcHelper.getPrimitiveArray(var.read()); - //technically, shouldn't trim. - //But more likely problem is source meant to trim but didn't. 
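The replacement below is deliberate: nc fixed-width char data is right-padded with NULs or spaces, so only trailing whitespace should be stripped and any leading spaces preserved. A minimal sketch (the value is hypothetical) of the difference, plus the _Encoding handling added by decodeCharsAndStrings() above:

    //sketch only; the value is hypothetical
    String fromNc = "  M2 buoy   ";          //as read from a padded nc char array
    String oldWay = fromNc.trim();           //"M2 buoy"   -- leading spaces lost
    String newWay = String2.trimEnd(fromNc); //"  M2 buoy" -- only padding removed
    //decodeCharsAndStrings() removes a String column's _Encoding attribute and,
    //if it was "UTF-8", decodes the column in place via ((StringArray)pa).fromUTF8();
    //"ISO-8859-1" needs no conversion; other values are just logged.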
+ //nc allows strings to be 0-terminated or padded with spaces, so always trimEnd if (pa instanceof StringArray) - pa.setString(0, pa.getString(0).trim()); + pa.setString(0, String2.trimEnd(pa.getString(0))); if (tnRows > 1) { if (pa instanceof StringArray) pa.addNStrings(tnRows-1, pa.getString(0)); @@ -5462,6 +6347,7 @@ public void readNDNc(String fullName, String loadVariableNames[], //I do care if this throws exception ncFile.close(); + decodeCharsAndStrings(); } catch (Exception e) { //make sure ncFile is explicitly closed @@ -5484,6 +6370,7 @@ public void readNDNc(String fullName, String loadVariableNames[], * One difference between using this and readNcCF: this doesn't require/expect * that the file follows the nc CF DSG MA standard. *
This does not unpack the values or convert to standardMissingValues. + *
For strings, this always calls String2.trimEnd(s) * * @param fullName This may be a local file name, an "http:" address of a * .nc file, an .ncml file (which must end with ".ncml"), or an opendap url. @@ -5508,7 +6395,6 @@ public void readNDNc(String fullName, String loadVariableNames[], * So if you want to get just the scalar vars, request a nonexistent * dimension (e.g., ZZTOP). * @param getMetadata if true, global and variable metadata is read - * @param trimStrings if true, string values are trimmed. * @param removeMVRows This removes any block of rows at the * end of a group where all the values are missing_value, _FillValue, * or the CoHort ...Array native missing value (or char=#32 for CharArrays). @@ -5531,7 +6417,8 @@ public void readNDNc(String fullName, String loadVariableNames[], */ public void readMultidimNc(String fullName, StringArray loadVarNames, StringArray loadDimNames, - boolean getMetadata, boolean trimStrings, boolean removeMVRows, + boolean getMetadata, //before 2016-11-29, this had a boolean trimStrings parameter, now it always trimEnd's all strings + boolean removeMVRows, StringArray conVars, StringArray conOps, StringArray conVals) throws Exception { //clear the table @@ -5736,8 +6623,8 @@ public void readMultidimNc(String fullName, //test it PrimitiveArray pa = NcHelper.getPrimitiveArray(tVar, isString); - if (trimStrings && pa instanceof StringArray) - ((StringArray)pa).trimAll(); + if (pa instanceof StringArray) + ((StringArray)pa).trimEndAll(); knownPAs[v] = pa; BitSet keep = new BitSet(); keep.set(0, pa.size()); @@ -5804,19 +6691,18 @@ public void readMultidimNc(String fullName, PrimitiveArray pa = knownPAs[v]; //v is loadVars v if (knownPAs[v] == null) { pa = NcHelper.getPrimitiveArray(tVar, isString); - if (trimStrings && pa instanceof StringArray) - ((StringArray)pa).trimAll(); + if (pa instanceof StringArray) + ((StringArray)pa).trimEndAll(); } knownPAs[v] = null; loaded.set(v); -//FUTURE: be smarter? just trim values that are STRING_LENGTH long? - if (pa instanceof StringArray && trimStrings) - ((StringArray)pa).trimAll(); + if (pa instanceof StringArray) + ((StringArray)pa).trimEndAll(); addColumn(nColumns(), tVar.getFullName(), pa, atts); } if (debugMode) String2.log(Math2.memoryString() + "\n" + ">> this table after load varsWithAllDims:\n" + - dataToCSVString(5)); + dataToString(5)); //if loadDims size is 0, we're done because all scalars have been read if (loadDims.size() == 0) { @@ -5856,7 +6742,7 @@ public void readMultidimNc(String fullName, allIndicesTable.addIndexColumns(shape); if (debugMode) String2.log(Math2.memoryString() + "\n" + ">> allIndicesTable=" + - allIndicesTable.dataToCSVString(5)); + allIndicesTable.dataToString(5)); //*** removeMVRows @@ -5886,8 +6772,8 @@ public void readMultidimNc(String fullName, //don't use knownPAs here: different vars and different v's. PrimitiveArray pa = NcHelper.getPrimitiveArray(tVar, isString); //FUTURE: be smarter? just trim values that are STRING_LENGTH long? 
- if (trimStrings && pa instanceof StringArray) - ((StringArray)pa).trimAll(); + if (pa instanceof StringArray) + ((StringArray)pa).trimEndAll(); addColumn(nColumns(), tVar.getFullName(), pa, atts); } @@ -5965,7 +6851,8 @@ public void readMultidimNc(String fullName, break; } } else { - String s = columns.get(c).getString(row).trim(); + //nc allows strings to be 0-terminated or padded with spaces, so always trimEnd + String s = String2.trimEnd(columns.get(c).getString(row)); if (s.length() > 0) { hasData = true; break; @@ -5980,16 +6867,15 @@ public void readMultidimNc(String fullName, keep.clear(row); } } - if (debugMode) { - String2.log(">> removeMVRows nRows before=" + onRows + + if (debugMode) { String2.log(">> removeMVRows nRows before=" + onRows + " after=" + keep.cardinality()); //one time debugging: if (false) { PrimitiveArray pa = getColumn(nLoadDims); for (int row = 0; row < onRows; row++) { if (keep.get(row) && pa.getDouble(row) == -99999) - String2.log(">> remaining row with mv:\n" + - dataToCSVString(row-1, row+2, true)); + String2.log(">> remaining row with mv:\n" + //in debugMode + dataToString(row-1, row+2)); } } } @@ -6053,7 +6939,7 @@ public void readMultidimNc(String fullName, //if (debugMode) { // String2.log(">> v=" + v + " cDims==null?" + (cDims==null) + // " lut: nCols=" + lut.nColumns() + " nRows=" + lut.nRows()); - // String2.log(">> lut=" + lut.dataToCSVString(5)); + // String2.log(">> lut=" + lut.dataToString(5)); //} //look for an unloaded var (and other vars with same dimensions) @@ -6107,14 +6993,13 @@ public void readMultidimNc(String fullName, PrimitiveArray pa = knownPAs[v]; //v is loadVars v if (pa == null) { pa = NcHelper.getPrimitiveArray(tVar, isString); - if (trimStrings && pa instanceof StringArray) - ((StringArray)pa).trimAll(); + if (pa instanceof StringArray) + ((StringArray)pa).trimEndAll(); } knownPAs[v] = null; loaded.set(v); -//FUTURE: be smarter? just trim values that are STRING_LENGTH long? 
- if (pa instanceof StringArray && trimStrings) - ((StringArray)pa).trimAll(); + if (pa instanceof StringArray) + ((StringArray)pa).trimEndAll(); Attributes atts = new Attributes(); if (getMetadata) NcHelper.getVariableAttributes(tVar, atts); @@ -6143,12 +7028,12 @@ public void readMultidimNc(String fullName, } //JOIN lut into main table - //if (debugMode) String2.log(">> lut=\n" + lut.dataToCSVString(5)); + //if (debugMode) String2.log(">> lut=\n" + lut.dataToString(5)); int nMatchingCols = Math.max(1, ncDims); //even scalars have 1 matching column BitSet keep = join(nMatchingCols, 0, "", lut); //"" = mvKey not needed //remove the index columns from the main table removeColumns(0, nMatchingCols); - //if (debugMode) String2.log(">> this table after join:\n" + dataToCSVString(5)); + //if (debugMode) String2.log(">> this table after join:\n" + dataToString(5)); //remove unmatched rows int tnRows = keep.cardinality(); @@ -6166,7 +7051,8 @@ public void readMultidimNc(String fullName, //this will be either the order that was requested, or their order in the file reorderColumns(loadVarNames, false); //discardOthers=false, should be irrelevant - + + decodeCharsAndStrings(); } finally { //make sure ncFile is explicitly closed @@ -6213,7 +7099,7 @@ public void addIndexColumns(int shape[]) { public static void testAddIndexColumns() throws Exception { Table table = new Table(); table.addIndexColumns(new int[]{3,2,4}); - String results = table.dataToCSVString(); + String results = table.dataToString(); String expected = "_index_0,_index_1,_index_2\n" + "0,0,0\n" + @@ -6260,22 +7146,22 @@ public static void testReadMultidimNc() throws Exception { //** don't specify varNames or dimNames -- it find vars with most dims table.readMultidimNc(fiName, new StringArray(), new StringArray(), - true, true, false, //readMetadata, trimStrings, removeMVRows + true, false, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3); + results = table.dataToString(3); expectedStart = //static vars and vars like char SCIENTIFIC_CALIB_COEFFICIENT(N_PROF=254, N_CALIB=1, N_PARAM=3, STRING256=256); -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,STATION_PARAMETERS,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PARAMETER,SCIENTIFIC_CALIB_EQUATION,SCIENTIFIC_CALIB_COEFFICIENT,SCIENTIFIC_CALIB_COMMENT,SCIENTIFIC_CALIB_DATE\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PRES,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,PRES,PRES_ADJUSTED = PRES - dP,dP = 0.1 dbar.,Pressures adjusted by using pressure offset at the sea surface. 
The quoted error is manufacturer specified accuracy in dbar.,20110628060155\n" + -"1,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,TEMP,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,TEMP,none,none,The quoted error is manufacturer specified accuracy with respect to ITS-90 at time of laboratory calibration.,20110628060155\n" + -"2,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PSAL,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,PSAL,\"PSAL_ADJUSTED = sw_salt( sw_cndr(PSAL,TEMP,PRES), TEMP, PRES_ADJUSTED ); PSAL_ADJ corrects conductivity cell therm mass (CTM), Johnson et al, 2007, JAOT;\",\"same as for PRES_ADJUSTED; CTL: alpha=0.0267, tau=18.6;\",No significant salinity drift detected; SBE sensor accuracy,20110628060155\n" + +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,STATION_PARAMETERS,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PARAMETER,SCIENTIFIC_CALIB_EQUATION,SCIENTIFIC_CALIB_COEFFICIENT,SCIENTIFIC_CALIB_COMMENT,SCIENTIFIC_CALIB_DATE\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PRES,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,PRES,PRES_ADJUSTED = PRES - dP,dP = 0.1 dbar.,Pressures adjusted by using pressure offset at the sea surface. 
The quoted error is manufacturer specified accuracy in dbar.,20110628060155\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,TEMP,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,TEMP,none,none,The quoted error is manufacturer specified accuracy with respect to ITS-90 at time of laboratory calibration.,20110628060155\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PSAL,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,PSAL,\"PSAL_ADJUSTED = sw_salt( sw_cndr(PSAL,TEMP,PRES), TEMP, PRES_ADJUSTED ); PSAL_ADJ corrects conductivity cell therm mass (CTM), Johnson et al, 2007, JAOT;\",\"same as for PRES_ADJUSTED; CTL: alpha=0.0267, tau=18.6;\",No significant salinity drift detected; SBE sensor accuracy,20110628060155\n" + "...\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 762, "nRows"); //254*3 //* same but quick reject based on constraint table.readMultidimNc(fiName, new StringArray(), new StringArray(), - true, true, false, //readMetadata, trimStrings, removeMVRows + true, false, //readMetadata, removeMVRows StringArray.fromCSV("FORMAT_VERSION,FORMAT_VERSION"), //conVars StringArray.fromCSV("=,="), //conOps StringArray.fromCSV("3.1,3.2")); //conVals @@ -6284,9 +7170,9 @@ public static void testReadMultidimNc() throws Exception { //* test don't removeMVRows table.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), - true, true, false, //readMetadata, trimStrings, removeMVRows + true, false, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.toCSVString(3); + results = table.toString(3); expectedStart = "{\n" + "dimensions:\n" + @@ -6530,49 +7416,49 @@ public static void testReadMultidimNc() throws Exception { "}\n"; Test.ensureEqual(results.substring(0, expectedStart.length()), expectedStart, "results=\n" + results); - results = table.dataToCSVString(3); + results = table.dataToString(3); expectedStart = -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,5.9,49,5.8,49,2.4,24.989,49,24.989,49,0.002,34.555,49,34.55511,49,0.01\n" + -"1,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING 
XU,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,10.0,49,9.9,49,2.4,24.99,49,24.99,49,0.002,34.554,49,34.55505,49,0.01\n" + -"2,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,20.1,49,20.0,49,2.4,24.69,49,24.69,49,0.002,34.56,49,34.56191,49,0.01\n" + +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,5.9,1,5.8,1,2.4,24.989,1,24.989,1,0.002,34.555,1,34.55511,1,0.01\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,10.0,1,9.9,1,2.4,24.99,1,24.99,1,0.002,34.554,1,34.55505,1,0.01\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,20.1,1,20.0,1,2.4,24.69,1,24.69,1,0.002,34.56,1,34.56191,1,0.01\n" + "...\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 18034, "nRows"); //254*71 //and the end of that table table.removeRows(0, table.nRows() - 3); - results = table.dataToCSVString(5, true); + results = table.dataToString(5); expectedEnd = -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,1899.9,49,1899.3,49,99999.0,2.055,49,2.055,49,99999.0,34.612,49,34.612,49,99999.0\n" + -"1,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING 
XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,1950.0,49,1949.4,49,99999.0,2.014,49,2.014,49,99999.0,34.617,49,34.617,49,99999.0\n" + -"2,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,99999.0,32,99999.0,32,99999.0,99999.0,32,99999.0,32,99999.0,99999.0,32,99999.0,32,99999.0\n"; +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1899.9,1,1899.3,1,99999.0,2.055,1,2.055,1,99999.0,34.612,1,34.612,1,99999.0\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1950.0,1,1949.4,1,99999.0,2.014,1,2.014,1,99999.0,34.617,1,34.617,1,99999.0\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,99999.0,\" \",99999.0,\" \",99999.0,99999.0,\" \",99999.0,\" \",99999.0,99999.0,\" \",99999.0,\" \",99999.0\n"; Test.ensureEqual(results, expectedEnd, "results=\n" + results); //* test do removeMVRows table.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //and test data at the end of that table table.removeRows(0, table.nRows() - 3); - results = table.dataToCSVString(5, true); + results = table.dataToString(5); expectedEnd = 
-"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,1850.0,49,1849.4,49,99999.0,2.106,49,2.106,49,99999.0,34.604,49,34.604,49,99999.0\n" + -"1,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,1899.9,49,1899.3,49,99999.0,2.055,49,2.055,49,99999.0,34.612,49,34.612,49,99999.0\n" + -"2,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,1950.0,49,1949.4,49,99999.0,2.014,49,2.014,49,99999.0,34.617,49,34.617,49,99999.0\n"; +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1850.0,1,1849.4,1,99999.0,2.106,1,2.106,1,99999.0,34.604,1,34.604,1,99999.0\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1899.9,1,1899.3,1,99999.0,2.055,1,2.055,1,99999.0,34.612,1,34.612,1,99999.0\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1950.0,1,1949.4,1,99999.0,2.014,1,2.014,1,99999.0,34.617,1,34.617,1,99999.0\n"; Test.ensureEqual(results, expectedEnd, "results=\n" + results); //* same but quick reject based on constraint LAT,LON 26.587,154.853 //*** this takes 9ms while test above takes 99ms! 
table.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows StringArray.fromCSV("LATITUDE"), //conVars StringArray.fromCSV("="), //conOps StringArray.fromCSV("45")); //conVals @@ -6580,15 +7466,15 @@ public static void testReadMultidimNc() throws Exception { //* test different dim order (should be rearranged so the same) table.readMultidimNc(fiName, null, StringArray.fromCSV("N_LEVELS, ZZTOP, N_PROF"), - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //and test data at the end of that table table.removeRows(0, table.nRows() - 3); - results = table.dataToCSVString(5, true); + results = table.dataToString(5); Test.ensureEqual(results, expectedEnd, "results=\n" + results); //* test read all and constrain PLATFORM_NUMBER @@ -6606,13 +7492,13 @@ public static void testReadMultidimNc() throws Exception { "PRES_ADJUSTED_ERROR, TEMP, TEMP_QC, TEMP_ADJUSTED, TEMP_ADJUSTED_QC, " + "TEMP_ADJUSTED_ERROR, PSAL, PSAL_QC, PSAL_ADJUSTED, PSAL_ADJUSTED_QC, " + "PSAL_ADJUSTED_ERROR"), null, - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows StringArray.fromCSV("PLATFORM_NUMBER"), //conVars, conOps, conVals StringArray.fromCSV("="), StringArray.fromCSV("2901175")); - results = table.dataToCSVString(3, true); + results = table.dataToString(3); expectedStart = -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION," + +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION," + "DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE," + "DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO," + "FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE," + @@ -6621,23 +7507,23 @@ public static void testReadMultidimNc() throws Exception { "PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC," + "TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175," + -"CHINA ARGO PROJECT,JIANPING XU,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846," + -"21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49," + -"ARGOS,65,65,65,,1,5.9,49,5.8,49,2.4,24.989,49,24.989,49,0.002,34.555,49,34.55511," + -"49,0.01\n" + - -"1,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175," + -"CHINA ARGO PROJECT,JIANPING XU,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846," + -"21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49," + -"ARGOS,65,65,65,,1,10.0,49,9.9,49,2.4,24.99,49,24.99,49,0.002,34.554,49,34.55505," + -"49,0.01\n" + - -"2,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175," + -"CHINA ARGO PROJECT,JIANPING XU,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846," + -"21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49," + -"ARGOS,65,65,65,,1,20.1,49,20.0,49,2.4,24.69,49,24.69,49,0.002,34.56,49,34.56191," + -"49,0.01\n" + +"Argo 
profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175," + +"CHINA ARGO PROJECT,JIANPING XU,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846," + +"21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1," + +"ARGOS,A,A,A,,1,5.9,1,5.8,1,2.4,24.989,1,24.989,1,0.002,34.555,1,34.55511," + +"1,0.01\n" + + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175," + +"CHINA ARGO PROJECT,JIANPING XU,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846," + +"21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1," + +"ARGOS,A,A,A,,1,10.0,1,9.9,1,2.4,24.99,1,24.99,1,0.002,34.554,1,34.55505," + +"1,0.01\n" + + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175," + +"CHINA ARGO PROJECT,JIANPING XU,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846," + +"21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1," + +"ARGOS,A,A,A,,1,20.1,1,20.0,1,2.4,24.69,1,24.69,1,0.002,34.56,1,34.56191," + +"1,0.01\n" + "...\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //same as when all variables were explicitly loaded @@ -6648,88 +7534,88 @@ public static void testReadMultidimNc() throws Exception { table.readMultidimNc(fiName, StringArray.fromCSV("DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR"), null, - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //and test data at the end of that table table.removeRows(0, table.nRows() - 3); - results = table.dataToCSVString(5, true); + results = table.dataToString(5); Test.ensureEqual(results, expectedEnd, "results=\n" + results); //* test do removeMVRows when loadVariables is limited (to ensure all are loaded for the test) table.readMultidimNc(fiName, StringArray.fromCSV("LONGITUDE,PRES,PSAL_ADJUSTED_ERROR"), null, - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); expectedStart = -"row,LONGITUDE,PRES,PSAL_ADJUSTED_ERROR\n" + -"0,123.36499786376953,5.9,0.01\n" + -"1,123.36499786376953,10.0,0.01\n" + -"2,123.36499786376953,20.1,0.01\n" + +"LONGITUDE,PRES,PSAL_ADJUSTED_ERROR\n" + +"123.36499786376953,5.9,0.01\n" + +"123.36499786376953,10.0,0.01\n" + +"123.36499786376953,20.1,0.01\n" + "...\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //same as when all variables were explicitly loaded //and test data at the end of that table table.removeRows(0, table.nRows() - 3); - results = 
table.dataToCSVString(5, true);
+        results = table.dataToString(5);
         expectedEnd = 
-"row,LONGITUDE,PRES,PSAL_ADJUSTED_ERROR\n" +
-"0,154.853,1850.0,99999.0\n" + //these rows were're removed because other full-dim vars had values
-"1,154.853,1899.9,99999.0\n" +
-"2,154.853,1950.0,99999.0\n";
+"LONGITUDE,PRES,PSAL_ADJUSTED_ERROR\n" +
+"154.853,1850.0,99999.0\n" + //these rows weren't removed because other full-dim vars had values
+"154.853,1899.9,99999.0\n" +
+"154.853,1950.0,99999.0\n";
         Test.ensureEqual(results, expectedEnd, "results=\n" + results);

         //* test read JULD
         table.readMultidimNc(fiName, 
             StringArray.fromCSV("JULD"), null,
-            true, true, true, //readMetadata, trimStrings, removeMVRows
+            true, true, //readMetadata, removeMVRows
             null, null, null); //conVars, conOps, conVals
-        results = table.dataToCSVString(3, true);
+        results = table.dataToString(3);
         expectedStart = 
-"row,JULD\n" +
-"0,21660.34238425926\n" +
-"1,21670.351828703704\n" +
-"2,21680.386898148146\n" +
+"JULD\n" +
+"21660.34238425926\n" +
+"21670.351828703704\n" +
+"21680.386898148146\n" +
"...\n";
         Test.ensureEqual(results, expectedStart, "results=\n" + results);
         Test.ensureEqual(table.nRows(), 254, "nRows"); //same as when all variables were explicitly loaded
         table.removeRows(0, 251);
-        results = table.dataToCSVString(1000, true);
+        results = table.dataToString(1000);
         expectedStart = 
-"row,JULD\n" +
-"0,24190.451828703703\n" +
-"1,24200.381412037037\n" +
-"2,24210.44662037037\n";
+"JULD\n" +
+"24190.451828703703\n" +
+"24200.381412037037\n" +
+"24210.44662037037\n";
         Test.ensureEqual(results, expectedStart, "results=\n" + results);

         //* test read JULD && PRES
         table.readMultidimNc(fiName, 
             StringArray.fromCSV("JULD,PRES"), null,
-            true, true, true, //readMetadata, trimStrings, removeMVRows
+            true, true, //readMetadata, removeMVRows
             null, null, null); //conVars, conOps, conVals
-        results = table.dataToCSVString(3, true);
+        results = table.dataToString(3);
         expectedStart = 
-"row,JULD,PRES\n" +
-"0,21660.34238425926,5.9\n" + //JULD is correctly JOINed
-"1,21660.34238425926,10.0\n" +
-"2,21660.34238425926,20.1\n" +
+"JULD,PRES\n" +
+"21660.34238425926,5.9\n" + //JULD is correctly JOINed
+"21660.34238425926,10.0\n" +
+"21660.34238425926,20.1\n" +
"...\n";
         Test.ensureEqual(results, expectedStart, "results=\n" + results);
         Test.ensureEqual(table.nRows(), 17266, "nRows"); //same as when all variables were explicitly loaded
         table.removeRows(0, 17263);
-        results = table.dataToCSVString(1000, true);
+        results = table.dataToString(1000);
         expectedStart = 
-"row,JULD,PRES\n" +
-"0,24210.44662037037,1850.0\n" + //JULD is correctly JOINed
-"1,24210.44662037037,1899.9\n" +
-"2,24210.44662037037,1950.0\n";
+"JULD,PRES\n" +
+"24210.44662037037,1850.0\n" + //JULD is correctly JOINed
+"24210.44662037037,1899.9\n" +
+"24210.44662037037,1950.0\n";
         Test.ensureEqual(results, expectedStart, "results=\n" + results);

@@ -6737,12 +7623,12 @@ public static void testReadMultidimNc() throws Exception {
         table.readMultidimNc(fiName, 
             StringArray.fromCSV("HANDBOOK_VERSION,FORMAT_VERSION,DATA_TYPE"), null,
-            true, true, true, //readMetadata, trimStrings, removeMVRows
+            true, true, //readMetadata, removeMVRows
             null, null, null); //conVars, conOps, conVals
-        results = table.dataToCSVString(3, true);
+        results = table.dataToString(3);
         expectedStart = 
-"row,HANDBOOK_VERSION,FORMAT_VERSION,DATA_TYPE\n" +
-"0,1.2,3.1,Argo profile\n";
+"HANDBOOK_VERSION,FORMAT_VERSION,DATA_TYPE\n" +
+"1.2,3.1,Argo profile\n";
         Test.ensureEqual(results, expectedStart, "results=\n" + results);
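The conVars/conOps/conVals parameters give readMultidimNc a quick-reject path (hence the 9ms-vs-99ms timing notes in the tests above). A minimal sketch (file name and constraint values are hypothetical) of a call with the new signature:

    //sketch only; file name and constraint values are hypothetical
    Table table = new Table();
    table.readMultidimNc("/data/argo/D2901175_001.nc", //hypothetical file
        StringArray.fromCSV("JULD,PRES"),              //loadVarNames
        null,                                          //loadDimNames (derived from the vars)
        true, true,                                    //getMetadata, removeMVRows
        StringArray.fromCSV("PLATFORM_NUMBER"),        //conVars
        StringArray.fromCSV("="),                      //conOps
        StringArray.fromCSV("2901175"));               //conVals: quick reject if no row matches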
@@ -6750,7 +7636,7 @@ public static void testReadMultidimNc() throws Exception { table.readMultidimNc(fiName, StringArray.fromCSV("HISTORY_INSTITUTION"), null, - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals Test.ensureEqual(table.nRows(), 0, ""); Test.ensureEqual(table.nColumns(), 0, ""); @@ -6759,19 +7645,19 @@ public static void testReadMultidimNc() throws Exception { table.readMultidimNc(fiName, null, StringArray.fromCSV("ZZTOP"), - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); expectedStart = -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722\n"; +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); //* test read non-existent Var -> empty table table.readMultidimNc(fiName, StringArray.fromCSV("ZZTOP"), null, - true, true, true, //readMetadata, trimStrings, removeMVRows + true, true, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals Test.ensureEqual(table.nRows(), 0, ""); Test.ensureEqual(table.nColumns(), 0, ""); @@ -6797,22 +7683,22 @@ public static void testReadVlenNc() throws Exception { //** don't specify varNames or dimNames -- it find vars with most dims table.readMultidimNc(fiName, new StringArray(), new StringArray(), - true, true, false, //readMetadata, trimStrings, removeMVRows + true, false, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3); + results = table.dataToString(3); expectedStart = //static vars and vars like char SCIENTIFIC_CALIB_COEFFICIENT(N_PROF=254, N_CALIB=1, N_PARAM=3, STRING256=256); -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,STATION_PARAMETERS,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PARAMETER,SCIENTIFIC_CALIB_EQUATION,SCIENTIFIC_CALIB_COEFFICIENT,SCIENTIFIC_CALIB_COMMENT,SCIENTIFIC_CALIB_DATE\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PRES,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,PRES,PRES_ADJUSTED = PRES - dP,dP = 0.1 dbar.,Pressures adjusted by using pressure offset at the sea surface. 
The quoted error is manufacturer specified accuracy in dbar.,20110628060155\n" + -"1,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,TEMP,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,TEMP,none,none,The quoted error is manufacturer specified accuracy with respect to ITS-90 at time of laboratory calibration.,20110628060155\n" + -"2,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PSAL,1,65,HZ,0066_80617_001,2C,68,,APEX_SBE_4136,,846,21660.34238425926,49,21660.345046296297,21.513999938964844,123.36499786376953,49,ARGOS,65,65,65,,1,PSAL,\"PSAL_ADJUSTED = sw_salt( sw_cndr(PSAL,TEMP,PRES), TEMP, PRES_ADJUSTED ); PSAL_ADJ corrects conductivity cell therm mass (CTM), Johnson et al, 2007, JAOT;\",\"same as for PRES_ADJUSTED; CTL: alpha=0.0267, tau=18.6;\",No significant salinity drift detected; SBE sensor accuracy,20110628060155\n" + +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,STATION_PARAMETERS,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PARAMETER,SCIENTIFIC_CALIB_EQUATION,SCIENTIFIC_CALIB_COEFFICIENT,SCIENTIFIC_CALIB_COMMENT,SCIENTIFIC_CALIB_DATE\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PRES,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,PRES,PRES_ADJUSTED = PRES - dP,dP = 0.1 dbar.,Pressures adjusted by using pressure offset at the sea surface. 
The quoted error is manufacturer specified accuracy in dbar.,20110628060155\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,TEMP,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,TEMP,none,none,The quoted error is manufacturer specified accuracy with respect to ITS-90 at time of laboratory calibration.,20110628060155\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,PSAL,1,A,HZ,0066_80617_001,2C,D,,APEX_SBE_4136,,846,21660.34238425926,1,21660.345046296297,21.513999938964844,123.36499786376953,1,ARGOS,A,A,A,,1,PSAL,\"PSAL_ADJUSTED = sw_salt( sw_cndr(PSAL,TEMP,PRES), TEMP, PRES_ADJUSTED ); PSAL_ADJ corrects conductivity cell therm mass (CTM), Johnson et al, 2007, JAOT;\",\"same as for PRES_ADJUSTED; CTL: alpha=0.0267, tau=18.6;\",No significant salinity drift detected; SBE sensor accuracy,20110628060155\n" + "...\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 762, "nRows"); //254*3 /* //* same but quick reject based on constraint table.readVlenNc(fiName, new StringArray(), new StringArray(), - true, true, false, //readMetadata, trimStrings, removeMVRows + true, false, //readMetadata, removeMVRows StringArray.fromCSV("FORMAT_VERSION,FORMAT_VERSION"), //conVars StringArray.fromCSV("=,="), //conOps StringArray.fromCSV("3.1,3.2")); //conVals @@ -6822,26 +7708,26 @@ public static void testReadVlenNc() throws Exception { //* test do removeMVRows table.readVlenNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), - true, true, false, //readMetadata, trimStrings, removeMVRows + true, false, //readMetadata, removeMVRows null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3, true); Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //and test data at the end of that table table.removeRows(0, table.nRows() - 3); - results = table.dataToCSVString(5, true); + results = table.dataToString(5); expectedEnd = -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,1850.0,49,1849.4,49,99999.0,2.106,49,2.106,49,99999.0,34.604,49,34.604,49,99999.0\n" + -"1,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: 
discrete,1,1899.9,49,1899.3,49,99999.0,2.055,49,2.055,49,99999.0,34.612,49,34.612,49,99999.0\n" + -"2,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,65,HZ,0066_80617_256,2B,65,APEX,4136,013108,846,24210.44662037037,49,24210.44662037037,26.587,154.853,49,ARGOS,65,65,65,Primary sampling: discrete,1,1950.0,49,1949.4,49,99999.0,2.014,49,2.014,49,99999.0,34.617,49,34.617,49,99999.0\n"; +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1850.0,1,1849.4,1,99999.0,2.106,1,2.106,1,99999.0,34.604,1,34.604,1,99999.0\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1899.9,1,1899.3,1,99999.0,2.055,1,2.055,1,99999.0,34.612,1,34.612,1,99999.0\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722,2901175,CHINA ARGO PROJECT,JIANPING XU,256,A,HZ,0066_80617_256,2B,A,APEX,4136,013108,846,24210.44662037037,1,24210.44662037037,26.587,154.853,1,ARGOS,A,A,A,Primary sampling: discrete,1,1950.0,1,1949.4,1,99999.0,2.014,1,2.014,1,99999.0,34.617,1,34.617,1,99999.0\n"; Test.ensureEqual(results, expectedEnd, "results=\n" + results); //* same but quick reject based on constraint LAT,LON 26.587,154.853 //*** this takes 9ms while test above takes 99ms! 
table.readVlenNc(fiName, null, StringArray.fromCSV("ZZTOP, N_PROF, N_LEVELS"), - true, true, //readMetadata, trimStrings, + true, //readMetadata, StringArray.fromCSV("LATITUDE"), //conVars StringArray.fromCSV("="), //conOps StringArray.fromCSV("45")); //conVals @@ -6849,15 +7735,15 @@ public static void testReadVlenNc() throws Exception { //* test different dim order (should be rearranged so the same) table.readVlenNc(fiName, null, StringArray.fromCSV("N_LEVELS, ZZTOP, N_PROF"), - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3, true); Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //and test data at the end of that table table.removeRows(0, table.nRows() - 3); - results = table.dataToCSVString(5, true); + results = table.dataToString(5, true); Test.ensureEqual(results, expectedEnd, "results=\n" + results); @@ -6865,63 +7751,63 @@ public static void testReadVlenNc() throws Exception { table.readVlenNc(fiName, StringArray.fromCSV("DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE,PLATFORM_NUMBER,PROJECT_NAME,PI_NAME,CYCLE_NUMBER,DIRECTION,DATA_CENTRE,DC_REFERENCE,DATA_STATE_INDICATOR,DATA_MODE,PLATFORM_TYPE,FLOAT_SERIAL_NO,FIRMWARE_VERSION,WMO_INST_TYPE,JULD,JULD_QC,JULD_LOCATION,LATITUDE,LONGITUDE,POSITION_QC,POSITIONING_SYSTEM,PROFILE_PRES_QC,PROFILE_TEMP_QC,PROFILE_PSAL_QC,VERTICAL_SAMPLING_SCHEME,CONFIG_MISSION_NUMBER,PRES,PRES_QC,PRES_ADJUSTED,PRES_ADJUSTED_QC,PRES_ADJUSTED_ERROR,TEMP,TEMP_QC,TEMP_ADJUSTED,TEMP_ADJUSTED_QC,TEMP_ADJUSTED_ERROR,PSAL,PSAL_QC,PSAL_ADJUSTED,PSAL_ADJUSTED_QC,PSAL_ADJUSTED_ERROR"), null, - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3, true); Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //and test data at the end of that table table.removeRows(0, table.nRows() - 3); - results = table.dataToCSVString(5, true); + results = table.dataToString(5, true); Test.ensureEqual(results, expectedEnd, "results=\n" + results); //* test read JULD table.readVlenNc(fiName, StringArray.fromCSV("JULD"), null, - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); expectedStart = -"row,JULD\n" + -"0,21660.34238425926\n" + -"1,21670.351828703704\n" + -"2,21680.386898148146\n" + +"JULD\n" + +"21660.34238425926\n" + +"21670.351828703704\n" + +"21680.386898148146\n" + "...\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 254, "nRows"); //same as when all variables were explicitly loaded table.removeRows(0, 251); - results = table.dataToCSVString(1000, true); + results = table.dataToString(1000); expectedStart = -"row,JULD\n" + -"0,24190.451828703703\n" + -"1,24200.381412037037\n" + -"2,24210.44662037037\n"; +"JULD\n" + +"24190.451828703703\n" + +"24200.381412037037\n" + +"24210.44662037037\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); //* test read JULD && PRES table.readVlenNc(fiName, StringArray.fromCSV("JULD,PRES"), null, - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, 
conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); expectedStart = -"row,JULD,PRES\n" + -"0,21660.34238425926,5.9\n" + //JULD is correctly JOINed -"1,21660.34238425926,10.0\n" + -"2,21660.34238425926,20.1\n" + +"JULD,PRES\n" + +"21660.34238425926,5.9\n" + //JULD is correctly JOINed +"21660.34238425926,10.0\n" + +"21660.34238425926,20.1\n" + "...\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); Test.ensureEqual(table.nRows(), 17266, "nRows"); //same as when all variables were explicitly loaded table.removeRows(0, 17263); - results = table.dataToCSVString(1000, true); + results = table.dataToString(1000); expectedStart = -"row,JULD,PRES\n" + -"0,24210.44662037037,1850.0\n" + //JULD is correctly JOINed -"1,24210.44662037037,1899.9\n" + -"2,24210.44662037037,1950.0\n"; +"JULD,PRES\n" + +"24210.44662037037,1850.0\n" + //JULD is correctly JOINed +"24210.44662037037,1899.9\n" + +"24210.44662037037,1950.0\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); @@ -6929,12 +7815,12 @@ public static void testReadVlenNc() throws Exception { table.readVlenNc(fiName, StringArray.fromCSV("HANDBOOK_VERSION,FORMAT_VERSION,DATA_TYPE"), null, - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); expectedStart = -"row,HANDBOOK_VERSION,FORMAT_VERSION,DATA_TYPE\n" + -"0,1.2,3.1,Argo profile\n"; +"HANDBOOK_VERSION,FORMAT_VERSION,DATA_TYPE\n" + +"1.2,3.1,Argo profile\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); @@ -6942,7 +7828,7 @@ public static void testReadVlenNc() throws Exception { table.readVlenNc(fiName, StringArray.fromCSV("HISTORY_INSTITUTION"), null, - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, conOps, conVals Test.ensureEqual(table.nRows(), 0, ""); Test.ensureEqual(table.nColumns(), 0, ""); @@ -6951,19 +7837,19 @@ public static void testReadVlenNc() throws Exception { table.readVlenNc(fiName, null, StringArray.fromCSV("ZZTOP"), - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, conOps, conVals - results = table.dataToCSVString(3, true); + results = table.dataToString(3); expectedStart = -"row,DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE\n" + -"0,Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722\n"; +"DATA_TYPE,FORMAT_VERSION,HANDBOOK_VERSION,REFERENCE_DATE_TIME,DATE_CREATION,DATE_UPDATE\n" + +"Argo profile,3.1,1.2,19500101000000,20090422121913,20160415204722\n"; Test.ensureEqual(results, expectedStart, "results=\n" + results); //* test read non-existent Var -> empty table table.readVlenNc(fiName, StringArray.fromCSV("ZZTOP"), null, - true, true, //readMetadata, trimStrings, + true, //readMetadata, null, null, null); //conVars, conOps, conVals Test.ensureEqual(table.nRows(), 0, ""); Test.ensureEqual(table.nColumns(), 0, ""); @@ -6986,7 +7872,7 @@ public static void testReadNDNc() throws Exception { //test no vars specified, 4D, only 2nd dim has >1 value, getMetadata String fiName = "c:/u00/data/points/erdCalcofiSubsurface/1950/subsurface_19500106_69_144.nc"; table.readNDNc(fiName, null, null, 0, 0, true); - results = table.toCSVString(); + results = table.toString(); expected = "{\n" + "dimensions:\n" + @@ -7120,28 +8006,28 @@ public static void testReadNDNc() throws Exception { "\t\t:history = \"created by ERD 
from Matlab database created by Andrew Leising from the CalCOFI Physical data\" ;\n" + "\t\t:title = \"CalCOFI Physical Observations, 1949-2001\" ;\n" + "}\n" + -"row,time,depth,lat,lon,stationyear,stationmonth,stationday,stime,stationline,stationnum,temperature,salinity,pressure,oxygen,po4,silicate,no2,no3,nh3,chl,dark,primprod,lightpercent\n" + -"0,6.3612E7,0.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.19,33.6,-999.0,5.3,0.42,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"1,6.3612E7,22.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.18,33.6,-999.0,5.26,0.38,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"2,6.3612E7,49.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.2,33.6,-999.0,5.3,0.36,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"3,6.3612E7,72.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,14.95,33.58,-999.0,5.51,0.37,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"4,6.3612E7,98.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,13.02,33.35,-999.0,5.35,0.45,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"5,6.3612E7,147.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,11.45,33.36,-999.0,4.99,0.81,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"6,6.3612E7,194.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,9.32,33.55,-999.0,4.47,1.19,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"7,6.3612E7,241.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,8.51,33.85,-999.0,4.02,1.51,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"8,6.3612E7,287.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,7.74,33.95,-999.0,3.48,1.76,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"9,6.3612E7,384.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,6.42,33.97,-999.0,2.55,2.15,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"10,6.3612E7,477.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,5.35,34.04,-999.0,1.29,2.48,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"11,6.3612E7,576.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.83,34.14,-999.0,0.73,2.73,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"12,6.3612E7,673.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.44,34.22,-999.0,0.48,2.9,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"13,6.3612E7,768.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.15,34.31,-999.0,0.37,2.87,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"14,6.3612E7,969.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.67,34.43,-999.0,0.49,2.8,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"15,6.3612E7,1167.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.3,34.49,-999.0,0.66,2.7,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n"; +"time,depth,lat,lon,stationyear,stationmonth,stationday,stime,stationline,stationnum,temperature,salinity,pressure,oxygen,po4,silicate,no2,no3,nh3,chl,dark,primprod,lightpercent\n" + +"6.3612E7,0.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.19,33.6,-999.0,5.3,0.42,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,22.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.18,33.6,-999.0,5.26,0.38,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,49.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.2,33.6,-999.0,5.3,0.36,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,72.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,14.95,33.58,-999.0,5.51,0.37,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + 
+"6.3612E7,98.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,13.02,33.35,-999.0,5.35,0.45,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,147.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,11.45,33.36,-999.0,4.99,0.81,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,194.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,9.32,33.55,-999.0,4.47,1.19,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,241.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,8.51,33.85,-999.0,4.02,1.51,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,287.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,7.74,33.95,-999.0,3.48,1.76,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,384.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,6.42,33.97,-999.0,2.55,2.15,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,477.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,5.35,34.04,-999.0,1.29,2.48,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,576.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.83,34.14,-999.0,0.73,2.73,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,673.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.44,34.22,-999.0,0.48,2.9,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,768.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.15,34.31,-999.0,0.37,2.87,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,969.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.67,34.43,-999.0,0.49,2.8,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,1167.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.3,34.49,-999.0,0.66,2.7,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test same but !getMetadata table.readNDNc(fiName, null, null, 0, 0, false); - results = table.toCSVString(); + results = table.toString(); expected = "{\n" + "dimensions:\n" + @@ -7173,28 +8059,28 @@ public static void testReadNDNc() throws Exception { "\n" + "// global attributes:\n" + "}\n" + -"row,time,depth,lat,lon,stationyear,stationmonth,stationday,stime,stationline,stationnum,temperature,salinity,pressure,oxygen,po4,silicate,no2,no3,nh3,chl,dark,primprod,lightpercent\n" + -"0,6.3612E7,0.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.19,33.6,-999.0,5.3,0.42,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"1,6.3612E7,22.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.18,33.6,-999.0,5.26,0.38,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"2,6.3612E7,49.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.2,33.6,-999.0,5.3,0.36,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"3,6.3612E7,72.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,14.95,33.58,-999.0,5.51,0.37,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"4,6.3612E7,98.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,13.02,33.35,-999.0,5.35,0.45,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"5,6.3612E7,147.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,11.45,33.36,-999.0,4.99,0.81,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"6,6.3612E7,194.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,9.32,33.55,-999.0,4.47,1.19,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"7,6.3612E7,241.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,8.51,33.85,-999.0,4.02,1.51,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + 
-"8,6.3612E7,287.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,7.74,33.95,-999.0,3.48,1.76,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"9,6.3612E7,384.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,6.42,33.97,-999.0,2.55,2.15,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"10,6.3612E7,477.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,5.35,34.04,-999.0,1.29,2.48,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"11,6.3612E7,576.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.83,34.14,-999.0,0.73,2.73,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"12,6.3612E7,673.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.44,34.22,-999.0,0.48,2.9,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"13,6.3612E7,768.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.15,34.31,-999.0,0.37,2.87,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"14,6.3612E7,969.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.67,34.43,-999.0,0.49,2.8,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + -"15,6.3612E7,1167.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.3,34.49,-999.0,0.66,2.7,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n"; +"time,depth,lat,lon,stationyear,stationmonth,stationday,stime,stationline,stationnum,temperature,salinity,pressure,oxygen,po4,silicate,no2,no3,nh3,chl,dark,primprod,lightpercent\n" + +"6.3612E7,0.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.19,33.6,-999.0,5.3,0.42,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,22.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.18,33.6,-999.0,5.26,0.38,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,49.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,16.2,33.6,-999.0,5.3,0.36,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,72.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,14.95,33.58,-999.0,5.51,0.37,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,98.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,13.02,33.35,-999.0,5.35,0.45,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,147.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,11.45,33.36,-999.0,4.99,0.81,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,194.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,9.32,33.55,-999.0,4.47,1.19,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,241.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,8.51,33.85,-999.0,4.02,1.51,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,287.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,7.74,33.95,-999.0,3.48,1.76,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,384.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,6.42,33.97,-999.0,2.55,2.15,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,477.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,5.35,34.04,-999.0,1.29,2.48,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,576.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.83,34.14,-999.0,0.73,2.73,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,673.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.44,34.22,-999.0,0.48,2.9,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,768.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,4.15,34.31,-999.0,0.37,2.87,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + 
+"6.3612E7,969.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.67,34.43,-999.0,0.49,2.8,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n" + +"6.3612E7,1167.0,33.31667,-128.53333,1950,1,6,600,69.0,144.0,3.3,34.49,-999.0,0.66,2.7,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0,-999.0\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test specify vars (including out-of-order axis var, and nonsense var), !getMetadata table.readNDNc(fiName, new String[]{"temperature", "lat", "salinity", "junk"}, "depth", 100, 200, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,depth,lat,lon,temperature,salinity\n" + "6.3612E7,147.0,33.31667,-128.53333,11.45,33.36\n" + @@ -7204,31 +8090,31 @@ public static void testReadNDNc() throws Exception { //test String vars fiName = "c:/u00/cwatch/erddap2/copy/cPostDet3/BARBARAx20BLOCK/LAMNAx20DITROPIS/Nx2fA/52038_A69-1303_1059305.nc"; table.readNDNc(fiName, null, null, 0, 0, false); - results = table.dataToCSVString(4); + results = table.dataToString(4); expected = -"row,row,unique_tag_id,PI,longitude,latitude,time,bottom_depth,common_name,date_public,line,position_on_subarray,project,riser_height,role,scientific_name,serial_number,stock,surgery_time,surgery_location,tagger\n" + -"0,0,52038_A69-1303_1059305,BARBARA BLOCK,-146.1137,60.7172,1.2192849E9,,SALMON SHARK,1.273271649385E9,,,HOPKINS MARINE STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + -"1,1,52038_A69-1303_1059305,BARBARA BLOCK,-146.32355,60.66713,1.233325298E9,127.743902439024,SALMON SHARK,1.273271649385E9,PORT GRAVINA,6,HOPKINS MARINE STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + -"2,2,52038_A69-1303_1059305,BARBARA BLOCK,-146.32355,60.66713,1.233325733E9,127.743902439024,SALMON SHARK,1.273271649385E9,PORT GRAVINA,6,HOPKINS MARINE STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + -"3,3,52038_A69-1303_1059305,BARBARA BLOCK,-146.32355,60.66713,1.233325998E9,127.743902439024,SALMON SHARK,1.273271649385E9,PORT GRAVINA,6,HOPKINS MARINE STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + +"row,unique_tag_id,PI,longitude,latitude,time,bottom_depth,common_name,date_public,line,position_on_subarray,project,riser_height,role,scientific_name,serial_number,stock,surgery_time,surgery_location,tagger\n" + +"0,52038_A69-1303_1059305,BARBARA BLOCK,-146.1137,60.7172,1.2192849E9,,SALMON SHARK,1.273271649385E9,,,HOPKINS MARINE STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + +"1,52038_A69-1303_1059305,BARBARA BLOCK,-146.32355,60.66713,1.233325298E9,127.743902439024,SALMON SHARK,1.273271649385E9,PORT GRAVINA,6,HOPKINS MARINE STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + +"2,52038_A69-1303_1059305,BARBARA BLOCK,-146.32355,60.66713,1.233325733E9,127.743902439024,SALMON SHARK,1.273271649385E9,PORT GRAVINA,6,HOPKINS MARINE STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + +"3,52038_A69-1303_1059305,BARBARA BLOCK,-146.32355,60.66713,1.233325998E9,127.743902439024,SALMON SHARK,1.273271649385E9,PORT GRAVINA,6,HOPKINS MARINE 
STATION,,BLOCK_BARBARA_LAMNA_DITROPIS_N/A,LAMNA DITROPIS,1059305,N/A,1.2192156E9,\"PORT GRAVINA, PRINCE WILLIAM SOUND\",\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test 4D but request axis vars only, with constraints fiName = "/u00/data/points/ndbcMet/NDBC_51001_met.nc"; //implied c: table.readNDNc(fiName, new String[]{"LON", "LAT", "TIME"}, "TIME", 1.2051936e9, 1.20528e9, false); - results = table.dataToCSVString(4); + results = table.dataToString(4); expected = -//"row, LON, LAT, TIME\n" + //pre 2011-07-28 -//"0, -162.21, 23.43, 1.2051828E9\n" + -//"1, -162.21, 23.43, 1.2051864E9\n" + -//"2, -162.21, 23.43, 1.20519E9\n" + -//"3, -162.21, 23.43, 1.2051936E9\n"; -"row,LON,LAT,TIME\n" + -"0,-162.279,23.445,1.20519E9\n" + //pre 2013-06-20 last 9 was 828 -"1,-162.279,23.445,1.2051936E9\n" + //and 936 was 864 -"2,-162.279,23.445,1.2051972E9\n" + //and 972 was 9 -"3,-162.279,23.445,1.2052008E9\n" + +//"LON, LAT, TIME\n" + //pre 2011-07-28 +//"-162.21, 23.43, 1.2051828E9\n" + +//"-162.21, 23.43, 1.2051864E9\n" + +//"-162.21, 23.43, 1.20519E9\n" + +//"-162.21, 23.43, 1.2051936E9\n"; +"LON,LAT,TIME\n" + +"-162.279,23.445,1.20519E9\n" + //pre 2013-06-20 last 9 was 828 +"-162.279,23.445,1.2051936E9\n" + //and 936 was 864 +"-162.279,23.445,1.2051972E9\n" + //and 972 was 9 +"-162.279,23.445,1.2052008E9\n" + "...\n"; //and 2008 was 1936 Test.ensureEqual(results, expected, "results=\n" + results); @@ -7251,7 +8137,7 @@ public static void testReadNDNc2() throws Exception { //test no vars specified table.readNDNc(fiName, null, null, 0, 0, true); - results = table.toCSVString(); + results = table.toString(); expected = "{\n" + "dimensions:\n" + @@ -7343,23 +8229,23 @@ public static void testReadNDNc2() throws Exception { "\t\t:Metadata_Conventions = \"Unidata Dataset Discovery v1.0\" ;\n" + "\t\t:standard_name_vocabulary = \"CF-1.5\" ;\n" + "}\n" + -"row,z,Temperature,Temperature_sigfigs,Temperature_WODflag,WOD_cruise_identifier,wod_unique_cast,lat,lon,time,date,GMT_time,Access_no,Project,dataset,ARGOS_last_fix,ARGOS_next_fix,crs,profile,WODf,WODfp,WODfd\n" + -"0,0.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"1,10.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"2,42.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"3,76.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"4,120.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"5,166.0,7.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + 
-"6,212.0,7.0,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"7,260.0,6.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"8,308.0,5.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"9,354.0,5.2,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"10,402.0,4.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n"; +"z,Temperature,Temperature_sigfigs,Temperature_WODflag,WOD_cruise_identifier,wod_unique_cast,lat,lon,time,date,GMT_time,Access_no,Project,dataset,ARGOS_last_fix,ARGOS_next_fix,crs,profile,WODf,WODfp,WODfd\n" + +"0.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"10.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"42.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"76.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"120.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"166.0,7.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"212.0,7.0,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"260.0,6.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"308.0,5.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"354.0,5.2,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL 
SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"402.0,4.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test same but !getMetadata table.readNDNc(fiName, null, null, 0, 0, false); - results = table.toCSVString(); + results = table.toString(); expected = "{\n" + "dimensions:\n" + @@ -7392,24 +8278,24 @@ public static void testReadNDNc2() throws Exception { "\n" + "// global attributes:\n" + "}\n" + -"row,z,Temperature,Temperature_sigfigs,Temperature_WODflag,WOD_cruise_identifier,wod_unique_cast,lat,lon,time,date,GMT_time,Access_no,Project,dataset,ARGOS_last_fix,ARGOS_next_fix,crs,profile,WODf,WODfp,WODfd\n" + -"0,0.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"1,10.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"2,42.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"3,76.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"4,120.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"5,166.0,7.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"6,212.0,7.0,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"7,260.0,6.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"8,308.0,5.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"9,354.0,5.2,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + -"10,402.0,4.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n"; 
+"z,Temperature,Temperature_sigfigs,Temperature_WODflag,WOD_cruise_identifier,wod_unique_cast,lat,lon,time,date,GMT_time,Access_no,Project,dataset,ARGOS_last_fix,ARGOS_next_fix,crs,profile,WODf,WODfp,WODfd\n" + +"0.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"10.0,7.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"42.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"76.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"120.0,7.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"166.0,7.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"212.0,7.0,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"260.0,6.5,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"308.0,5.8,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"354.0,5.2,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n" + +"402.0,4.9,2,0,US025547,8015632,45.28,-142.24,83369.90625,19980403,21.81665,573,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES),animal mounted,4.836731,14.149658,-2147483647,-2147483647,-2147483647,-2147483647,-2147483647\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test specify 0D and 1D data vars (out-of-order, implied axis var, and nonsense var), !getMetadata table.readNDNc(fiName, new String[]{ "lon", "lat", "time", "Temperature", "WOD_cruise_identifier", "junk"}, "z", 100, 200, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "z,Temperature,WOD_cruise_identifier,lat,lon,time\n" + "120.0,7.8,US025547,45.28,-142.24,83369.90625\n" + @@ -7418,7 +8304,7 @@ public static void testReadNDNc2() throws Exception { //request axis vars only, with constraints table.readNDNc(fiName, new String[]{"z"}, "z", 100, 200, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "z\n" + "120.0\n" + @@ -7428,7 +8314,7 @@ public static void 
testReadNDNc2() throws Exception { //request 0D vars only, with constraints (ignored) table.readNDNc(fiName, new String[]{ "WOD_cruise_identifier", "Project", "junk", "lon", "lat"}, "z", 100, 200, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "WOD_cruise_identifier,lat,lon,Project\n" + "US025547,45.28,-142.24,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES)\n"; @@ -7437,7 +8323,7 @@ public static void testReadNDNc2() throws Exception { //request axis var and 0D vars only, with constraints table.readNDNc(fiName, new String[]{ "WOD_cruise_identifier", "Project", "z", "junk", "lon", "lat"}, "z", 100, 200, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "z,WOD_cruise_identifier,lat,lon,Project\n" + "120.0,US025547,45.28,-142.24,AUTONOMOUS PINNIPED ENVIRONMENTAL SAMPLERS (APES)\n" + @@ -7614,6 +8500,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } @@ -7913,6 +8800,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } @@ -8116,7 +9004,7 @@ else if (loadVariableNames.size() > 0) if (debugMode) String2.log(" Debug: outerTable(nRows=" + outerTable.nRows() + ") nLoadOrConVariablesInOuterTable=" + nLoadOrConVariablesInOuterTable + - " First <=3 rows:\n" + outerTable.dataToCSVString(3)); + " First <=3 rows:\n" + outerTable.dataToString(3)); globalAttributes.set(cdmOuterName, subsetVars.toString()); //may be "", that's okay if (cdmInnerName != null) globalAttributes.set(cdmInnerName, ""); //nLevel=2 will set it properly below @@ -8172,6 +9060,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } } //else if no outerTable columns, all outerTable features are considered good @@ -8183,7 +9072,7 @@ else if (loadVariableNames.size() > 0) " Debug: outerTable has nCols=" + outerTableNColumns + " nRows=" + outerTableNRows + " nKeepRows=" + outerNGood + (outerTableNRows == 0? "" : - "\n" + outerTable.dataToCSVString(5))); + "\n" + outerTable.dataToString(5))); //*** read nLevels=1 obs data @@ -8452,6 +9341,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } @@ -8504,6 +9394,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } } //below, innerTable may have 0 or more columns @@ -8564,12 +9455,12 @@ else if (loadVariableNames.size() > 0) //Remove rows where all obs data is MV //Rows with only outerTable or innerTable MVs have been removed by obsKeep above. 
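        //(In outline, as inferred from the calls just below: rowsWithData()
        //marks in a BitSet each row where at least one observation column has
        //non-missing data; the two temporary key columns record each row's link
        //to its outer and inner feature; and justKeep(bitSet) then compacts
        //every column down to just the marked rows.)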
if (debugMode) String2.log(" Debug: before remove rows where all obs data is MV (nRows=" + - nRows() + "):\n" + dataToCSVString(3)); + nRows() + "):\n" + dataToString(3)); obsKeep = rowsWithData(); addColumn(0, "outerKeyColumn", outerKeyColumnPA, new Attributes()); addColumn(1, "innerKeyColumn", innerKeyColumnPA, new Attributes()); justKeep(obsKeep); - //String2.log("after read vars\n" + dataToCSVString()); + //String2.log("after read vars\n" + dataToString()); if (debugMode) { String2.log(" Debug: after removeRowsWithJustMVs nRows=" + nRows()); ensureValid(); //throws Exception if not @@ -8630,6 +9521,7 @@ else if (loadVariableNames.size() > 0) ", all). nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } @@ -8722,7 +9614,7 @@ else if (loadVariableNames.size() > 0) if (debugMode) String2.log(" Debug: ragged innerTable has nCols=" + innerTableNColumns + " nRows=" + innerTableNRows + " nKeepRows=" + keepNInner + "\n" + - innerTable.dataToCSVString()); + innerTable.dataToString()); //Are we done? Are those all the variables we need that are in the file? @@ -8761,6 +9653,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } @@ -8909,7 +9802,7 @@ else if (loadVariableNames.size() > 0) } innerTableNRows = innerTable.nRows(); } - //String2.log(" innerTable=\n" + innerTable.dataToCSVString()); + //String2.log(" innerTable=\n" + innerTable.dataToString()); //rowsWithData (but don't remove any rows) //Note that innerTable MUST exist. @@ -8975,6 +9868,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } @@ -9015,7 +9909,7 @@ else if (loadVariableNames.size() > 0) removeAllColumns(); return; } - if (debugMode) String2.log(" Debug: interiorTable=\n" + interiorTable.dataToCSVString(5)); + if (debugMode) String2.log(" Debug: interiorTable=\n" + interiorTable.dataToString(5)); //are we done? 
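        //("Done" here appears to mean that every variable requested by a load
        //or constraint and present in the file was already satisfied by the
        //interiorTable, so the method can return without reading the larger
        //per-observation arrays; this is an inference from the variable names,
        //not a statement from the original code.)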
if (nLoadOrConVariablesInFile == nLoadOrConVariablesInInteriorTable) { @@ -9041,6 +9935,7 @@ else if (loadVariableNames.size() > 0) " nRows=" + nRows() + " nCols=" + nColumns() + " time=" + (System.currentTimeMillis() - time)); if (debugMode) ensureValid(); + decodeCharsAndStrings(); return; } } @@ -9102,7 +9997,7 @@ else if (loadVariableNames.size() > 0) addColumn(0, "outerIndexCol", outerIndexColumnPA, new Attributes()); addColumn(1, "innerIndexCol", innerIndexColumnPA, new Attributes()); addColumn(2, "interiorIndexCol", interiorIndexColumnPA, new Attributes()); - //String2.log(" obs before justKeep(obsKeep):\n" + dataToCSVString()); + //String2.log(" obs before justKeep(obsKeep):\n" + dataToString()); justKeep(obsKeep); if (debugMode) { String2.log(" Debug: main table nRows before=" + preNRows + @@ -9200,6 +10095,7 @@ else if (loadVariableNames.size() > 0) //finish up tryToApplyConstraintsAndKeep(-1, conNames, conOps, conValues); //may be 0 rows left reorderColumns(loadVariableNames, true); //discard others + decodeCharsAndStrings(); } finally { //make sure ncFile is explicitly closed @@ -9229,7 +10125,7 @@ public static void testReadNcCF7SampleDims() throws Exception { Table table = new Table(); String results, expected; //From Ajay Krishnan, NCEI/NODC, from - //http://data.nodc.noaa.gov/thredds/catalog/testdata/wod_ragged/05052016/catalog.html?dataset=testdata/wod_ragged/05052016/ind199105_ctd.nc + //https://data.nodc.noaa.gov/thredds/catalog/testdata/wod_ragged/05052016/catalog.html?dataset=testdata/wod_ragged/05052016/ind199105_ctd.nc String fileName = String2.unitTestDataDir + "nccf/ncei/ind199105_ctd.nc"; Attributes gatts; @@ -9245,7 +10141,7 @@ public static void testReadNcCF7SampleDims() throws Exception { table.readNcCF(fileName, StringArray.fromCSV( "zztop,wod_unique_cast,lat,lon,time,z,z_WODflag" + scalarVars), null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = //with netcdf-java 4.6.5 and before, the last 3 vars had 0's. //with netcdf-java 4.6.6 and after, they are the default cf missing values @@ -9253,19 +10149,19 @@ public static void testReadNcCF7SampleDims() throws Exception { //I notified source of files: Ajay Krisnan, but he never replied. 
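        //(For reference: the netCDF default fill values include NC_FILL_INT =
        //-2147483647 and NC_FILL_SHORT = -32767, which is what netcdf-java
        //4.6.6+ supplies for a variable with no data and no explicit _FillValue
        //attribute; hence the odd -2147483647 and -32767 values in the expected
        //results below.)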
//so I'm going with 4.6.6 and odd values //was -//"row,wod_unique_cast,lat,lon,time,z,z_WODflag,crs,WODf,WODfd\n" + -//"0,3390296,-43.7802,67.3953,80838.31180554628,2.9759612,0,0,0,0\n" + -//"1,3390296,-43.7802,67.3953,80838.31180554628,3.967939,0,0,0,0\n" + -//"2,3390296,-43.7802,67.3953,80838.31180554628,5.9518795,0,0,0,0\n" + -//"3,3390296,-43.7802,67.3953,80838.31180554628,7.9358006,0,0,0,0\n" + -//"4,3390296,-43.7802,67.3953,80838.31180554628,9.919703,0,0,0,0\n" + +//"wod_unique_cast,lat,lon,time,z,z_WODflag,crs,WODf,WODfd\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,2.9759612,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,3.967939,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,5.9518795,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,7.9358006,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,9.919703,0,0,0,0\n" + //"...\n"; -"row,wod_unique_cast,lat,lon,time,z,z_WODflag,crs,WODf,WODfd\n" + -"0,3390296,-43.7802,67.3953,80838.31180554628,2.9759612,0,-2147483647,-32767,-32767\n" + -"1,3390296,-43.7802,67.3953,80838.31180554628,3.967939,0,-2147483647,-32767,-32767\n" + -"2,3390296,-43.7802,67.3953,80838.31180554628,5.9518795,0,-2147483647,-32767,-32767\n" + -"3,3390296,-43.7802,67.3953,80838.31180554628,7.9358006,0,-2147483647,-32767,-32767\n" + -"4,3390296,-43.7802,67.3953,80838.31180554628,9.919703,0,-2147483647,-32767,-32767\n" + +"wod_unique_cast,lat,lon,time,z,z_WODflag,crs,WODf,WODfd\n" + +"3390296,-43.7802,67.3953,80838.31180554628,2.9759612,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,3.967939,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,5.9518795,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,7.9358006,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,9.919703,0,-2147483647,-32767,-32767\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -9273,22 +10169,22 @@ public static void testReadNcCF7SampleDims() throws Exception { table.readNcCF(fileName, StringArray.fromCSV( "zztop,wod_unique_cast,lat,lon,time,Temperature,Temperature_WODflag" + scalarVars), null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = //was -//"row,wod_unique_cast,lat,lon,time,Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + -//"0,3390296,-43.7802,67.3953,80838.31180554628,14.519,0,0,0,0\n" + -//"1,3390296,-43.7802,67.3953,80838.31180554628,14.526,0,0,0,0\n" + -//"2,3390296,-43.7802,67.3953,80838.31180554628,14.537,0,0,0,0\n" + -//"3,3390296,-43.7802,67.3953,80838.31180554628,14.533,0,0,0,0\n" + -//"4,3390296,-43.7802,67.3953,80838.31180554628,14.532,0,0,0,0\n" + +//"wod_unique_cast,lat,lon,time,Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,14.519,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,14.526,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,14.537,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,14.533,0,0,0,0\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,14.532,0,0,0,0\n" + //"...\n"; -"row,wod_unique_cast,lat,lon,time,Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + -"0,3390296,-43.7802,67.3953,80838.31180554628,14.519,0,-2147483647,-32767,-32767\n" + -"1,3390296,-43.7802,67.3953,80838.31180554628,14.526,0,-2147483647,-32767,-32767\n" + -"2,3390296,-43.7802,67.3953,80838.31180554628,14.537,0,-2147483647,-32767,-32767\n" + 
-"3,3390296,-43.7802,67.3953,80838.31180554628,14.533,0,-2147483647,-32767,-32767\n" + -"4,3390296,-43.7802,67.3953,80838.31180554628,14.532,0,-2147483647,-32767,-32767\n" + +"wod_unique_cast,lat,lon,time,Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + +"3390296,-43.7802,67.3953,80838.31180554628,14.519,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,14.526,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,14.537,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,14.533,0,-2147483647,-32767,-32767\n" + +"3390296,-43.7802,67.3953,80838.31180554628,14.532,0,-2147483647,-32767,-32767\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -9296,22 +10192,22 @@ public static void testReadNcCF7SampleDims() throws Exception { table.readNcCF(fileName, StringArray.fromCSV( "zztop,wod_unique_cast,lat,lon,time" + scalarVars), null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = //was -//"row,wod_unique_cast,lat,lon,time,crs,WODf,WODfd\n" + -//"0,3390296,-43.7802,67.3953,80838.31180554628,0,0,0\n" + -//"1,3390301,-44.5177,67.9403,80838.68055558205,0,0,0\n" + -//"2,3390310,-45.2592,68.3755,80839.08888889104,0,0,0\n" + -//"3,3390318,-46.0113,68.7568,80839.39652776718,0,0,0\n" + -//"4,3390328,-47.0115,69.431,80839.87291669846,0,0,0\n" + +//"wod_unique_cast,lat,lon,time,crs,WODf,WODfd\n" + +//"3390296,-43.7802,67.3953,80838.31180554628,0,0,0\n" + +//"3390301,-44.5177,67.9403,80838.68055558205,0,0,0\n" + +//"3390310,-45.2592,68.3755,80839.08888889104,0,0,0\n" + +//"3390318,-46.0113,68.7568,80839.39652776718,0,0,0\n" + +//"3390328,-47.0115,69.431,80839.87291669846,0,0,0\n" + //"...\n"; -"row,wod_unique_cast,lat,lon,time,crs,WODf,WODfd\n" + -"0,3390296,-43.7802,67.3953,80838.31180554628,-2147483647,-32767,-32767\n" + -"1,3390301,-44.5177,67.9403,80838.68055558205,-2147483647,-32767,-32767\n" + -"2,3390310,-45.2592,68.3755,80839.08888889104,-2147483647,-32767,-32767\n" + -"3,3390318,-46.0113,68.7568,80839.39652776718,-2147483647,-32767,-32767\n" + -"4,3390328,-47.0115,69.431,80839.87291669846,-2147483647,-32767,-32767\n" + +"wod_unique_cast,lat,lon,time,crs,WODf,WODfd\n" + +"3390296,-43.7802,67.3953,80838.31180554628,-2147483647,-32767,-32767\n" + +"3390301,-44.5177,67.9403,80838.68055558205,-2147483647,-32767,-32767\n" + +"3390310,-45.2592,68.3755,80839.08888889104,-2147483647,-32767,-32767\n" + +"3390318,-46.0113,68.7568,80839.39652776718,-2147483647,-32767,-32767\n" + +"3390328,-47.0115,69.431,80839.87291669846,-2147483647,-32767,-32767\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -9319,22 +10215,22 @@ public static void testReadNcCF7SampleDims() throws Exception { table.readNcCF(fileName, StringArray.fromCSV( "zztop,Temperature,Temperature_WODflag" + scalarVars), null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = //was -//"row,Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + -//"0,14.519,0,0,0,0\n" + -//"1,14.526,0,0,0,0\n" + -//"2,14.537,0,0,0,0\n" + -//"3,14.533,0,0,0,0\n" + -//"4,14.532,0,0,0,0\n" + +//"Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + +//"14.519,0,0,0,0\n" + +//"14.526,0,0,0,0\n" + +//"14.537,0,0,0,0\n" + +//"14.533,0,0,0,0\n" + +//"14.532,0,0,0,0\n" + //"...\n"; -"row,Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + -"0,14.519,0,-2147483647,-32767,-32767\n" + -"1,14.526,0,-2147483647,-32767,-32767\n" + 
-"2,14.537,0,-2147483647,-32767,-32767\n" + -"3,14.533,0,-2147483647,-32767,-32767\n" + -"4,14.532,0,-2147483647,-32767,-32767\n" + +"Temperature,Temperature_WODflag,crs,WODf,WODfd\n" + +"14.519,0,-2147483647,-32767,-32767\n" + +"14.526,0,-2147483647,-32767,-32767\n" + +"14.537,0,-2147483647,-32767,-32767\n" + +"14.533,0,-2147483647,-32767,-32767\n" + +"14.532,0,-2147483647,-32767,-32767\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -9342,22 +10238,22 @@ public static void testReadNcCF7SampleDims() throws Exception { table.readNcCF(fileName, StringArray.fromCSV( "zztop,WODf,crs"), null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = //was -//"row,WODf,crs\n" + -//"0,0,0\n"; -"row,WODf,crs\n" + -"0,-32767,-2147483647\n"; +//"WODf,crs\n" + +//"0,0\n"; +"WODf,crs\n" + +"-32767,-2147483647\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test reading WHOLE file (should just catch z_obs dimension) table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(3); + results = table.dataToString(3); expected = //note it catches z_obs dimension (z, z_WODflag, z_sigfig), not others. // Temperature_row_size, and Temperature_WODprofileflag are [casts], and don't use Temperature_obs. -"row,country,WOD_cruise_identifier,originators_cruise_identifier,wod_unique_cast," + +"country,WOD_cruise_identifier,originators_cruise_identifier,wod_unique_cast," + "lat,lon,time,date,GMT_time,Access_no,Project,Platform,Institute,Cast_Tow_number," + "Orig_Stat_Num,Bottom_Depth,Cast_Duration,Cast_Direction,High_res_pair,dataset," + "dbase_orig,origflagset,z,z_WODflag,z_sigfig,Temperature_row_size," + @@ -9367,7 +10263,7 @@ public static void testReadNcCF7SampleDims() throws Exception { "Pressure_row_size,Chlorophyll_row_size,Chlorophyll_WODprofileflag," + "Chlorophyll_Instrument,Chlorophyll_uncalibrated,Conductivit_row_size,crs,WODf,WODfp,WODfd\n" + -"0,FRANCE,FR008787,35MF68SUZIL,3390296,-43.7802,67.3953,80838.31180554628," + +"FRANCE,FR008787,35MF68SUZIL,3390296,-43.7802,67.3953,80838.31180554628," + "19910501,7.483333,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE)," + "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388)," + "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,37.0,4438.0,9.96921E36,,7498735," + @@ -9376,7 +10272,7 @@ public static void testReadNcCF7SampleDims() throws Exception { //was "-2147483647,0,0,0,0,0\n" + "-2147483647,0,-2147483647,-32767,-32767,-32767\n" + -"1,FRANCE,FR008787,35MF68SUZIL,3390296,-43.7802,67.3953,80838.31180554628," + +"FRANCE,FR008787,35MF68SUZIL,3390296,-43.7802,67.3953,80838.31180554628," + "19910501,7.483333,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE)," + "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388)," + "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,37.0,4438.0,9.96921E36,,7498735," + @@ -9385,7 +10281,7 @@ public static void testReadNcCF7SampleDims() throws Exception { //was "-2147483647,0,0,0,0,0\n" + "-2147483647,0,-2147483647,-32767,-32767,-32767\n" + -"2,FRANCE,FR008787,35MF68SUZIL,3390296,-43.7802,67.3953,80838.31180554628," + +"FRANCE,FR008787,35MF68SUZIL,3390296,-43.7802,67.3953,80838.31180554628," + "19910501,7.483333,841,WORLD OCEAN CIRCULATION EXPERIMENT (WOCE)," + "MARION DUFRESNE (C.s.FNGB;built 1972;decomm-d 1995;renamed Fres;IMO7208388)," + "NATIONAL MUSEUM OF NATURAL HISTORY (PARIS),1,37.0,4438.0,9.96921E36,,7498735," + @@ -9424,16 +10320,16 @@ public static void testReadNcCF7SampleDims() 
throws Exception { "zztop,z_row_size,Temperature_row_size,Salinity_row_size,Oxygen_row_size," + "Pressure_row_size,Chlorophyll_row_size,Conductivity_row_size"), null, null, null); - results = table.dataToCSVString(7); + results = table.dataToString(7); expected = //verified with dumpString above -"row,Temperature_row_size,Salinity_row_size,Oxygen_row_size,Pressure_row_size,Chlorophyll_row_size\n" + -"0,2204,2204,2204,2204,0\n" + -"1,1844,1844,1844,1844,0\n" + -"2,1684,1684,1684,1684,0\n" + -"3,1587,1587,1587,1587,0\n" + -"4,357,357,357,357,0\n" + -"5,34,34,0,34,34\n" + -"6,34,34,0,34,34\n" + +"Temperature_row_size,Salinity_row_size,Oxygen_row_size,Pressure_row_size,Chlorophyll_row_size\n" + +"2204,2204,2204,2204,0\n" + +"1844,1844,1844,1844,0\n" + +"1684,1684,1684,1684,0\n" + +"1587,1587,1587,1587,0\n" + +"357,357,357,357,0\n" + +"34,34,0,34,34\n" + +"34,34,0,34,34\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -9445,7 +10341,7 @@ public static void testReadNcCF7SampleDims() throws Exception { table.readNcCF(fileName, StringArray.fromCSV( "zztop,wod_unique_cast,lat,lon,time,z,z_WODflag,Temperature,Temperature_WODflag" + scalarVars), null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); } catch (Throwable t2) { results = t2.toString(); } @@ -9476,14 +10372,14 @@ public static void testReadNcCFPoint(boolean pauseAfterEach) throws Exception { table.readNcCF(fileName, null, null, null, null); //String2.log(table.toCSVString()); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,obs,lat,lon,alt,time,temperature,humidity\n" + -"0,0,41.0,112.0,7.745540487338979,573,26.225288,11.245576\n" + -"1,1,179.0,68.0,3.0855444414144264,2248,12.695349,67.73824\n" + -"2,2,10.0,11.0,3.254759157455159,71,21.193731,48.589462\n" + -"3,3,106.0,22.0,4.549437636401848,1714,35.339344,39.594116\n" + -"4,4,75.0,16.0,6.061720687265453,1209,22.593496,28.170149\n" + +"obs,lat,lon,alt,time,temperature,humidity\n" + +"0,41.0,112.0,7.745540487338979,573,26.225288,11.245576\n" + +"1,179.0,68.0,3.0855444414144264,2248,12.695349,67.73824\n" + +"2,10.0,11.0,3.254759157455159,71,21.193731,48.589462\n" + +"3,106.0,22.0,4.549437636401848,1714,35.339344,39.594116\n" + +"4,75.0,16.0,6.061720687265453,1209,22.593496,28.170149\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); if (pauseAfterEach) @@ -9495,7 +10391,7 @@ public static void testReadNcCFPoint(boolean pauseAfterEach) throws Exception { StringArray.fromCSV(""), StringArray.fromCSV(""), StringArray.fromCSV("")); - results = table.dataToCSVString(5); + results = table.dataToString(5); //expected is same Test.ensureEqual(results, expected, "results=\n" + results); if (pauseAfterEach) @@ -9522,7 +10418,7 @@ public static void testReadNcCFPoint(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("obs"), StringArray.fromCSV("="), StringArray.fromCSV("2")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "obs,lat,time,temperature\n" + "2,10.0,71,21.193731\n"; @@ -9535,7 +10431,7 @@ public static void testReadNcCFPoint(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("time"), StringArray.fromCSV("="), StringArray.fromCSV("71")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "obs,lat,time,temperature\n" + "2,10.0,71,21.193731\n"; @@ -9548,7 +10444,7 @@ public static void testReadNcCFPoint(boolean pauseAfterEach) throws Exception { 
StringArray.fromCSV("temperature"), StringArray.fromCSV("="), StringArray.fromCSV("21.193731")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "obs,lat,time,temperature\n" + "2,10.0,71,21.193731\n"; @@ -9563,7 +10459,7 @@ public static void testReadNcCFPoint(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("obs"), StringArray.fromCSV("="), StringArray.fromCSV("2")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "obs\n" + "2\n"; @@ -9582,7 +10478,7 @@ public static void testReadNcCFPoint(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("temperature"), StringArray.fromCSV("="), StringArray.fromCSV("21.193731")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "temperature\n" + "21.193731\n"; @@ -9654,17 +10550,17 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { String2.log("\n\n** Test nLevels=1/contiguousRagged no loadVars, no constraints"); table.readNcCF(profileFileName, null, null, null, null); //String2.log(table.toCSVString()); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,id,longitude,latitude,time,altitude,chlorophyll,chlorophyll_qc,oxygen,oxygen_qc,pressure,pressure_qc,salinity,salinity_qc,temperature,temperature_qc\n" + -"0,465958,163.08,39.0,1.107754559E9,-2.0,,,,,,,,,10.1,0.0\n" + -"1,465958,163.08,39.0,1.107754559E9,-58.0,,,,,,,,,9.9,0.0\n" + -"2,465958,163.08,39.0,1.107754559E9,-96.0,,,,,,,,,9.2,0.0\n" + -"3,465958,163.08,39.0,1.107754559E9,-138.0,,,,,,,,,8.8,0.0\n" + -"4,465958,163.08,39.0,1.107754559E9,-158.0,,,,,,,,,8.1,0.0\n" + +"id,longitude,latitude,time,altitude,chlorophyll,chlorophyll_qc,oxygen,oxygen_qc,pressure,pressure_qc,salinity,salinity_qc,temperature,temperature_qc\n" + +"465958,163.08,39.0,1.107754559E9,-2.0,,,,,,,,,10.1,0.0\n" + +"465958,163.08,39.0,1.107754559E9,-58.0,,,,,,,,,9.9,0.0\n" + +"465958,163.08,39.0,1.107754559E9,-96.0,,,,,,,,,9.2,0.0\n" + +"465958,163.08,39.0,1.107754559E9,-138.0,,,,,,,,,8.8,0.0\n" + +"465958,163.08,39.0,1.107754559E9,-158.0,,,,,,,,,8.1,0.0\n" + "...\n"; Test.ensureEqual(results, expected, ""); - Test.ensureEqual(table.nRows(), 118, table.toCSVString()); + Test.ensureEqual(table.nRows(), 118, table.toString()); results = table.columnAttributes(0).toString(); expected = " actual_range=465958, 848984\n" + @@ -9747,7 +10643,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { "just outerTable loadVars, no constraints"); table.readNcCF(profileFileName, StringArray.fromCSV( "longitude,latitude,time,zztop,id"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "longitude,latitude,time,id\n" + "163.08,39.0,1.107754559E9,465958\n" + @@ -9799,7 +10695,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("id"), StringArray.fromCSV("="), StringArray.fromCSV("848984")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "longitude,latitude,time,id\n" + "214.66,54.8,1.107759959E9,848984\n"; @@ -9833,7 +10729,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("id,temperature"), StringArray.fromCSV("=,>="), StringArray.fromCSV("848984,5")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "longitude,latitude,time,altitude,temperature,temperature_qc,id\n" + "214.66,54.8,1.107759959E9,-2.0,5.8,0.0,848984\n" + 
@@ -9854,7 +10750,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { String2.log("\n\n** Test nLevels=1/contiguousRagged just obs loadVars, no constraints"); table.readNcCF(profileFileName, StringArray.fromCSV( "salinity,temperature,zztop"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "salinity,temperature\n" + ",10.1\n" + @@ -9867,7 +10763,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { ",6.7\n" + ",6.0\n"; Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results); - Test.ensureEqual(table.nRows(), 118, table.dataToCSVString()); + Test.ensureEqual(table.nRows(), 118, table.dataToString()); gatts = table.globalAttributes(); Test.ensureEqual(gatts.getString("cdm_data_type"), "Profile", gatts.toString()); Test.ensureEqual(gatts.getString("cdm_profile_variables"), @@ -9893,7 +10789,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("temperature"), StringArray.fromCSV(">"), StringArray.fromCSV("24.5")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "temperature\n" + "24.8\n" + @@ -9976,18 +10872,18 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { String2.log("\n\n** Test nLevels=1/" + fileType + " no loadVars, no constraints"); table.readNcCF(fileName, null, null, null, null); -String2.log(table.toCSVString()); - results = table.dataToCSVString(5); +String2.log(table.toString()); + results = table.dataToString(5); expected = -"row,line_station,longitude,latitude,altitude,time,obsScientific,obsValue,obsUnits\n" + -"0,076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Argyropelecus sladeni,2,number of larvae\n" + -"1,076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Chauliodus macouni,3,number of larvae\n" + -"2,076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Danaphos oculatus,4,number of larvae\n" + -"3,076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Diogenichthys atlanticus,3,number of larvae\n" + -"4,076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Idiacanthus antrostomus,3,number of larvae\n" + +"line_station,longitude,latitude,altitude,time,obsScientific,obsValue,obsUnits\n" + +"076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Argyropelecus sladeni,2,number of larvae\n" + +"076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Chauliodus macouni,3,number of larvae\n" + +"076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Danaphos oculatus,4,number of larvae\n" + +"076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Diogenichthys atlanticus,3,number of larvae\n" + +"076.7_100,-124.32333,33.388332,-214.1,1.10064E9,Idiacanthus antrostomus,3,number of larvae\n" + "...\n"; Test.ensureEqual(results, expected, ""); - Test.ensureEqual(table.nRows(), 23, table.toCSVString()); + Test.ensureEqual(table.nRows(), 23, table.toString()); results = table.columnAttributes(0).toString(); expected = " cf_role=\"timeseries_id\"\n" + @@ -10041,7 +10937,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { "just outerTable loadVars, no constraints"); table.readNcCF(fileName, StringArray.fromCSV( "line_station,zztop"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "line_station\n" + "076.7_100\n" + @@ -10074,7 +10970,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("line_station"), StringArray.fromCSV("="), 
StringArray.fromCSV("083.3_100")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "line_station\n" + "083.3_100\n"; @@ -10108,7 +11004,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("line_station,obsValue"), StringArray.fromCSV("=,="), StringArray.fromCSV("083.3_100,1")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "longitude,latitude,altitude,time,line_station,obsScientific,obsValue,obsUnits\n" + "-123.49333,32.245,-211.5,1.10027676E9,083.3_100,Argyropelecus sladeni,1,number of larvae\n" + @@ -10142,7 +11038,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { String2.log("\n\n** Test nLevels=1/" + fileType + " just obs loadVars, no constraints"); table.readNcCF(fileName, StringArray.fromCSV( "obsScientific,obsValue,obsUnits,zztop"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "obsScientific,obsValue,obsUnits\n" + "Argyropelecus sladeni,2,number of larvae\n" + @@ -10194,7 +11090,7 @@ public static void testReadNcCF1(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("obsValue"), StringArray.fromCSV("="), StringArray.fromCSV("4")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "obsScientific,obsValue,obsUnits\n" + "Danaphos oculatus,4,number of larvae\n" + @@ -10270,7 +11166,7 @@ public static void testReadNcCF1Kevin() throws Exception { String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - String2.log(table.toCSVString()); + String2.log(table.toString()); debugMode = oDebug; } @@ -10328,7 +11224,7 @@ public static void testReadGocdNcCF() throws Exception { // :instrument_type = ""; table.readNcCF(fileName, null, null, null, null); //String2.log(table.toCSVString()); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = "zztop\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -10658,22 +11554,22 @@ public static void testReadGocdNcCF() throws Exception { Test.ensureEqual(results, expected, "results=\n" + results); //String2.log(table.toCSVString()); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,sampling_interval,seafloor_depth,latitude,longitude,latitude_quality_flag,longitude_quality_flag," + +"sampling_interval,seafloor_depth,latitude,longitude,latitude_quality_flag,longitude_quality_flag," + "depth,depth_quality_flag,time,time_quality_flag,u,u_quality_flag,v,v_quality_flag," + "current_speed,current_speed_quality_flag,current_direction," + "current_direction_quality_flag,crs\n" + // sapmInt,sfDpth lat lon depth time u v cspeed cdir crs -"0,9999.9,9999.9,21.0958,-158.3554,1,1,30.0,1,38730.9986,1,-0.004,1,0.174,1,0.174,1,358.7,1,0\n" + -"1,9999.9,9999.9,21.0958,-158.3554,1,1,40.0,1,38730.9986,1,-0.008,1,0.169,1,0.1692,1,357.3,1,0\n" + -"2,9999.9,9999.9,21.0958,-158.3554,1,1,50.0,1,38730.9986,1,-0.01,1,0.165,1,0.1653,1,356.5,1,0\n" + -"3,9999.9,9999.9,21.0958,-158.3554,1,1,60.0,1,38730.9986,1,-0.009,1,0.163,1,0.1632,1,356.8,1,0\n" + -"4,9999.9,9999.9,21.0958,-158.3554,1,1,70.0,1,38730.9986,1,-0.012,1,0.173,1,0.1734,1,356.0,1,0\n" + +"9999.9,9999.9,21.0958,-158.3554,1,1,30.0,1,38730.9986,1,-0.004,1,0.174,1,0.174,1,358.7,1,0\n" + +"9999.9,9999.9,21.0958,-158.3554,1,1,40.0,1,38730.9986,1,-0.008,1,0.169,1,0.1692,1,357.3,1,0\n" + 
+"9999.9,9999.9,21.0958,-158.3554,1,1,50.0,1,38730.9986,1,-0.01,1,0.165,1,0.1653,1,356.5,1,0\n" + +"9999.9,9999.9,21.0958,-158.3554,1,1,60.0,1,38730.9986,1,-0.009,1,0.163,1,0.1632,1,356.8,1,0\n" + +"9999.9,9999.9,21.0958,-158.3554,1,1,70.0,1,38730.9986,1,-0.012,1,0.173,1,0.1734,1,356.0,1,0\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); - results = table.dataToCSVString(); //no row numbers + results = table.dataToString(); //no row numbers expected = //why are cspeed and cdir known, but u,v not? //sampInt sfDpth lat lon depth time u v cspeed cdir crs "9999.9,9999.9,-14.2758,-170.6805,1,1,680.0,1,38751.8319,1,9999.9,-9,9999.9,-9,141.42,1,45.0,1,0\n" + @@ -10753,22 +11649,22 @@ public static void testReadGocdNcCF() throws Exception { Test.ensureEqual(results, expected, "results=\n" + results); //String2.log(table.toCSVString()); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,sampling_interval,seafloor_depth,latitude,longitude," + +"sampling_interval,seafloor_depth,latitude,longitude," + "latitude_quality_flag,longitude_quality_flag,depth,depth_quality_flag,time,time_quality_flag,u,u_quality_flag,v,v_quality_flag," + "current_speed,current_speed_quality_flag,current_direction," + "current_direction_quality_flag,crs\n" + // si sDepth lat lon q q depth q time q u q v q cspeed q cDir q crs -"0,60.0,32.6386,42.37859,-70.78094,1,1,26.34,1,38621.7448,1,0.0066,1,0.0072,1,0.0111,1,42.8,1,0\n" + -"1,60.0,32.6386,42.37859,-70.78094,1,1,24.34,1,38621.7448,1,0.0279,1,-0.008,1,0.0292,1,106.0,1,0\n" + -"2,60.0,32.6386,42.37859,-70.78094,1,1,22.34,1,38621.7448,1,0.0325,1,2.0E-4,1,0.033,1,89.7,1,0\n" + -"3,60.0,32.6386,42.37859,-70.78094,1,1,20.34,1,38621.7448,1,-0.0094,1,0.0011,1,0.0121,1,277.0,1,0\n" + -"4,60.0,32.6386,42.37859,-70.78094,1,1,18.34,1,38621.7448,1,-0.0383,1,-0.0367,1,0.0532,1,226.2,1,0\n" + +"60.0,32.6386,42.37859,-70.78094,1,1,26.34,1,38621.7448,1,0.0066,1,0.0072,1,0.0111,1,42.8,1,0\n" + +"60.0,32.6386,42.37859,-70.78094,1,1,24.34,1,38621.7448,1,0.0279,1,-0.008,1,0.0292,1,106.0,1,0\n" + +"60.0,32.6386,42.37859,-70.78094,1,1,22.34,1,38621.7448,1,0.0325,1,2.0E-4,1,0.033,1,89.7,1,0\n" + +"60.0,32.6386,42.37859,-70.78094,1,1,20.34,1,38621.7448,1,-0.0094,1,0.0011,1,0.0121,1,277.0,1,0\n" + +"60.0,32.6386,42.37859,-70.78094,1,1,18.34,1,38621.7448,1,-0.0383,1,-0.0367,1,0.0532,1,226.2,1,0\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); - results = table.dataToCSVString(); //no row numbers + results = table.dataToString(); //no row numbers //String2.log(results); expected = //why are cspeed and cdir known, but u,v not? 
//si sDepth lat lon q q depth q time q u q v q cspeed q cDir q crs @@ -10792,19 +11688,19 @@ public static void testReadGocdNcCF() throws Exception { results = NcHelper.dumpString(fileName, false); //"u"); //false); String2.log(results); table.readNcCF(fileName, null, null, null, null); - String2.log(table.dataToCSVString()); - results = table.dataToCSVString(5); + String2.log(table.dataToString()); + results = table.dataToString(5); expected = -"row,sampling_interval,seafloor_depth,latitude,longitude,latitude_quality_flag," + +"sampling_interval,seafloor_depth,latitude,longitude,latitude_quality_flag," + "longitude_quality_flag,depth,depth_quality_flag,time,time_quality_flag," + "u,u_quality_flag,v,v_quality_flag,current_speed,current_speed_quality_flag," + "current_direction,current_direction_quality_flag,crs\n" + //si sd lat lon q q depth q time q u q v q cSpeed p dir q crs -"0,60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.424999999814,1,0.015,1,0.0865,1,0.0878,1,9.8,1,0\n" + -"1,60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.46666666679,1,-0.0045,1,0.0933,1,0.0934,1,357.2,1,0\n" + -"2,60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.508333333302,1,-0.0072,1,0.0856,1,0.0859,1,355.2,1,0\n" + -"3,60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.549999999814,1,-0.0065999995,1,0.1025,1,0.1027,1,356.3,1,0\n" + -"4,60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.59166666679,1,-0.0036,1,0.11200001,1,0.1121,1,358.2,1,0\n" + +"60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.424999999814,1,0.015,1,0.0865,1,0.0878,1,9.8,1,0\n" + +"60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.46666666679,1,-0.0045,1,0.0933,1,0.0934,1,357.2,1,0\n" + +"60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.508333333302,1,-0.0072,1,0.0856,1,0.0859,1,355.2,1,0\n" + +"60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.549999999814,1,-0.0065999995,1,0.1025,1,0.1027,1,356.3,1,0\n" + +"60.0,2640.0,62.894997,-35.857998,1,1,1980.0,1,31662.59166666679,1,-0.0036,1,0.11200001,1,0.1121,1,358.2,1,0\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); /* */ @@ -10818,8 +11714,8 @@ public static void testReadGocdNcCF() throws Exception { results = NcHelper.dumpString(fileName, false); //"u"); //false); String2.log(results); table.readNcCF(fileName, null, null, null, null); - String2.log(table.dataToCSVString()); - results = table.dataToCSVString(5); + String2.log(table.dataToString()); + results = table.dataToString(5); expected = "zz\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -10857,18 +11753,18 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { /* */ String2.log("\n\n** Test nLevels=2/" + fileType + " no loadVars, no constraints"); table.readNcCF(fileName, null, null, null, null); -String2.log(table.toCSVString()); - results = table.dataToCSVString(5); +String2.log(table.toString()); + results = table.dataToString(5); expected = -"row,platform,cruise,org,type,station_id,longitude,latitude,time,depth,temperature,salinity\n" + -"0,33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,4.0,-1.84,35.64\n" + -"1,33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,10.0,-1.84,35.64\n" + -"2,33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,20.0,-1.83,35.64\n" + -"3,33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,30.0,-1.83,35.64\n" + -"4,33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,49.0,-1.83,35.64\n" + +"platform,cruise,org,type,station_id,longitude,latitude,time,depth,temperature,salinity\n" + 
+"33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,4.0,-1.84,35.64\n" + +"33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,10.0,-1.84,35.64\n" + +"33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,20.0,-1.83,35.64\n" + +"33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,30.0,-1.83,35.64\n" + +"33P2,Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,49.0,-1.83,35.64\n" + "...\n"; Test.ensureEqual(results, expected, ""); - Test.ensureEqual(table.nRows(), 53, table.toCSVString()); + Test.ensureEqual(table.nRows(), 53, table.toString()); results = table.columnAttributes(0).toString(); expected = " comment=\"See the list of platform codes (sorted in various ways) at http://www.nodc.noaa.gov/GTSPP/document/codetbls/calllist.html\"\n" + @@ -11072,7 +11968,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("platform"), StringArray.fromCSV("="), StringArray.fromCSV("33P2")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "platform,cruise\n" + "33P2,Q990046312\n"; @@ -11106,7 +12002,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { "just innerTable loadVars, no constraints"); table.readNcCF(fileName, StringArray.fromCSV( "station_id,zztop,type"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_id,type\n" + "13968849,TE\n" + @@ -11133,7 +12029,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("station_id"), StringArray.fromCSV("="), StringArray.fromCSV("13933177")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_id,type\n" + "13933177,BA\n"; @@ -11166,9 +12062,9 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { String2.log("\n\n** Test nLevels=2/" + fileType + " " + "just outerTable and innerTable loadVars, no constraints"); table.readNcCF(fileName, StringArray.fromCSV( - "row,cruise,org,type,station_id,longitude,latitude,time,zztop,platform"), + "cruise,org,type,station_id,longitude,latitude,time,zztop,platform"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "cruise,org,type,station_id,longitude,latitude,time,platform\n" + "Q990046312,ME,TE,13968849,176.64,-75.45,1.3351446E9,33P2\n" + @@ -11194,7 +12090,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("platform,station_id"), StringArray.fromCSV("=,="), StringArray.fromCSV("33P2,13968850")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "cruise,org,type,station_id,longitude,latitude,time,platform\n" + "Q990046312,ME,TE,13968850,176.64,-75.43,1.335216E9,33P2\n"; @@ -11231,7 +12127,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("platform,salinity"), StringArray.fromCSV("=,>="), StringArray.fromCSV("33P2,35.98")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "salinity,platform,cruise\n" + "35.99,33P2,Q990046312\n" + @@ -11267,7 +12163,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { "just innerTable and obs loadVars, no constraints"); table.readNcCF(fileName, StringArray.fromCSV( "latitude,longitude,time,zztop,salinity"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "latitude,longitude,time,salinity\n" + 
"-75.45,176.64,1.3351446E9,35.64\n" + @@ -11348,7 +12244,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("time,salinity"), StringArray.fromCSV("=,="), StringArray.fromCSV("1.335216E9,35.77")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "latitude,longitude,time,salinity\n" + "-75.43,176.64,1.335216E9,35.77\n"; @@ -11385,7 +12281,7 @@ public static void testReadNcCF2(boolean pauseAfterEach) throws Exception { StringArray.fromCSV("salinity"), StringArray.fromCSV("="), StringArray.fromCSV("35.77")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "temperature,salinity\n" + "-1.1,35.77\n" + @@ -11445,35 +12341,35 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(18); + results = table.dataToString(18); expected = /* pre 2012-10-02 was -"row,lat,lon,profile,time,z,temperature,humidity\n" + -"0,137.0,30.0,0,0,8.055641,32.962334,14.262256\n" + -"1,137.0,30.0,0,0,8.9350815,25.433783,41.722572\n" + -"2,137.0,30.0,0,0,5.1047354,4.0391192,44.34395\n" + -"3,137.0,30.0,0,0,4.2890472,6.0850625,13.220096\n" + -"4,137.0,30.0,0,0,1.9311341,32.794086,32.313293\n"; */ +"lat,lon,profile,time,z,temperature,humidity\n" + +"137.0,30.0,0,0,8.055641,32.962334,14.262256\n" + +"137.0,30.0,0,0,8.9350815,25.433783,41.722572\n" + +"137.0,30.0,0,0,5.1047354,4.0391192,44.34395\n" + +"137.0,30.0,0,0,4.2890472,6.0850625,13.220096\n" + +"137.0,30.0,0,0,1.9311341,32.794086,32.313293\n"; */ //z seems to be made up numbers. not in order (for a given profile) as one would expect. -"row,lat,lon,profile,time,z,temperature,humidity\n" + -"0,34.0,115.0,0,0,9.913809,30.592709,32.71529\n" + -"1,34.0,115.0,0,0,5.699307,17.442251,76.26051\n" + -"2,34.0,115.0,0,0,0.617254,14.230382,13.789284\n" + -"3,34.0,115.0,0,0,2.6114788,38.859676,21.792738\n" + -"4,34.0,115.0,0,0,6.519849,28.003593,33.264217\n" + -"5,34.0,115.0,0,0,8.975919,10.699942,61.52172\n" + -"6,34.0,115.0,0,0,9.912431,32.747574,85.96188\n" + -"7,34.0,115.0,0,0,7.5545244,18.109398,41.733406\n" + -"8,34.0,115.0,0,0,7.568512,10.165248,84.50128\n" + -"9,34.0,115.0,0,0,3.376015,0.48572874,5.2108083\n" + -"10,11.0,95.0,1,3600,0.16332848,1.193263,87.431725\n" + -"11,11.0,95.0,1,3600,4.9485574,31.53037,65.04175\n" + -"12,11.0,95.0,1,3600,6.424919,11.956788,54.758873\n" + -"13,11.0,95.0,1,3600,4.7111635,36.69692,50.6536\n" + -"14,11.0,95.0,1,3600,6.854408,21.065716,83.941765\n" + -"15,11.0,95.0,1,3600,9.321201,31.395382,17.139112\n" + -"16,176.0,17.0,2,7200,7.2918577,17.65049,66.33111\n" + -"17,176.0,17.0,2,7200,3.270435,35.854877,17.296724\n" + +"lat,lon,profile,time,z,temperature,humidity\n" + +"34.0,115.0,0,0,9.913809,30.592709,32.71529\n" + +"34.0,115.0,0,0,5.699307,17.442251,76.26051\n" + +"34.0,115.0,0,0,0.617254,14.230382,13.789284\n" + +"34.0,115.0,0,0,2.6114788,38.859676,21.792738\n" + +"34.0,115.0,0,0,6.519849,28.003593,33.264217\n" + +"34.0,115.0,0,0,8.975919,10.699942,61.52172\n" + +"34.0,115.0,0,0,9.912431,32.747574,85.96188\n" + +"34.0,115.0,0,0,7.5545244,18.109398,41.733406\n" + +"34.0,115.0,0,0,7.568512,10.165248,84.50128\n" + +"34.0,115.0,0,0,3.376015,0.48572874,5.2108083\n" + +"11.0,95.0,1,3600,0.16332848,1.193263,87.431725\n" + +"11.0,95.0,1,3600,4.9485574,31.53037,65.04175\n" + +"11.0,95.0,1,3600,6.424919,11.956788,54.758873\n" + 
+"11.0,95.0,1,3600,4.7111635,36.69692,50.6536\n" + +"11.0,95.0,1,3600,6.854408,21.065716,83.941765\n" + +"11.0,95.0,1,3600,9.321201,31.395382,17.139112\n" + +"176.0,17.0,2,7200,7.2918577,17.65049,66.33111\n" + +"176.0,17.0,2,7200,3.270435,35.854877,17.296724\n" + "...\n"; @@ -11669,7 +12565,7 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti //test get outer+obs variables with outer constraint table.readNcCF(fileName, StringArray.fromCSV("profile,lat,lon,temperature"), StringArray.fromCSV("lat"), StringArray.fromCSV("="), StringArray.fromCSV("11")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "profile,lat,lon,temperature\n" + "1,11.0,95.0,1.193263\n" + @@ -11683,7 +12579,7 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti //test get outer+obs variables with obs constraint table.readNcCF(fileName, StringArray.fromCSV("profile,lat,lon,temperature"), StringArray.fromCSV("temperature"), StringArray.fromCSV("="), StringArray.fromCSV("11.956788")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "profile,lat,lon,temperature\n" + "1,11.0,95.0,11.956788\n"; @@ -11692,7 +12588,7 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti //test get obs variables with outer constraint table.readNcCF(fileName, StringArray.fromCSV("lat,temperature"), StringArray.fromCSV("lat"), StringArray.fromCSV("="), StringArray.fromCSV("11")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,temperature\n" + "11.0,1.193263\n" + @@ -11714,54 +12610,54 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(45); + results = table.dataToString(45); expected = -"row,lat,lon,profile,time,alt,temperature,humidity,wind_speed\n" + -"0,171.0,119.0,0,0,3.6755686,17.65279,-999.9,51.078896\n" + -"1,171.0,119.0,0,0,0.26343155,6.2052555,-999.9,64.199974\n" + -"2,171.0,119.0,0,0,3.175112,12.011641,-999.9,6.9850345\n" + -"3,171.0,119.0,0,0,4.208357,21.89748,-999.9,58.273148\n" + -"4,171.0,119.0,0,0,2.6554945,21.416033,-999.9,71.660774\n" + -"5,171.0,119.0,0,0,4.9972143,1.4952343,-999.9,31.470207\n" + -"6,171.0,119.0,0,0,1.9827757,21.466,-999.9,11.440447\n" + -"7,171.0,119.0,0,0,4.1058283,14.191161,-999.9,21.072964\n" + -"8,171.0,119.0,0,0,5.648934,7.727216,-999.9,20.63561\n" + -"9,171.0,119.0,0,0,1.2512851,21.434706,-999.9,60.469204\n" + -"10,171.0,119.0,0,0,9.600934,2.1928697,-999.9,71.77351\n" + -"11,171.0,119.0,0,0,1.9799258,16.0188,-999.9,55.211063\n" + -"12,171.0,119.0,0,0,1.2364764,3.242274,-999.9,11.2599\n" + -"13,171.0,119.0,0,0,2.834809,39.97538,-999.9,84.81159\n" + -"14,171.0,119.0,0,0,3.950956,19.135057,-999.9,29.651375\n" + -"15,171.0,119.0,0,0,8.663035,36.685486,-999.9,16.686064\n" + -"16,171.0,119.0,0,0,1.8081368,31.313751,-999.9,55.862072\n" + -"17,171.0,119.0,0,0,7.7147174,22.89713,-999.9,55.927597\n" + -"18,171.0,119.0,0,0,9.629576,18.616583,-999.9,68.66041\n" + -"19,171.0,119.0,0,0,6.9754705,7.9321976,-999.9,60.648094\n" + -"20,171.0,119.0,0,0,2.7991323,11.907311,-999.9,67.411575\n" + -"21,171.0,119.0,0,0,1.5943866,29.448673,-999.9,79.15605\n" + -"22,171.0,119.0,0,0,0.9762172,3.3020692,-999.9,85.00339\n" + -"23,171.0,119.0,0,0,5.5088353,12.813819,-999.9,77.104706\n" + -"24,171.0,119.0,0,0,7.2601357,38.730194,-999.9,18.446539\n" + 
-"25,171.0,119.0,0,0,8.384121,19.790619,-999.9,74.80566\n" + -"26,171.0,119.0,0,0,6.4686337,23.498947,-999.9,76.68345\n" + -"27,171.0,119.0,0,0,2.0993211,21.344112,-999.9,28.282118\n" + -"28,171.0,119.0,0,0,0.8403456,17.045395,-999.9,88.80201\n" + -"29,171.0,119.0,0,0,9.251101,15.639243,-999.9,70.71877\n" + -"30,171.0,119.0,0,0,1.3482393,9.54115,-999.9,59.91356\n" + -"31,171.0,119.0,0,0,3.6940877,30.967232,-999.9,35.620453\n" + -"32,171.0,119.0,0,0,6.3351345,6.0343504,-999.9,44.98056\n" + -"33,171.0,119.0,0,0,6.3332343,20.940767,-999.9,76.89658\n" + -"34,171.0,119.0,0,0,0.053762503,20.765089,-999.9,12.856414\n" + -"35,171.0,119.0,0,0,1.0131614,12.508157,-999.9,69.99224\n" + -"36,171.0,119.0,0,0,4.424666,37.28969,-999.9,24.69326\n" + -"37,171.0,119.0,0,0,1.5825375,17.199543,-999.9,63.037647\n" + -"38,171.0,119.0,0,0,3.072151,13.194056,-999.9,33.561863\n" + -"39,171.0,119.0,0,0,5.897976,6.350154,-999.9,9.787908\n" + -"40,171.0,119.0,0,0,1.6135278,22.95996,-999.9,85.10665\n" + -"41,171.0,119.0,0,0,6.9384937,7.619196,-999.9,33.569344\n" + -"42,155.0,158.0,1,3600,2.1733663,4.981018,-999.9,41.24567\n" + -"43,155.0,158.0,1,3600,2.189715,16.313164,-999.9,8.15441\n" + -"44,155.0,158.0,1,3600,9.445334,18.173727,-999.9,52.259445\n" + +"lat,lon,profile,time,alt,temperature,humidity,wind_speed\n" + +"171.0,119.0,0,0,3.6755686,17.65279,-999.9,51.078896\n" + +"171.0,119.0,0,0,0.26343155,6.2052555,-999.9,64.199974\n" + +"171.0,119.0,0,0,3.175112,12.011641,-999.9,6.9850345\n" + +"171.0,119.0,0,0,4.208357,21.89748,-999.9,58.273148\n" + +"171.0,119.0,0,0,2.6554945,21.416033,-999.9,71.660774\n" + +"171.0,119.0,0,0,4.9972143,1.4952343,-999.9,31.470207\n" + +"171.0,119.0,0,0,1.9827757,21.466,-999.9,11.440447\n" + +"171.0,119.0,0,0,4.1058283,14.191161,-999.9,21.072964\n" + +"171.0,119.0,0,0,5.648934,7.727216,-999.9,20.63561\n" + +"171.0,119.0,0,0,1.2512851,21.434706,-999.9,60.469204\n" + +"171.0,119.0,0,0,9.600934,2.1928697,-999.9,71.77351\n" + +"171.0,119.0,0,0,1.9799258,16.0188,-999.9,55.211063\n" + +"171.0,119.0,0,0,1.2364764,3.242274,-999.9,11.2599\n" + +"171.0,119.0,0,0,2.834809,39.97538,-999.9,84.81159\n" + +"171.0,119.0,0,0,3.950956,19.135057,-999.9,29.651375\n" + +"171.0,119.0,0,0,8.663035,36.685486,-999.9,16.686064\n" + +"171.0,119.0,0,0,1.8081368,31.313751,-999.9,55.862072\n" + +"171.0,119.0,0,0,7.7147174,22.89713,-999.9,55.927597\n" + +"171.0,119.0,0,0,9.629576,18.616583,-999.9,68.66041\n" + +"171.0,119.0,0,0,6.9754705,7.9321976,-999.9,60.648094\n" + +"171.0,119.0,0,0,2.7991323,11.907311,-999.9,67.411575\n" + +"171.0,119.0,0,0,1.5943866,29.448673,-999.9,79.15605\n" + +"171.0,119.0,0,0,0.9762172,3.3020692,-999.9,85.00339\n" + +"171.0,119.0,0,0,5.5088353,12.813819,-999.9,77.104706\n" + +"171.0,119.0,0,0,7.2601357,38.730194,-999.9,18.446539\n" + +"171.0,119.0,0,0,8.384121,19.790619,-999.9,74.80566\n" + +"171.0,119.0,0,0,6.4686337,23.498947,-999.9,76.68345\n" + +"171.0,119.0,0,0,2.0993211,21.344112,-999.9,28.282118\n" + +"171.0,119.0,0,0,0.8403456,17.045395,-999.9,88.80201\n" + +"171.0,119.0,0,0,9.251101,15.639243,-999.9,70.71877\n" + +"171.0,119.0,0,0,1.3482393,9.54115,-999.9,59.91356\n" + +"171.0,119.0,0,0,3.6940877,30.967232,-999.9,35.620453\n" + +"171.0,119.0,0,0,6.3351345,6.0343504,-999.9,44.98056\n" + +"171.0,119.0,0,0,6.3332343,20.940767,-999.9,76.89658\n" + +"171.0,119.0,0,0,0.053762503,20.765089,-999.9,12.856414\n" + +"171.0,119.0,0,0,1.0131614,12.508157,-999.9,69.99224\n" + +"171.0,119.0,0,0,4.424666,37.28969,-999.9,24.69326\n" + +"171.0,119.0,0,0,1.5825375,17.199543,-999.9,63.037647\n" + 
+"171.0,119.0,0,0,3.072151,13.194056,-999.9,33.561863\n" + +"171.0,119.0,0,0,5.897976,6.350154,-999.9,9.787908\n" + +"171.0,119.0,0,0,1.6135278,22.95996,-999.9,85.10665\n" + +"171.0,119.0,0,0,6.9384937,7.619196,-999.9,33.569344\n" + +"155.0,158.0,1,3600,2.1733663,4.981018,-999.9,41.24567\n" + +"155.0,158.0,1,3600,2.189715,16.313164,-999.9,8.15441\n" + +"155.0,158.0,1,3600,9.445334,18.173727,-999.9,52.259445\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); // 42 * 142 = 5964 obs spaces, so it is incomplete @@ -11963,30 +12859,30 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); -String2.log(table.dataToCSVString()); - results = table.dataToCSVString(20); +String2.log(table.dataToString()); + results = table.dataToString(20); expected = -"row,lat,lon,profile,time,z,temperature,humidity\n" + -"0,93.0,71.0,0,0,0.38200212,23.69535,52.60904\n" + -"1,93.0,71.0,0,0,1.200709,29.121883,12.060117\n" + -"2,93.0,71.0,0,0,2.6969194,23.355228,9.943134\n" + -"3,93.0,71.0,0,0,3.4035592,21.57062,75.10006\n" + -"4,93.0,71.0,0,0,5.829337,2.9969826,17.760695\n" + -"5,93.0,71.0,0,0,5.8626857,37.635395,86.32262\n" + -"6,93.0,71.0,0,0,6.5773344,2.3481517,85.33706\n" + -"7,93.0,71.0,0,0,7.7204447,5.337912,54.993973\n" + -"8,93.0,71.0,0,0,8.301987,32.431896,88.71708\n" + -"9,93.0,71.0,0,0,9.088309,30.518106,44.74581\n" + -"10,45.0,151.0,1,3600,0.47979552,28.567852,65.933014\n" + -"11,45.0,151.0,1,3600,0.594338,7.940218,79.38502\n" + -"12,45.0,151.0,1,3600,4.0314445,20.808128,13.365513\n" + -"13,45.0,151.0,1,3600,6.101271,4.62561,8.945877\n" + -"14,45.0,151.0,1,3600,6.1228404,13.251722,50.431633\n" + -"15,45.0,151.0,1,3600,8.454789,17.803867,4.852586\n" + -"16,169.0,145.0,2,7200,1.9213479,7.1473145,11.227387\n" + -"17,169.0,145.0,2,7200,3.328237,27.21546,29.352453\n" + -"18,112.0,9.0,3,10800,0.009190708,6.3910594,56.909916\n" + -"19,112.0,9.0,3,10800,0.013856917,13.634793,63.741573\n" + +"lat,lon,profile,time,z,temperature,humidity\n" + +"93.0,71.0,0,0,0.38200212,23.69535,52.60904\n" + +"93.0,71.0,0,0,1.200709,29.121883,12.060117\n" + +"93.0,71.0,0,0,2.6969194,23.355228,9.943134\n" + +"93.0,71.0,0,0,3.4035592,21.57062,75.10006\n" + +"93.0,71.0,0,0,5.829337,2.9969826,17.760695\n" + +"93.0,71.0,0,0,5.8626857,37.635395,86.32262\n" + +"93.0,71.0,0,0,6.5773344,2.3481517,85.33706\n" + +"93.0,71.0,0,0,7.7204447,5.337912,54.993973\n" + +"93.0,71.0,0,0,8.301987,32.431896,88.71708\n" + +"93.0,71.0,0,0,9.088309,30.518106,44.74581\n" + +"45.0,151.0,1,3600,0.47979552,28.567852,65.933014\n" + +"45.0,151.0,1,3600,0.594338,7.940218,79.38502\n" + +"45.0,151.0,1,3600,4.0314445,20.808128,13.365513\n" + +"45.0,151.0,1,3600,6.101271,4.62561,8.945877\n" + +"45.0,151.0,1,3600,6.1228404,13.251722,50.431633\n" + +"45.0,151.0,1,3600,8.454789,17.803867,4.852586\n" + +"169.0,145.0,2,7200,1.9213479,7.1473145,11.227387\n" + +"169.0,145.0,2,7200,3.328237,27.21546,29.352453\n" + +"112.0,9.0,3,10800,0.009190708,6.3910594,56.909916\n" + +"112.0,9.0,3,10800,0.013856917,13.634793,63.741573\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 100, ""); @@ -12005,7 +12901,7 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti StringArray.fromCSV("profile,temperature"), StringArray.fromCSV("=,="), StringArray.fromCSV("3,13.634793")); - results = table.dataToCSVString(); + results = 
table.dataToString(); expected = "profile,temperature\n" + "3,13.634793\n"; @@ -12196,7 +13092,7 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti StringArray.fromCSV("profile,z"), StringArray.fromCSV("=,="), StringArray.fromCSV("3,0.013856917")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "profile,z\n" + "3,0.013856917\n"; @@ -12252,64 +13148,64 @@ public static void testReadNcCFASAProfile(boolean pauseAfterEach) throws Excepti " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(55); + results = table.dataToString(55); expected = //z[obs] is in the innerTable -"row,lat,lon,profile,time,z,temperature,humidity\n" + -"0,19.0,116.0,0,0,1.6315197,15.477672,45.439682\n" + -"1,19.0,116.0,0,0,7.0598154,31.758614,35.987625\n" + -"2,19.0,116.0,0,0,0.36953768,2.4893014,65.79051\n" + -"3,19.0,116.0,0,0,7.5342026,26.857018,48.042828\n" + -"4,19.0,116.0,0,0,7.404938,19.151163,35.629215\n" + -"5,19.0,116.0,0,0,1.4442251,24.565704,37.29833\n" + -"6,19.0,116.0,0,0,9.80883,15.455084,23.763685\n" + -"7,19.0,116.0,0,0,8.060886,26.090511,14.579169\n" + -"8,19.0,116.0,0,0,1.965906,8.010671,69.79476\n" + -"9,19.0,116.0,0,0,9.60608,26.692741,78.83376\n" + -"10,19.0,116.0,0,0,9.839138,39.378746,37.22304\n" + -"11,19.0,116.0,0,0,6.2004266,14.685706,39.81143\n" + -"12,19.0,116.0,0,0,6.9113455,7.344667,18.64804\n" + -"13,19.0,116.0,0,0,8.798231,7.1495833,25.831097\n" + -"14,19.0,116.0,0,0,2.3565977,0.25708458,32.442547\n" + -"15,19.0,116.0,0,0,8.742956,34.86492,49.41099\n" + -"16,19.0,116.0,0,0,8.557564,35.413876,66.573906\n" + -"17,19.0,116.0,0,0,9.6161375,37.28068,4.6605506\n" + -"18,19.0,116.0,0,0,6.610992,5.4654717,60.635574\n" + -"19,19.0,116.0,0,0,1.936887,33.513893,82.823166\n" + -"20,19.0,116.0,0,0,3.0184858,31.41321,75.51568\n" + -"21,19.0,116.0,0,0,2.5581324,15.092895,79.2067\n" + -"22,19.0,116.0,0,0,7.1288857,20.573462,27.601343\n" + -"23,19.0,116.0,0,0,1.5220404,0.5649648,3.6447735\n" + -"24,19.0,116.0,0,0,3.276416,27.345316,62.10269\n" + -"25,19.0,116.0,0,0,0.40930283,27.671362,79.762955\n" + -"26,19.0,116.0,0,0,2.4845016,31.252121,61.57929\n" + -"27,19.0,116.0,0,0,9.366717,9.342631,78.63049\n" + -"28,19.0,116.0,0,0,0.3365049,20.81806,29.236477\n" + -"29,19.0,116.0,0,0,7.646478,3.1961684,7.8138685\n" + -"30,19.0,116.0,0,0,5.075439,36.427265,20.879707\n" + -"31,19.0,116.0,0,0,5.1594234,18.314194,6.4109855\n" + -"32,19.0,116.0,0,0,2.1663764,10.056105,5.798549\n" + -"33,19.0,116.0,0,0,9.028424,5.7192965,56.243206\n" + -"34,19.0,116.0,0,0,9.031402,13.884695,36.763905\n" + -"35,19.0,116.0,0,0,5.26929,3.5693107,84.04594\n" + -"36,19.0,116.0,0,0,2.6247969,8.933488,28.76576\n" + -"37,19.0,116.0,0,0,9.745737,24.357897,76.431816\n" + -"38,19.0,116.0,0,0,3.722143,17.96677,18.759092\n" + -"39,19.0,116.0,0,0,1.9264901,28.71267,52.148735\n" + -"40,19.0,116.0,0,0,3.9815784,35.91171,33.082714\n" + -"41,19.0,116.0,0,0,4.657818,31.10753,65.25383\n" + -"42,109.0,178.0,1,3600,1.6315197,26.582031,10.312429\n" + -"43,109.0,178.0,1,3600,7.0598154,4.909754,50.415916\n" + -"44,109.0,178.0,1,3600,0.36953768,30.069138,36.845417\n" + -"45,109.0,178.0,1,3600,7.5342026,3.341837,52.53064\n" + -"46,109.0,178.0,1,3600,7.404938,36.832874,81.62572\n" + -"47,109.0,178.0,1,3600,1.4442251,21.88992,78.833565\n" + -"48,109.0,178.0,1,3600,9.80883,25.902088,50.43351\n" + -"49,109.0,178.0,1,3600,8.060886,30.653927,81.53324\n" + 
-"50,109.0,178.0,1,3600,1.965906,0.8834069,86.67266\n" + -"51,109.0,178.0,1,3600,9.60608,27.2307,74.25348\n" + -"52,109.0,178.0,1,3600,9.839138,15.706074,86.22133\n" + -"53,109.0,178.0,1,3600,6.2004266,34.751484,79.71265\n" + -"54,109.0,178.0,1,3600,6.9113455,16.43026,30.387852\n" + +"lat,lon,profile,time,z,temperature,humidity\n" + +"19.0,116.0,0,0,1.6315197,15.477672,45.439682\n" + +"19.0,116.0,0,0,7.0598154,31.758614,35.987625\n" + +"19.0,116.0,0,0,0.36953768,2.4893014,65.79051\n" + +"19.0,116.0,0,0,7.5342026,26.857018,48.042828\n" + +"19.0,116.0,0,0,7.404938,19.151163,35.629215\n" + +"19.0,116.0,0,0,1.4442251,24.565704,37.29833\n" + +"19.0,116.0,0,0,9.80883,15.455084,23.763685\n" + +"19.0,116.0,0,0,8.060886,26.090511,14.579169\n" + +"19.0,116.0,0,0,1.965906,8.010671,69.79476\n" + +"19.0,116.0,0,0,9.60608,26.692741,78.83376\n" + +"19.0,116.0,0,0,9.839138,39.378746,37.22304\n" + +"19.0,116.0,0,0,6.2004266,14.685706,39.81143\n" + +"19.0,116.0,0,0,6.9113455,7.344667,18.64804\n" + +"19.0,116.0,0,0,8.798231,7.1495833,25.831097\n" + +"19.0,116.0,0,0,2.3565977,0.25708458,32.442547\n" + +"19.0,116.0,0,0,8.742956,34.86492,49.41099\n" + +"19.0,116.0,0,0,8.557564,35.413876,66.573906\n" + +"19.0,116.0,0,0,9.6161375,37.28068,4.6605506\n" + +"19.0,116.0,0,0,6.610992,5.4654717,60.635574\n" + +"19.0,116.0,0,0,1.936887,33.513893,82.823166\n" + +"19.0,116.0,0,0,3.0184858,31.41321,75.51568\n" + +"19.0,116.0,0,0,2.5581324,15.092895,79.2067\n" + +"19.0,116.0,0,0,7.1288857,20.573462,27.601343\n" + +"19.0,116.0,0,0,1.5220404,0.5649648,3.6447735\n" + +"19.0,116.0,0,0,3.276416,27.345316,62.10269\n" + +"19.0,116.0,0,0,0.40930283,27.671362,79.762955\n" + +"19.0,116.0,0,0,2.4845016,31.252121,61.57929\n" + +"19.0,116.0,0,0,9.366717,9.342631,78.63049\n" + +"19.0,116.0,0,0,0.3365049,20.81806,29.236477\n" + +"19.0,116.0,0,0,7.646478,3.1961684,7.8138685\n" + +"19.0,116.0,0,0,5.075439,36.427265,20.879707\n" + +"19.0,116.0,0,0,5.1594234,18.314194,6.4109855\n" + +"19.0,116.0,0,0,2.1663764,10.056105,5.798549\n" + +"19.0,116.0,0,0,9.028424,5.7192965,56.243206\n" + +"19.0,116.0,0,0,9.031402,13.884695,36.763905\n" + +"19.0,116.0,0,0,5.26929,3.5693107,84.04594\n" + +"19.0,116.0,0,0,2.6247969,8.933488,28.76576\n" + +"19.0,116.0,0,0,9.745737,24.357897,76.431816\n" + +"19.0,116.0,0,0,3.722143,17.96677,18.759092\n" + +"19.0,116.0,0,0,1.9264901,28.71267,52.148735\n" + +"19.0,116.0,0,0,3.9815784,35.91171,33.082714\n" + +"19.0,116.0,0,0,4.657818,31.10753,65.25383\n" + +"109.0,178.0,1,3600,1.6315197,26.582031,10.312429\n" + +"109.0,178.0,1,3600,7.0598154,4.909754,50.415916\n" + +"109.0,178.0,1,3600,0.36953768,30.069138,36.845417\n" + +"109.0,178.0,1,3600,7.5342026,3.341837,52.53064\n" + +"109.0,178.0,1,3600,7.404938,36.832874,81.62572\n" + +"109.0,178.0,1,3600,1.4442251,21.88992,78.833565\n" + +"109.0,178.0,1,3600,9.80883,25.902088,50.43351\n" + +"109.0,178.0,1,3600,8.060886,30.653927,81.53324\n" + +"109.0,178.0,1,3600,1.965906,0.8834069,86.67266\n" + +"109.0,178.0,1,3600,9.60608,27.2307,74.25348\n" + +"109.0,178.0,1,3600,9.839138,15.706074,86.22133\n" + +"109.0,178.0,1,3600,6.2004266,34.751484,79.71265\n" + +"109.0,178.0,1,3600,6.9113455,16.43026,30.387852\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 5964, ""); @@ -12429,21 +13325,21 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //!!! obs vars are temperature[time=100][station=10] //so outer=time and inner is station! 
table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(12); + results = table.dataToString(12); expected = -"row,lat,lon,station_name,alt,time,temperature,humidity\n" + -"0,8.0,146.0,Station-0,0.8488673,0,18.618036,27.177536\n" + -"1,4.0,53.0,Station-1,1.8478156,0,13.216496,83.71079\n" + -"2,90.0,159.0,Station-2,3.4614673,0,39.300182,44.69293\n" + -"3,55.0,25.0,Station-3,4.8902116,0,17.008652,2.3659434\n" + -"4,115.0,30.0,Station-4,9.45969,0,24.951536,7.1026664\n" + -"5,165.0,125.0,Station-5,0.17808062,0,35.995247,41.411594\n" + -"6,143.0,175.0,Station-6,8.85507,0,24.334364,39.776123\n" + -"7,157.0,175.0,Station-7,0.47320434,0,33.077255,1.1665242\n" + -"8,101.0,80.0,Station-8,7.470208,0,6.9397545,72.75068\n" + -"9,167.0,57.0,Station-9,0.6709764,0,28.991974,71.65753\n" + -"10,8.0,146.0,Station-0,0.8488673,3600,3.0675685,53.43748\n" + -"11,4.0,53.0,Station-1,1.8478156,3600,37.31892,46.79294\n" + +"lat,lon,station_name,alt,time,temperature,humidity\n" + +"8.0,146.0,Station-0,0.8488673,0,18.618036,27.177536\n" + +"4.0,53.0,Station-1,1.8478156,0,13.216496,83.71079\n" + +"90.0,159.0,Station-2,3.4614673,0,39.300182,44.69293\n" + +"55.0,25.0,Station-3,4.8902116,0,17.008652,2.3659434\n" + +"115.0,30.0,Station-4,9.45969,0,24.951536,7.1026664\n" + +"165.0,125.0,Station-5,0.17808062,0,35.995247,41.411594\n" + +"143.0,175.0,Station-6,8.85507,0,24.334364,39.776123\n" + +"157.0,175.0,Station-7,0.47320434,0,33.077255,1.1665242\n" + +"101.0,80.0,Station-8,7.470208,0,6.9397545,72.75068\n" + +"167.0,57.0,Station-9,0.6709764,0,28.991974,71.65753\n" + +"8.0,146.0,Station-0,0.8488673,3600,3.0675685,53.43748\n" + +"4.0,53.0,Station-1,1.8478156,3600,37.31892,46.79294\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 1000, ""); @@ -12734,7 +13630,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just inner [station] vars table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon\n" + "Station-0,8.0,146.0\n" + @@ -12752,7 +13648,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just inner [station] vars, with constraint table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon"), StringArray.fromCSV("lat"), StringArray.fromCSV(">"), StringArray.fromCSV("150")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon\n" + "Station-5,165.0,125.0\n" + @@ -12762,14 +13658,14 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer [time] vars table.readNcCF(fileName, StringArray.fromCSV("time"), null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,time\n" + -"0,0\n" + -"1,3600\n" + -"2,7200\n" + -"3,10800\n" + -"4,14400\n" + +"time\n" + +"0\n" + +"3600\n" + +"7200\n" + +"10800\n" + +"14400\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 100, ""); @@ -12777,7 +13673,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer [time] vars, with constraint table.readNcCF(fileName, StringArray.fromCSV("time"), StringArray.fromCSV("time,time"), StringArray.fromCSV(">,<"), StringArray.fromCSV("7000,11000")); - results = table.dataToCSVString(); + results = 
table.dataToString(); expected = "time\n" + "7200\n" + @@ -12786,7 +13682,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+inner [time][station] vars table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon,time"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon,time\n" + "Station-0,8.0,146.0,0\n" + @@ -12821,7 +13717,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+inner [time][station] vars, with outer [time] constraint table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon,time"), StringArray.fromCSV("time"), StringArray.fromCSV("="), StringArray.fromCSV("345600")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon,time\n" + "Station-0,8.0,146.0,345600\n" + @@ -12839,7 +13735,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+inner [time][station] vars, with inner [station] constraint table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon,time"), StringArray.fromCSV("lat"), StringArray.fromCSV("="), StringArray.fromCSV("165")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon,time\n" + "Station-5,165.0,125.0,0\n" + @@ -12855,7 +13751,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+inner [time][station] vars, with outer and inner constraint table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon,time"), StringArray.fromCSV("time,lat"), StringArray.fromCSV("=,="), StringArray.fromCSV("345600,165")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon,time\n" + "Station-5,165.0,125.0,345600\n"; @@ -12864,7 +13760,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+inner+obs vars, with outer and inner constraint table.readNcCF(fileName, StringArray.fromCSV(""), StringArray.fromCSV("time,lat"), StringArray.fromCSV("=,="), StringArray.fromCSV("345600,165")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,station_name,alt,time,temperature,humidity\n" + "165.0,125.0,Station-5,0.17808062,345600,38.457962,28.075706\n"; @@ -12874,7 +13770,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, StringArray.fromCSV("lat,time,temperature,humidity"), StringArray.fromCSV("time,lat"), StringArray.fromCSV("=,="), StringArray.fromCSV("345600,165")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,time,temperature,humidity\n" + "165.0,345600,38.457962,28.075706\n"; @@ -12883,7 +13779,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just inner+obs vars, with outer and inner constraint table.readNcCF(fileName, StringArray.fromCSV("lat,lon,station_name,time,temperature"), StringArray.fromCSV("time,lat"), StringArray.fromCSV("=,="), StringArray.fromCSV("345600,165")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,station_name,time,temperature\n" + "165.0,125.0,Station-5,345600,38.457962\n"; @@ -12892,7 +13788,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce 
//read just obs vars, with outer and inner constraint table.readNcCF(fileName, StringArray.fromCSV("time,lat,temperature"), StringArray.fromCSV("time,lat"), StringArray.fromCSV("=,="), StringArray.fromCSV("345600,165")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,lat,temperature\n" + "345600,165.0,38.457962\n"; @@ -12907,33 +13803,33 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(24); + results = table.dataToString(24); expected = -"row,lat,lon,station_elevation,station_info,station_name,alt,time,temperature,humidity\n" + -"0,121.0,81.0,4.859895,0,Station-0,1.9358816,0,17.0,56.0\n" + -"1,121.0,81.0,4.859895,0,Station-0,1.9358816,3600,7.0,49.0\n" + -"2,121.0,81.0,4.859895,0,Station-0,1.9358816,7200,19.0,86.0\n" + -"3,121.0,81.0,4.859895,0,Station-0,1.9358816,10800,5.0,81.0\n" + -"4,121.0,81.0,4.859895,0,Station-0,1.9358816,14400,0.0,55.0\n" + -"5,121.0,81.0,4.859895,0,Station-0,1.9358816,18000,10.0,9.0\n" + -"6,121.0,81.0,4.859895,0,Station-0,1.9358816,21600,32.0,57.0\n" + -"7,121.0,81.0,4.859895,0,Station-0,1.9358816,25200,39.0,39.0\n" + -"8,121.0,81.0,4.859895,0,Station-0,1.9358816,28800,39.0,68.0\n" + -"9,121.0,81.0,4.859895,0,Station-0,1.9358816,32400,29.0,6.0\n" + -"10,121.0,81.0,4.859895,0,Station-0,1.9358816,36000,26.0,12.0\n" + -"11,121.0,81.0,4.859895,0,Station-0,1.9358816,39600,24.0,72.0\n" + -"12,121.0,81.0,4.859895,0,Station-0,1.9358816,43200,14.0,80.0\n" + -"13,121.0,81.0,4.859895,0,Station-0,1.9358816,46800,38.0,52.0\n" + -"14,121.0,81.0,4.859895,0,Station-0,1.9358816,50400,35.0,46.0\n" + -"15,121.0,81.0,4.859895,0,Station-0,1.9358816,54000,33.0,48.0\n" + -"16,121.0,81.0,4.859895,0,Station-0,1.9358816,57600,34.0,85.0\n" + -"17,121.0,81.0,4.859895,0,Station-0,1.9358816,61200,27.0,3.0\n" + -"18,121.0,81.0,4.859895,0,Station-0,1.9358816,64800,37.0,61.0\n" + -"19,121.0,81.0,4.859895,0,Station-0,1.9358816,68400,0.0,0.0\n" + -"20,150.0,73.0,2.6002314,1,Station-1,4.052759,0,25.0,73.0\n" + -"21,150.0,73.0,2.6002314,1,Station-1,4.052759,3600,29.0,74.0\n" + -"22,150.0,73.0,2.6002314,1,Station-1,4.052759,7200,33.0,88.0\n" + -"23,150.0,73.0,2.6002314,1,Station-1,4.052759,10800,25.0,3.0\n" + +"lat,lon,station_elevation,station_info,station_name,alt,time,temperature,humidity\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,0,17.0,56.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,3600,7.0,49.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,7200,19.0,86.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,10800,5.0,81.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,14400,0.0,55.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,18000,10.0,9.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,21600,32.0,57.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,25200,39.0,39.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,28800,39.0,68.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,32400,29.0,6.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,36000,26.0,12.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,39600,24.0,72.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,43200,14.0,80.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,46800,38.0,52.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,50400,35.0,46.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,54000,33.0,48.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,57600,34.0,85.0\n" + 
+"121.0,81.0,4.859895,0,Station-0,1.9358816,61200,27.0,3.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,64800,37.0,61.0\n" + +"121.0,81.0,4.859895,0,Station-0,1.9358816,68400,0.0,0.0\n" + +"150.0,73.0,2.6002314,1,Station-1,4.052759,0,25.0,73.0\n" + +"150.0,73.0,2.6002314,1,Station-1,4.052759,3600,29.0,74.0\n" + +"150.0,73.0,2.6002314,1,Station-1,4.052759,7200,33.0,88.0\n" + +"150.0,73.0,2.6002314,1,Station-1,4.052759,10800,25.0,3.0\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 200, ""); @@ -13052,7 +13948,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer [station] vars table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon\n" + "Station-0,121.0,81.0\n" + @@ -13070,7 +13966,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer [station] vars, with constraint table.readNcCF(fileName, StringArray.fromCSV("station_name,lat,lon"), StringArray.fromCSV("lat"), StringArray.fromCSV(">"), StringArray.fromCSV("155")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_name,lat,lon\n" + "Station-5,161.0,100.0\n" + @@ -13082,7 +13978,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+obs vars, with outer constraint table.readNcCF(fileName, StringArray.fromCSV("lat,time,temperature,humidity"), StringArray.fromCSV("lat"), StringArray.fromCSV("="), StringArray.fromCSV("150")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = //metadata mv=-999.9 for temp and humidity, so 9e36 below are "valid" values "lat,time,temperature,humidity\n" + @@ -13131,7 +14027,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+obs vars, with outer + obs constraint table.readNcCF(fileName, StringArray.fromCSV("lat,time,temperature,humidity"), StringArray.fromCSV("lat,humidity"), StringArray.fromCSV("=,="), StringArray.fromCSV("150,43")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,time,temperature,humidity\n" + "150.0,28800,1.0,43.0\n" + @@ -13141,7 +14037,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just outer+obs vars, table.readNcCF(fileName, StringArray.fromCSV("time,temperature,humidity"), StringArray.fromCSV("time,temperature"), StringArray.fromCSV("=,="), StringArray.fromCSV("7200,33")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = //"22,150.0,73.0,2.6002314,1,Station-1,4.052759,7200,33.0,88.0\n" + from above "time,temperature,humidity\n" + @@ -13151,7 +14047,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just obs vars, with outer and obs constraint table.readNcCF(fileName, StringArray.fromCSV("time,lon,alt,temperature"), StringArray.fromCSV("time,lon"), StringArray.fromCSV("=,="), StringArray.fromCSV("7200,73")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,lon,alt,temperature\n" + "7200,73.0,4.052759,33.0\n"; @@ -13161,7 +14057,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just obs vars, with outer and obs constraint table.readNcCF(fileName, 
StringArray.fromCSV("time,lon,alt,temperature"), StringArray.fromCSV("lon,temperature"), StringArray.fromCSV("=,="), StringArray.fromCSV("73,33")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,lon,alt,temperature\n" + "7200,73.0,4.052759,33.0\n" + @@ -13171,7 +14067,7 @@ public static void testReadNcCFASATimeSeries(boolean pauseAfterEach) throws Exce //read just obs vars, with outer and obs constraint table.readNcCF(fileName, StringArray.fromCSV("time,lon,alt,temperature"), StringArray.fromCSV("time,temperature"), StringArray.fromCSV("=,="), StringArray.fromCSV("7200,33")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,lon,alt,temperature\n" + "7200,73.0,4.052759,33.0\n"; @@ -13203,29 +14099,29 @@ public static void testReadNcCFASATrajectory(boolean pauseAfterEach) throws Exce " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(20); + results = table.dataToString(20); expected = -"row,lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + -"0,16.937433,-35.901237,0,Trajectory0,0,0.0,18.559397,46.487503\n" + -"1,32.011345,-8.81588,0,Trajectory0,3600,1.0,34.649773,16.22458\n" + -"2,3.137092,-64.15942,0,Trajectory0,7200,2.0,35.318504,77.41457\n" + -"3,10.783036,-11.503419,0,Trajectory0,10800,3.0,19.39111,56.601\n" + -"4,4.6016994,-6.416601,0,Trajectory0,14400,4.0,5.4162874,62.606712\n" + -"5,25.337688,-69.37197,0,Trajectory0,18000,5.0,2.604784,16.390015\n" + -"6,30.219189,-71.78619,0,Trajectory0,21600,6.0,22.968603,62.276855\n" + -"7,5.3421707,-29.245968,0,Trajectory0,25200,7.0,8.609019,14.976101\n" + -"8,25.687958,-57.089973,0,Trajectory0,28800,8.0,9.202528,79.17113\n" + -"9,31.82367,-58.56237,0,Trajectory0,32400,9.0,1.5670301,26.49425\n" + -"10,23.310976,-3.997997,0,Trajectory0,36000,10.0,23.187065,64.34719\n" + -"11,43.486816,-62.39688,0,Trajectory0,39600,11.0,37.44155,29.570276\n" + -"12,44.56024,-54.139122,0,Trajectory0,43200,12.0,11.75348,72.36402\n" + -"13,42.48622,-42.518707,1,Trajectory1,0,0.0,20.665886,67.27393\n" + -"14,32.187572,-73.20317,1,Trajectory1,3600,1.0,26.498121,79.754486\n" + -"15,6.4802227,-72.74957,1,Trajectory1,7200,2.0,17.64227,70.126625\n" + -"16,38.596996,-67.64374,1,Trajectory1,10800,3.0,23.615097,59.626125\n" + -"17,24.085066,-63.833694,1,Trajectory1,14400,4.0,30.743101,35.862038\n" + -"18,24.221394,-57.373817,1,Trajectory1,18000,5.0,39.391495,28.661589\n" + -"19,22.637892,-47.858807,1,Trajectory1,21600,6.0,1.2310536,55.708595\n" + +"lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + +"16.937433,-35.901237,0,Trajectory0,0,0.0,18.559397,46.487503\n" + +"32.011345,-8.81588,0,Trajectory0,3600,1.0,34.649773,16.22458\n" + +"3.137092,-64.15942,0,Trajectory0,7200,2.0,35.318504,77.41457\n" + +"10.783036,-11.503419,0,Trajectory0,10800,3.0,19.39111,56.601\n" + +"4.6016994,-6.416601,0,Trajectory0,14400,4.0,5.4162874,62.606712\n" + +"25.337688,-69.37197,0,Trajectory0,18000,5.0,2.604784,16.390015\n" + +"30.219189,-71.78619,0,Trajectory0,21600,6.0,22.968603,62.276855\n" + +"5.3421707,-29.245968,0,Trajectory0,25200,7.0,8.609019,14.976101\n" + +"25.687958,-57.089973,0,Trajectory0,28800,8.0,9.202528,79.17113\n" + +"31.82367,-58.56237,0,Trajectory0,32400,9.0,1.5670301,26.49425\n" + +"23.310976,-3.997997,0,Trajectory0,36000,10.0,23.187065,64.34719\n" + +"43.486816,-62.39688,0,Trajectory0,39600,11.0,37.44155,29.570276\n" + 
+"44.56024,-54.139122,0,Trajectory0,43200,12.0,11.75348,72.36402\n" + +"42.48622,-42.518707,1,Trajectory1,0,0.0,20.665886,67.27393\n" + +"32.187572,-73.20317,1,Trajectory1,3600,1.0,26.498121,79.754486\n" + +"6.4802227,-72.74957,1,Trajectory1,7200,2.0,17.64227,70.126625\n" + +"38.596996,-67.64374,1,Trajectory1,10800,3.0,23.615097,59.626125\n" + +"24.085066,-63.833694,1,Trajectory1,14400,4.0,30.743101,35.862038\n" + +"24.221394,-57.373817,1,Trajectory1,18000,5.0,39.391495,28.661589\n" + +"22.637892,-47.858807,1,Trajectory1,21600,6.0,1.2310536,55.708595\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 163, ""); @@ -13607,48 +14503,48 @@ public static void testReadNcCFASATrajectory(boolean pauseAfterEach) throws Exce table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(55); + results = table.dataToString(55); expected = //rows that a human thinks should be rejected are kept //probably because trajectory_info and trajectory_name aren't missing values // AND because lat,lon have missing values but there is no missing_value attribute. -"row,lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + -"0,2.152863,-35.078842,0,Trajectory0,0,0.0,13.466983,65.38418\n" + -"1,33.60481,-44.12696,0,Trajectory0,3600,1.0,23.050304,7.0401154\n" + -"2,22.562508,-18.115444,0,Trajectory0,7200,2.0,15.112072,45.15019\n" + -"3,13.432817,-21.772585,0,Trajectory0,10800,3.0,22.97767,12.618799\n" + -"4,43.011986,-28.655304,0,Trajectory0,14400,4.0,21.318092,4.788235\n" + -"5,18.84832,-25.418892,0,Trajectory0,18000,5.0,27.496708,66.337166\n" + -"6,18.040411,-30.469133,0,Trajectory0,21600,6.0,30.678926,31.57974\n" + -"7,32.34516,-75.79432,0,Trajectory0,25200,7.0,12.096431,11.228316\n" + -"8,8.652234,-69.01581,0,Trajectory0,28800,8.0,12.523737,47.003998\n" + -"9,18.905367,-18.362652,0,Trajectory0,32400,9.0,22.805552,8.789174\n" + -"10,12.184539,-42.194824,0,Trajectory0,36000,10.0,17.411797,40.25377\n" + -"11,16.498188,-74.44906,0,Trajectory0,39600,11.0,27.783548,20.712833\n" + -"12,1.5479256,-53.522717,0,Trajectory0,43200,12.0,11.809888,5.6157913\n" + -"13,22.033587,-28.557417,0,Trajectory0,46800,13.0,13.730549,2.8293543\n" + -"14,5.997217,-35.043163,0,Trajectory0,50400,14.0,6.549969,30.482803\n" + -"15,8.580469,-45.364418,0,Trajectory0,54000,15.0,11.789269,2.303839\n" + -"16,6.253441,-9.302229,0,Trajectory0,57600,16.0,24.03656,56.802467\n" + -"17,12.948677,-20.07699,0,Trajectory0,61200,17.0,17.980707,66.24162\n" + -"18,41.49208,-19.628315,0,Trajectory0,64800,18.0,0.44739303,25.76894\n" + -"19,25.784758,-65.65333,0,Trajectory0,68400,19.0,13.147206,1.4286463\n" + -"20,25.884523,-64.92309,0,Trajectory0,72000,20.0,21.278152,72.43937\n" + -"21,7.5993505,-33.58001,0,Trajectory0,75600,21.0,14.465093,74.04942\n" + -"22,23.801714,-8.210893,0,Trajectory0,79200,22.0,17.250273,43.468597\n" + -"23,24.086273,-16.376455,0,Trajectory0,82800,23.0,36.73325,56.15435\n" + -"24,8.838917,-65.32871,0,Trajectory0,86400,24.0,21.714993,32.324383\n" + -"25,3.049409,-50.187355,0,Trajectory0,90000,25.0,17.755543,7.7604437\n" + -"26,32.699135,-13.603052,0,Trajectory0,93600,26.0,21.764454,68.36558\n" + -"27,28.82149,-4.238066,0,Trajectory0,97200,27.0,4.18221,75.262665\n" + -"28,4.573595,-15.691054,0,Trajectory0,100800,28.0,36.230297,74.156654\n" + -"29,30.231867,-29.110548,0,Trajectory0,104400,29.0,10.372004,8.0368805\n" + -"30,26.295082,-24.224209,0,Trajectory0,108000,30.0,7.0729938,31.468176\n" + 
-"31,26.146648,-35.461746,0,Trajectory0,111600,31.0,12.3075285,71.35397\n" + -"32,18.875525,-11.409157,0,Trajectory0,115200,32.0,30.241188,45.14291\n" + -"33,44.57873,-29.37942,0,Trajectory0,118800,33.0,21.847982,61.776512\n" + -"34,40.911667,-31.65526,0,Trajectory0,122400,34.0,30.369759,29.810774\n" + -"35,9.5415745,-57.1067,0,Trajectory0,126000,35.0,15.864324,33.90924\n" + +"lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + +"2.152863,-35.078842,0,Trajectory0,0,0.0,13.466983,65.38418\n" + +"33.60481,-44.12696,0,Trajectory0,3600,1.0,23.050304,7.0401154\n" + +"22.562508,-18.115444,0,Trajectory0,7200,2.0,15.112072,45.15019\n" + +"13.432817,-21.772585,0,Trajectory0,10800,3.0,22.97767,12.618799\n" + +"43.011986,-28.655304,0,Trajectory0,14400,4.0,21.318092,4.788235\n" + +"18.84832,-25.418892,0,Trajectory0,18000,5.0,27.496708,66.337166\n" + +"18.040411,-30.469133,0,Trajectory0,21600,6.0,30.678926,31.57974\n" + +"32.34516,-75.79432,0,Trajectory0,25200,7.0,12.096431,11.228316\n" + +"8.652234,-69.01581,0,Trajectory0,28800,8.0,12.523737,47.003998\n" + +"18.905367,-18.362652,0,Trajectory0,32400,9.0,22.805552,8.789174\n" + +"12.184539,-42.194824,0,Trajectory0,36000,10.0,17.411797,40.25377\n" + +"16.498188,-74.44906,0,Trajectory0,39600,11.0,27.783548,20.712833\n" + +"1.5479256,-53.522717,0,Trajectory0,43200,12.0,11.809888,5.6157913\n" + +"22.033587,-28.557417,0,Trajectory0,46800,13.0,13.730549,2.8293543\n" + +"5.997217,-35.043163,0,Trajectory0,50400,14.0,6.549969,30.482803\n" + +"8.580469,-45.364418,0,Trajectory0,54000,15.0,11.789269,2.303839\n" + +"6.253441,-9.302229,0,Trajectory0,57600,16.0,24.03656,56.802467\n" + +"12.948677,-20.07699,0,Trajectory0,61200,17.0,17.980707,66.24162\n" + +"41.49208,-19.628315,0,Trajectory0,64800,18.0,0.44739303,25.76894\n" + +"25.784758,-65.65333,0,Trajectory0,68400,19.0,13.147206,1.4286463\n" + +"25.884523,-64.92309,0,Trajectory0,72000,20.0,21.278152,72.43937\n" + +"7.5993505,-33.58001,0,Trajectory0,75600,21.0,14.465093,74.04942\n" + +"23.801714,-8.210893,0,Trajectory0,79200,22.0,17.250273,43.468597\n" + +"24.086273,-16.376455,0,Trajectory0,82800,23.0,36.73325,56.15435\n" + +"8.838917,-65.32871,0,Trajectory0,86400,24.0,21.714993,32.324383\n" + +"3.049409,-50.187355,0,Trajectory0,90000,25.0,17.755543,7.7604437\n" + +"32.699135,-13.603052,0,Trajectory0,93600,26.0,21.764454,68.36558\n" + +"28.82149,-4.238066,0,Trajectory0,97200,27.0,4.18221,75.262665\n" + +"4.573595,-15.691054,0,Trajectory0,100800,28.0,36.230297,74.156654\n" + +"30.231867,-29.110548,0,Trajectory0,104400,29.0,10.372004,8.0368805\n" + +"26.295082,-24.224209,0,Trajectory0,108000,30.0,7.0729938,31.468176\n" + +"26.146648,-35.461746,0,Trajectory0,111600,31.0,12.3075285,71.35397\n" + +"18.875525,-11.409157,0,Trajectory0,115200,32.0,30.241188,45.14291\n" + +"44.57873,-29.37942,0,Trajectory0,118800,33.0,21.847982,61.776512\n" + +"40.911667,-31.65526,0,Trajectory0,122400,34.0,30.369759,29.810774\n" + +"9.5415745,-57.1067,0,Trajectory0,126000,35.0,15.864324,33.90924\n" + //these aren't rejected because //* trajectory_info has the attribute missing_value=-999, // but the data above has trajectory_info=0, which isn't a missing value. 
@@ -13716,14 +14612,14 @@ public static void testReadNcCFASATrajectory(boolean pauseAfterEach) throws Exce " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + -"0,42.003387,-7.9335957,0,Trajectory1,0,0.0,12.522581,35.668747\n" + -"1,8.972063,-46.335754,0,Trajectory1,3600,1.0,25.658121,1.0647067\n" + -"2,25.841967,-49.1959,0,Trajectory1,7200,2.0,35.43442,13.059927\n" + -"3,35.699753,-40.790943,0,Trajectory1,10800,3.0,35.752117,48.576355\n" + -"4,11.132234,-25.553247,0,Trajectory1,14400,4.0,6.082586,64.91749\n" + +"lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + +"42.003387,-7.9335957,0,Trajectory1,0,0.0,12.522581,35.668747\n" + +"8.972063,-46.335754,0,Trajectory1,3600,1.0,25.658121,1.0647067\n" + +"25.841967,-49.1959,0,Trajectory1,7200,2.0,35.43442,13.059927\n" + +"35.699753,-40.790943,0,Trajectory1,10800,3.0,35.752117,48.576355\n" + +"11.132234,-25.553247,0,Trajectory1,14400,4.0,6.082586,64.91749\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 100, ""); @@ -13922,14 +14818,14 @@ public static void testReadNcCFASATrajectory(boolean pauseAfterEach) throws Exce " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + -"0,11.256147,-5.989336,8,Trajectory8,72000,12.518082,14.902713,37.237553\n" + -"1,26.104128,-2.6626983,3,Trajectory3,111600,7.372036,24.243849,12.862466\n" + -"2,22.414213,-23.53803,4,Trajectory4,68400,5.7999315,1.4940661,20.668322\n" + -"3,22.181162,-34.355854,4,Trajectory4,122400,20.127024,6.8310843,55.93755\n" + -"4,2.177301,-58.388607,5,Trajectory5,162000,1.764841,27.893003,28.2276\n" + +"lat,lon,trajectory_info,trajectory_name,time,z,temperature,humidity\n" + +"11.256147,-5.989336,8,Trajectory8,72000,12.518082,14.902713,37.237553\n" + +"26.104128,-2.6626983,3,Trajectory3,111600,7.372036,24.243849,12.862466\n" + +"22.414213,-23.53803,4,Trajectory4,68400,5.7999315,1.4940661,20.668322\n" + +"22.181162,-34.355854,4,Trajectory4,122400,20.127024,6.8310843,55.93755\n" + +"2.177301,-58.388607,5,Trajectory5,162000,1.764841,27.893003,28.2276\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 213, ""); @@ -14156,14 +15052,14 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(5); + results = table.dataToString(5); expected = -"row,lat,lon,station_info,station_name,alt,time,temperature\n" + -"0,37.5,-76.5,0,Station1,0.0,0,0.0\n" + -"1,37.5,-76.5,0,Station1,2.5,0,0.1\n" + -"2,37.5,-76.5,0,Station1,5.0,0,0.2\n" + -"3,37.5,-76.5,0,Station1,7.5,0,0.3\n" + -"4,37.5,-76.5,0,Station1,10.0,0,0.4\n" + +"lat,lon,station_info,station_name,alt,time,temperature\n" + +"37.5,-76.5,0,Station1,0.0,0,0.0\n" + +"37.5,-76.5,0,Station1,2.5,0,0.1\n" + +"37.5,-76.5,0,Station1,5.0,0,0.2\n" + +"37.5,-76.5,0,Station1,7.5,0,0.3\n" + +"37.5,-76.5,0,Station1,10.0,0,0.4\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); 
Test.ensureEqual(table.nRows(), 240, ""); @@ -14378,7 +15274,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,station_info,station_name,alt,time,temperature\n" + "37.5,-76.5,0,Station1,0.0,0,0.0\n" + @@ -14613,21 +15509,21 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro String2.log("\n\n** Testing incomplete file\n" + " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(12); + results = table.dataToString(12); expected = -"row,lat,lon,alt,station_info,station_name,time,temperature,humidity\n" + -"0,37.5,-76.5,0.0,0,Station1,0,15.698009,89.70879\n" + -"1,32.5,-78.3,0.0,1,Station2,0,8.11997,33.637585\n" + -"2,37.5,-76.5,10.0,0,Station1,0,10.9166565,55.78947\n" + -"3,32.5,-78.3,10.0,1,Station2,0,39.356647,65.43795\n" + -"4,37.5,-76.5,20.0,0,Station1,0,15.666663,50.176994\n" + -"5,32.5,-78.3,20.0,1,Station2,0,33.733116,58.14976\n" + -"6,37.5,-76.5,30.0,0,Station1,0,1.1587523,36.855045\n" + -"7,32.5,-78.3,30.0,1,Station2,0,4.65479,63.862186\n" + -"8,37.5,-76.5,0.0,0,Station1,3600,31.059647,65.01694\n" + -"9,32.5,-78.3,0.0,1,Station2,3600,33.374344,22.771135\n" + -"10,37.5,-76.5,10.0,0,Station1,3600,5.680936,35.675472\n" + -"11,32.5,-78.3,10.0,1,Station2,3600,17.763374,38.54674\n" + +"lat,lon,alt,station_info,station_name,time,temperature,humidity\n" + +"37.5,-76.5,0.0,0,Station1,0,15.698009,89.70879\n" + +"32.5,-78.3,0.0,1,Station2,0,8.11997,33.637585\n" + +"37.5,-76.5,10.0,0,Station1,0,10.9166565,55.78947\n" + +"32.5,-78.3,10.0,1,Station2,0,39.356647,65.43795\n" + +"37.5,-76.5,20.0,0,Station1,0,15.666663,50.176994\n" + +"32.5,-78.3,20.0,1,Station2,0,33.733116,58.14976\n" + +"37.5,-76.5,30.0,0,Station1,0,1.1587523,36.855045\n" + +"32.5,-78.3,30.0,1,Station2,0,4.65479,63.862186\n" + +"37.5,-76.5,0.0,0,Station1,3600,31.059647,65.01694\n" + +"32.5,-78.3,0.0,1,Station2,3600,33.374344,22.771135\n" + +"37.5,-76.5,10.0,0,Station1,3600,5.680936,35.675472\n" + +"32.5,-78.3,10.0,1,Station2,3600,17.763374,38.54674\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 800, ""); @@ -14841,21 +15737,21 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro StringArray.fromCSV("station_name"), StringArray.fromCSV("="), StringArray.fromCSV("Station1")); - results = table.dataToCSVString(12); + results = table.dataToString(12); expected = -"row,lat,lon,alt,station_info,station_name,time,temperature,humidity\n" + -"0,37.5,-76.5,0.0,0,Station1,0,15.698009,89.70879\n" + -"1,37.5,-76.5,10.0,0,Station1,0,10.9166565,55.78947\n" + -"2,37.5,-76.5,20.0,0,Station1,0,15.666663,50.176994\n" + -"3,37.5,-76.5,30.0,0,Station1,0,1.1587523,36.855045\n" + -"4,37.5,-76.5,0.0,0,Station1,3600,31.059647,65.01694\n" + -"5,37.5,-76.5,10.0,0,Station1,3600,5.680936,35.675472\n" + -"6,37.5,-76.5,20.0,0,Station1,3600,24.156359,45.77856\n" + -"7,37.5,-76.5,30.0,0,Station1,3600,25.934822,35.178967\n" + -"8,37.5,-76.5,0.0,0,Station1,7200,6.518481,12.735875\n" + -"9,37.5,-76.5,10.0,0,Station1,7200,4.463567,47.44697\n" + -"10,37.5,-76.5,20.0,0,Station1,7200,29.448772,20.438272\n" + -"11,37.5,-76.5,30.0,0,Station1,7200,37.245636,62.655357\n" + 
+"lat,lon,alt,station_info,station_name,time,temperature,humidity\n" + +"37.5,-76.5,0.0,0,Station1,0,15.698009,89.70879\n" + +"37.5,-76.5,10.0,0,Station1,0,10.9166565,55.78947\n" + +"37.5,-76.5,20.0,0,Station1,0,15.666663,50.176994\n" + +"37.5,-76.5,30.0,0,Station1,0,1.1587523,36.855045\n" + +"37.5,-76.5,0.0,0,Station1,3600,31.059647,65.01694\n" + +"37.5,-76.5,10.0,0,Station1,3600,5.680936,35.675472\n" + +"37.5,-76.5,20.0,0,Station1,3600,24.156359,45.77856\n" + +"37.5,-76.5,30.0,0,Station1,3600,25.934822,35.178967\n" + +"37.5,-76.5,0.0,0,Station1,7200,6.518481,12.735875\n" + +"37.5,-76.5,10.0,0,Station1,7200,4.463567,47.44697\n" + +"37.5,-76.5,20.0,0,Station1,7200,29.448772,20.438272\n" + +"37.5,-76.5,30.0,0,Station1,7200,37.245636,62.655357\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 400, ""); @@ -15068,7 +15964,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro StringArray.fromCSV("station_name"), StringArray.fromCSV("="), StringArray.fromCSV("Station1")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,station_name\n" + "37.5,-76.5,Station1\n"; @@ -15229,7 +16125,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = //file has: //lat, lon, station_info, station_name, profile, time, station_index, row_size, height, temperature @@ -15459,7 +16355,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,station_info,station_name,profile,time,height,temperature\n" + "37.5,-76.5,0,Station1,0,0,0.5,6.7\n" + @@ -15681,7 +16577,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro table.readNcCF(raggedSingleStationFileName, StringArray.fromCSV("zztop,lat,lon,station_info,station_name"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,station_info,station_name\n" + "37.5,-76.5,0,Station1\n"; @@ -15702,7 +16598,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro table.readNcCF(raggedSingleStationFileName, StringArray.fromCSV("time,zztop,profile"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,profile\n" + "0,0\n" + @@ -15727,7 +16623,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro table.readNcCF(raggedSingleStationFileName, StringArray.fromCSV("station_info,station_name,lon,lat,time,profile,"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_info,station_name,lon,lat,time,profile\n" + "0,Station1,-76.5,37.5,0,0\n" + @@ -15751,7 +16647,7 @@ public static void testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro StringArray.fromCSV("temperature"), StringArray.fromCSV(">="), StringArray.fromCSV("8")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station_info,temperature,station_name\n" + "0,8.4,Station1\n" + @@ -15774,7 +16670,7 @@ public static void 
testReadNcCFASATimeSeriesProfile(boolean pauseAfterEach) thro StringArray.fromCSV("temperature"), StringArray.fromCSV(">="), StringArray.fromCSV("8")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "temperature,time\n" + "8.4,7200\n" + @@ -15967,7 +16863,7 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro " " + fileName); String2.log(NcHelper.dumpString(fileName, false)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,trajectory,alt,time,temperature,salinity\n" + "4.9986253,-35.718536,0,0.0,0,3.2502668,79.006065\n" + @@ -16206,23 +17102,23 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(14); + results = table.dataToString(14); expected = -"row,lat,lon,trajectory,alt,time,temperature,salinity\n" + -"0,18.736742,-28.520071,0,0.0,0,8.067447,62.835354\n" + -"1,18.736742,-28.520071,0,1.0,0,15.871663,27.454027\n" + -"2,18.736742,-28.520071,0,2.0,0,32.306496,61.80094\n" + -"3,18.736742,-28.520071,0,3.0,0,27.631369,74.84051\n" + -"4,18.736742,-28.520071,0,4.0,0,22.757963,73.378914\n" + -"5,39.43245,-57.711514,1,0.0,0,36.39878,77.23479\n" + -"6,39.43245,-57.711514,1,1.0,0,14.957566,7.621207\n" + -"7,39.43245,-57.711514,1,2.0,0,5.405648,56.557266\n" + -"8,39.43245,-57.711514,1,3.0,0,4.9267964,75.427795\n" + -"9,39.43245,-57.711514,1,4.0,0,7.806849,42.65483\n" + -"10,39.43245,-57.711514,1,5.0,0,28.784224,13.940006\n" + -"11,39.43245,-57.711514,1,6.0,0,19.139135,53.46242\n" + -"12,25.034857,-62.39183,1,0.0,3600,24.302265,62.551056\n" + -"13,25.034857,-62.39183,1,1.0,3600,11.195762,4.3670874\n" + +"lat,lon,trajectory,alt,time,temperature,salinity\n" + +"18.736742,-28.520071,0,0.0,0,8.067447,62.835354\n" + +"18.736742,-28.520071,0,1.0,0,15.871663,27.454027\n" + +"18.736742,-28.520071,0,2.0,0,32.306496,61.80094\n" + +"18.736742,-28.520071,0,3.0,0,27.631369,74.84051\n" + +"18.736742,-28.520071,0,4.0,0,22.757963,73.378914\n" + +"39.43245,-57.711514,1,0.0,0,36.39878,77.23479\n" + +"39.43245,-57.711514,1,1.0,0,14.957566,7.621207\n" + +"39.43245,-57.711514,1,2.0,0,5.405648,56.557266\n" + +"39.43245,-57.711514,1,3.0,0,4.9267964,75.427795\n" + +"39.43245,-57.711514,1,4.0,0,7.806849,42.65483\n" + +"39.43245,-57.711514,1,5.0,0,28.784224,13.940006\n" + +"39.43245,-57.711514,1,6.0,0,19.139135,53.46242\n" + +"25.034857,-62.39183,1,0.0,3600,24.302265,62.551056\n" + +"25.034857,-62.39183,1,1.0,3600,11.195762,4.3670874\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 65, ""); @@ -16437,7 +17333,7 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro StringArray.fromCSV("trajectory"), StringArray.fromCSV("="), StringArray.fromCSV("1")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,trajectory,alt,time,temperature,salinity\n" + "39.43245,-57.711514,1,0.0,0,36.39878,77.23479\n" + @@ -16487,7 +17383,7 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro StringArray.fromCSV("trajectory"), StringArray.fromCSV("="), StringArray.fromCSV("2")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lat,lon,trajectory,time\n" + "22.20038,-74.5625,2,0\n" + @@ -16651,7 +17547,7 @@ public static 
void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro " " + fileName); String2.log(NcHelper.dumpString(fileName, true)); table.readNcCF(fileName, null, null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "trajectory,lat,lon,time,z,temperature,humidity\n" + "0,49.0,-60.0,176400,0.0,39.174652,78.30777\n" + @@ -16887,14 +17783,14 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro table.readNcCF(raggedMultipleStationFileName, StringArray.fromCSV("zztop,trajectory"), null, null, null); - results = table.dataToCSVString(20); + results = table.dataToString(20); expected = -"row,trajectory\n" + -"0,0\n" + -"1,1\n" + -"2,2\n" + -"3,3\n" + -"4,4\n"; +"trajectory\n" + +"0\n" + +"1\n" + +"2\n" + +"3\n" + +"4\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), 5, ""); results = table.columnAttributes(0).toString(); @@ -16910,7 +17806,7 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro table.readNcCF(raggedMultipleStationFileName, StringArray.fromCSV("time,zztop,trajectory"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,trajectory\n" + "176400,0\n" + @@ -16951,7 +17847,7 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro table.readNcCF(raggedMultipleStationFileName, StringArray.fromCSV("lon,lat,time,trajectory,"), null, null, null); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "lon,lat,time,trajectory\n" + "-60.0,49.0,176400,0\n" + @@ -16993,7 +17889,7 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro StringArray.fromCSV("temperature"), StringArray.fromCSV(">="), StringArray.fromCSV("39")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "trajectory,temperature\n" + "0,39.174652\n" + @@ -17034,7 +17930,7 @@ public static void testReadNcCFASATrajectoryProfile(boolean pauseAfterEach) thro StringArray.fromCSV("temperature"), StringArray.fromCSV(">="), StringArray.fromCSV("39")); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "temperature,time\n" + "39.174652,176400\n" + @@ -17419,7 +18315,7 @@ public void convertToFakeMissingValues() { * and the standard missing value (e.g., Xxx.MAX_VALUE) for other PrimitiveArrays. * This works on the current (possibly packed) data. So call this when the * data is packed. - * If the column is a String column, nothing will be done. + * If the column is a StringArray or CharArray column, nothing will be done. * *

!!!This is used inside the saveAsXxx methods to temporarily * convert to fake missing values. It is rarely called elsewhere. @@ -17429,7 +18325,8 @@ public void convertToFakeMissingValues() { public void convertToFakeMissingValues(int column) { //String2.log("Table.convertToFakeMissingValues column=" + column); PrimitiveArray pa = getColumn(column); - if (pa instanceof StringArray) + if (pa.elementClass() == String.class || + pa.elementClass() == char.class) return; //boolean removeMVF = false; //commented out 2010-10-26 so NDBC files have consistent _FillValue if (pa instanceof ByteArray) { @@ -17700,7 +18597,7 @@ public BitSet join(int nKeys, int keyCol, String mvKey, Table lookUpTable) { /** * This updates the data in this table with better data from otherTable * by matching rows based on the values in key columns which are in both tables - * (like a batch version of SQL's UPDATE http://www.w3schools.com/sql/sql_update.asp). + * (like a batch version of SQL's UPDATE https://www.w3schools.com/sql/sql_update.asp). * Afterwards, this table will have rows for *all* of the values of the key columns * from both tables. This is very fast and efficient, but may need lots of memory. * @@ -17847,13 +18744,10 @@ public int update(String keyNames[], Table otherTable) { * *

netcdf files that use the ucar.nc2.dods classes are read with code in
     * netcdf-X.X.XX.jar which is part of the
-     * NetCDF Java Library
     * renamed as netcdf-latest.jar.
-     * Get slf4j-jdk14.jar from
-     * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar
-     * and copy it to /WEB-INF/lib.
-     * Put both of these .jar files in the classpath for the compiler and for Java.
+     * Put it in the classpath for the compiler and for Java.
     *

This sets globalAttributes and columnAttributes.
     *
@@ -18042,7 +18936,9 @@ public void readOpendap(String fullName, String loadColumns[]) throws Exception
 
     /**
-     * For compatibility with older programs, this calls readOpendapSequence(url, false);
+     * For compatibility with older programs, this calls readOpendapSequence(url, false).
+     * 2016-12-07: With versions of Tomcat somewhere after 8.0, the url must be strongly percent-encoded.
+     *
      * @param url the url, already SSR.percentEncoded as needed
      */
     public void readOpendapSequence(String url) throws Exception {
@@ -18051,8 +18947,9 @@ public void readOpendapSequence(String url) throws Exception {
 
     /**
     * This populates the table from an opendap one level or two-level (Dapper-style) sequence response.
-     * See Opendap info: http://www.opendap.org/pdf/dap_2_data_model.pdf .
+     * See Opendap info: https://www.opendap.org/pdf/dap_2_data_model.pdf .
     * See Dapper Conventions: http://www.epic.noaa.gov/epic/software/dapper/dapperdocs/conventions/ .
+     * 2016-12-07: With versions of Tomcat somewhere after 8.0, the url must be strongly percent-encoded.
     *

A typical dapper-style two-level nested sequence is: *

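Given the percent-encoding notes above, a request URL is built with the constraint characters already encoded before readOpendapSequence sees it. A small sketch under that assumption (the server, dataset, and variable names here are hypothetical):

    //">" is sent as %3E so that Tomcat (after ~8.0) accepts the URL;
    //readOpendapSequence(url, false) reads the one- or two-level sequence as a flat table.
    String url = "http://someServer/dapper/someDataset.cdp" +   //hypothetical
        "?location.lon,location.lat,temperature" +
        "&location.lon%3E=230";                                  //percent-encoded ">="
    Table table = new Table();
    table.readOpendapSequence(url, false);
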
@@ -18679,7 +19576,10 @@ public static String[] getDapQueryParts(String dapQuery) throws Exception {
         for (int po = 0; po < dapQueryLength; po++) {
             char ch = dapQuery.charAt(po);
             //String2.log("ch=" + ch);
-            if (ch == '"') {             //what about \" within "..."?
+            if (ch == '\\') {     //next char is escaped
+                if (po < dapQueryLength - 1)
+                    po++;
+            } else if (ch == '"') {      
                 inQuotes = !inQuotes;
             } else if (ch == '&' && !inQuotes) {
                 String part = dapQuery.substring(start, po);
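Taken by itself, the escape handling added above implements a small state machine: split on '&' only when not inside double quotes, and let a backslash protect the character after it. A standalone sketch of the same technique (not the actual method):

    //Sketch: quote- and backslash-aware splitting of a query on '&'.
    static String[] splitQueryParts(String query) {
        java.util.ArrayList<String> parts = new java.util.ArrayList<String>();
        boolean inQuotes = false;
        int start = 0;
        for (int po = 0; po < query.length(); po++) {
            char ch = query.charAt(po);
            if (ch == '\\') {                    //next char is escaped: skip it
                if (po < query.length() - 1)
                    po++;
            } else if (ch == '"') {              //toggle only on unescaped quotes
                inQuotes = !inQuotes;
            } else if (ch == '&' && !inQuotes) { //split point
                parts.add(query.substring(start, po));
                start = po + 1;
            }
        }
        parts.add(query.substring(start));       //the final part
        return parts.toArray(new String[0]);
    }

For example, splitQueryParts("a&b=\"x&y\"&c") yields {"a", "b=\"x&y\"", "c"}: the '&' inside the quoted value is not a split point.
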
@@ -18760,7 +19660,8 @@ public static void testGetDapQueryParts() throws Exception {
      *      but in percent encoded form. 
      *      (see http://docs.opendap.org/index.php/UserGuideOPeNDAPMessages#Selecting_Data:_Using_Constraint_Expressions).
      *    
If an &-separated part is "distinct()", "orderBy("...")", - * "orderByMax("...")", "orderByMin("...")", "orderByMinMax("...")", "units("...")", + * "orderByMax("...")", "orderByMin("...")", "orderByMinMax("...")", + * "orderByClosest("...")", "orderByLimit("...")", "units("...")", * it is ignored. *
If an &-separated part starts with ".", it is ignored.
     *    It can't be a variable name.
@@ -18868,6 +19769,8 @@ public void parseDapQuery(String dapQuery,
             if (constraint.equals("distinct()") ||
                 (constraint.endsWith("\")") &&
                  (constraint.startsWith("orderBy(\"") ||
+                  constraint.startsWith("orderByClosest(\"") ||
+                  constraint.startsWith("orderByLimit(\"") ||
                   constraint.startsWith("orderByMax(\"") ||
                   constraint.startsWith("orderByMin(\"") ||
                   constraint.startsWith("orderByMinMax(\"") ||
@@ -19382,8 +20285,8 @@ public int subsetViaDapQuery(String dapQuery) throws Exception {
             int idCol = findColumnNumber(conName);
             int nRemain = lowApplyConstraint(true, idCol,
                 conName, conOp, conVal, keep);
-            if (debugMode)
-                String2.log(">> nRemain=" + nRemain + " after " + conName + conOp + conVal);
+            if (debugMode) String2.log(">> nRemain=" + nRemain + 
+                " after " + conName + conOp + conVal);
             if (nRemain == 0) {
                 removeAllRows();
                 keep = null;
@@ -19410,6 +20313,10 @@ public int subsetViaDapQuery(String dapQuery) throws Exception {
                 removeDuplicates();
             } else if (part.startsWith("orderBy(\"") && part.endsWith("\")")) {
                 ascendingSort(StringArray.arrayFromCSV(part.substring(9, partL-2)));
+            } else if (part.startsWith("orderByClosest(\"") && part.endsWith("\")")) {
+                orderByClosest(part.substring(16, partL-2));
+            } else if (part.startsWith("orderByLimit(\"") && part.endsWith("\")")) {
+                orderByLimit(part.substring(14, partL-2));
             } else if (part.startsWith("orderByMin(\"") && part.endsWith("\")")) {
                 orderByMin(StringArray.arrayFromCSV(part.substring(12, partL-2)));
             } else if (part.startsWith("orderByMax(\"") && part.endsWith("\")")) {
@@ -19433,49 +20340,49 @@ public static void testSubsetViaDapQuery() throws Exception {
         table = getTestTable(false, true); //includeLongs, Strings
         table.subsetViaDapQuery("");
-        results = table.dataToCSVString();
+        results = table.dataToString();
         expected = 
-"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,String Data\n" +
-"0.0,-3,1.0,-1.0E300,-2000000000,-32000,-120,a\n" +
-"1.125504062E9,-2,1.5,3.123,2,7,8,bb\n" +
-"1.130954649E9,-1,2.0,1.0E300,2000000000,32000,120,ccc\n" +
-",,,,,,,\n";
+"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,Char Data,String Data\n" +
+"0.0,-3,1.0,-1.0E300,-2000000000,-32000,-120,\",\",a\n" +
+"1.125504062E9,-2,1.5,3.123,2,7,8,\"\"\"\",bb\n" +
+"1.130954649E9,-1,2.0,1.0E300,2000000000,32000,120,\\u20ac,ccc\n" +
+",,,,,,,,\n";
         Test.ensureEqual(results, expected, "results=\n" + results);
 
         // !=NaN
         table = getTestTable(false, true); //includeLongs, Strings
         table.subsetViaDapQuery("&Latitude!=NaN");
-        results = table.dataToCSVString();
+        results = table.dataToString();
         expected = 
-"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,String Data\n" +
-"0.0,-3,1.0,-1.0E300,-2000000000,-32000,-120,a\n" +
-"1.125504062E9,-2,1.5,3.123,2,7,8,bb\n" +
-"1.130954649E9,-1,2.0,1.0E300,2000000000,32000,120,ccc\n";
+"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,Char Data,String Data\n" +
+"0.0,-3,1.0,-1.0E300,-2000000000,-32000,-120,\",\",a\n" +
+"1.125504062E9,-2,1.5,3.123,2,7,8,\"\"\"\",bb\n" +
+"1.130954649E9,-1,2.0,1.0E300,2000000000,32000,120,\\u20ac,ccc\n";
         Test.ensureEqual(results, expected, "results=\n" + results);
 
         // =NaN
         table = getTestTable(false, true); //includeLongs, Strings
         table.subsetViaDapQuery("&Latitude=NaN");
-        results = table.dataToCSVString();
+        results = table.dataToString();
         expected = 
-"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,String Data\n" +
-",,,,,,,\n";
+"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,Char Data,String Data\n" + +",,,,,,,,\n"; Test.ensureEqual(results, expected, "results=\n" + results); //1125504062 seconds since 1970-01-01T00:00:00Z = 2005-08-31T16:01:02Z table = getTestTable(false, true); //includeLongs, Strings table.subsetViaDapQuery("&Time>2005-08-31T16:01:02Z"); - results = table.dataToCSVString(); + results = table.dataToString(); expected = -"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,String Data\n" + -"1.130954649E9,-1,2.0,1.0E300,2000000000,32000,120,ccc\n"; +"Time,Longitude,Latitude,Double Data,Int Data,Short Data,Byte Data,Char Data,String Data\n" + +"1.130954649E9,-1,2.0,1.0E300,2000000000,32000,120,\\u20ac,ccc\n"; //mv row removed because tests related to NaN (except NaN=NaN) return false. Test.ensureEqual(results, expected, "results=\n" + results); //1125504062 seconds since 1970-01-01T00:00:00Z = 2005-08-31T16:01:02Z table = getTestTable(false, true); //includeLongs, Strings table.subsetViaDapQuery("String Data,Time&Time>=2005-08-31T16:01:02Z"); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "String Data,Time\n" + "bb,1.125504062E9\n" + @@ -19486,7 +20393,7 @@ public static void testSubsetViaDapQuery() throws Exception { //constraint var needn't be in resultsVars table = getTestTable(false, true); //includeLongs, Strings table.subsetViaDapQuery("String Data&Time>=2005-08-31T16:01:02Z"); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "String Data\n" + "bb\n" + @@ -19497,7 +20404,7 @@ public static void testSubsetViaDapQuery() throws Exception { //return 0 rows table = getTestTable(false, true); //includeLongs, Strings table.subsetViaDapQuery("Longitude,Time&Time=2005-08-31"); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "Longitude,Time\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -19505,7 +20412,7 @@ public static void testSubsetViaDapQuery() throws Exception { //string table = getTestTable(false, true); //includeLongs, Strings table.subsetViaDapQuery("String Data,Time&Time>1970-01-01&String Data=\"bb\""); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "String Data,Time\n" + "bb,1.125504062E9\n"; @@ -19514,7 +20421,7 @@ public static void testSubsetViaDapQuery() throws Exception { //regex table = getTestTable(false, true); //includeLongs, Strings table.subsetViaDapQuery("String Data,Time&String Data=~\"b{1,5}\""); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "String Data,Time\n" + "bb,1.125504062E9\n"; @@ -20190,7 +21097,7 @@ public static void testOrderByMinMax() throws Exception { if (proc == 0) table.orderByMax(vars); if (proc == 1) table.orderByMin(vars); if (proc == 2) table.orderByMinMax(vars); - String results = table.dataToCSVString(); + String results = table.dataToString(); String expected[] = new String[]{ "substation,station,time,other\n" + "10,a,1,99\n" + @@ -20231,7 +21138,7 @@ public static void testOrderByMinMax() throws Exception { if (proc == 0) table.orderByMax(vars); if (proc == 1) table.orderByMin(vars); if (proc == 2) table.orderByMinMax(vars); - results = table.dataToCSVString(); + results = table.dataToString(); expected = new String[]{ "time,other\n" + "3,90\n", @@ -20261,7 +21168,7 @@ public static void testOrderByMinMax() throws Exception { if (proc == 0) table.orderByMax(vars); if (proc == 1) table.orderByMin(vars); if (proc == 2) 
table.orderByMinMax(vars);
-            results = table.dataToCSVString();
+            results = table.dataToString();
             expected = new String[]{
                 "time,other\n" +
                 "1,99\n",
@@ -20285,13 +21192,470 @@ public static void testOrderByMinMax() throws Exception {
             if (proc == 0) table.orderByMax(vars);
             if (proc == 1) table.orderByMin(vars);
             if (proc == 2) table.orderByMinMax(vars);
-            results = table.dataToCSVString();
+            results = table.dataToString();
             String expected4 = "time,other\n";
             Test.ensureEqual(results, expected4, "results=\n" + results);
         }
     }
 
+    /**
+     * This is a higher level orderByClosest that takes the csv string
+     * with the names of the orderBy columns plus the interval
+     * (e.g., "10 minutes" becomes 600 seconds).
+     */
+    public void orderByClosest(String orderByCSV) throws Exception {
+
+        if (orderByCSV == null || orderByCSV.trim().length() == 0)
+            throw new SimpleException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + 
+                " (no csv)");
+        String csv[] = String2.split(orderByCSV, ',');
+        if (csv.length < 2)
+            throw new SimpleException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + 
+                " (csv.length<2)");
+
+        int nKeyCols = csv.length - 1;
+        int keyCols[] = new int[nKeyCols];
+        for (int k = 0; k < nKeyCols; k++) {
+            keyCols[k] = findColumnNumber(csv[k]);
+            if (keyCols[k] < 0)
+                throw new SimpleException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + 
+                    " (unknown orderBy column=" + csv[k] + ")");
+        }
+
+        double numberTimeUnits[] = Calendar2.parseNumberTimeUnits(csv[nKeyCols]); //throws Exception
+
+        orderByClosest(keyCols, numberTimeUnits);
+    }
+
+    /**
+     * This is a higher level orderByClosest.
+     */
+    public void orderByClosest(String orderBy[], double numberTimeUnits[]) throws Exception {
+
+        int nKeyCols = orderBy.length;
+        int keyCols[] = new int[nKeyCols];
+        for (int k = 0; k < nKeyCols; k++) {
+            keyCols[k] = findColumnNumber(orderBy[k]);
+            if (keyCols[k] < 0)
+                throw new SimpleException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + 
+                    " (unknown orderBy column=" + orderBy[k] + ")");
+        }
+
+        orderByClosest(keyCols, numberTimeUnits);
+    }
+
+    /**
+     * This sorts by keyColumnNames (the last of which must be a timestamp's
+     * doubles / epoch seconds), and then just keeps the rows which are
+     * closest to each multiple of the time interval (e.g., 10 minutes).
+     * Rows with time=NaN are not kept, so this may return 0 rows.
+     *
+     * @param keyColumns 1 or more column numbers (0..).
+     * @param numberTimeUnits e.g., 10 minutes is represented as [number=10, timeUnits=60]
+     *   timeUnits are from Calendar2.factorToGetSeconds.
+     *   Note that Jan is the 0th month: so 2 months rounds to Jan 1, Mar 1, May 1, ....
+     *   When the last keyColumn isn't a time variable, use timeUnits=1.
+     *   This handles timeUnits for Month (30*SECONDS_PER_DAY) and
+     *   Year (360*SECONDS_PER_DAY) specially (as calendar months and years).
+     * @throws Exception if trouble (e.g., a keyColumnName not found)
+     */
+    public void orderByClosest(int keyColumns[], double numberTimeUnits[]) throws Exception {
+
+        //just 0 rows?
+        int nRows = nRows();
+        if (nRows == 0)
+            return;
+
+        int nKeyColumns = keyColumns.length;
+        if (nKeyColumns == 0)
+            throw new SimpleException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + 
+                " (orderBy.length=0)");
+        PrimitiveArray lastKeyCol = getColumn(keyColumns[nKeyColumns - 1]);
+
+        //ensure lastKeyCol is numeric
+        if (lastKeyCol instanceof StringArray)
+            throw new IllegalArgumentException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + 
+                " (The last orderBy column=" + getColumnName(keyColumns[nKeyColumns-1]) + 
+                " isn't numeric.)");
+
+        //just 1 row? 
+ if (nRows == 1) { + if (Double.isNaN(lastKeyCol.getDouble(0))) + removeRow(0); + return; + } + + //interval + if (numberTimeUnits == null || numberTimeUnits.length != 2) + throw new IllegalArgumentException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + + " (numberTimeUnits.length must be 2)"); + if (!Math2.isFinite(numberTimeUnits[0]) || + !Math2.isFinite(numberTimeUnits[1])) + throw new IllegalArgumentException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + + " (numberTimeUnits values can't be NaNs)"); + if (numberTimeUnits[0] <= 0 || numberTimeUnits[1] <= 0) + throw new IllegalArgumentException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + + " (numberTimeUnits values must be positive numbers)"); + double simpleInterval = numberTimeUnits[0] * numberTimeUnits[1]; + int field = + numberTimeUnits[1] == 30 * Calendar2.SECONDS_PER_DAY? Calendar2.MONTH : + numberTimeUnits[1] == 360 * Calendar2.SECONDS_PER_DAY? Calendar2.YEAR : //but see getYear below + Integer.MAX_VALUE; + int intNumber = Math2.roundToInt(numberTimeUnits[0]); //used for Month and Year + if (field != Integer.MAX_VALUE && + (intNumber < 1 || intNumber != numberTimeUnits[0])) + throw new IllegalArgumentException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + + " (The number of months or years must be a positive integer.)"); + if (field == Calendar2.MONTH && intNumber > 6) + throw new IllegalArgumentException(QUERY_ERROR + ORDER_BY_CLOSEST_ERROR + + " (The number of months must be 1 ... 6.)"); + + //sort based on keys + ascendingSort(keyColumns); + + //walk through the table, within a group: + // keep either this row or previous row + BitSet keep = new BitSet(); //all false + keep.set(0, nRows); //now keep all / all true + PrimitiveArray keyCols[] = new PrimitiveArray[nKeyColumns - 1]; //not including time + for (int kc = 0; kc < nKeyColumns - 1; kc++) + keyCols[kc] = getColumn(keyColumns[kc]); + ROW: + for (int row = 1; row < nRows; row++) { //1 because I'm looking backwards + + //are we at start of a new group / did keyColumns 0 ... n-2 change? + // If so, continue to next row (no changes to current or previous row's keep value) + //work backwards since last most likely to have changed + //-2 since -1 is the one that is changing (e.g., time) + for (int kc = nKeyColumns - 2; kc >= 0; kc--) { + if (keyCols[kc].compare(row - 1, row) != 0) { + if (Double.isNaN(lastKeyCol.getDouble(row))) + keep.clear(row); + continue ROW; //use a label because we are in a local loop + } + } + + //if prev or this row is NaN, continue to next row + double prevRT = lastKeyCol.getDouble(row - 1); + double thisRT = lastKeyCol.getDouble(row); + //check isNaN(thisRT) first + if (Double.isNaN(thisRT)) { //prev has already been checked/cleared. + keep.clear(row); + continue; + } + if (Double.isNaN(prevRT)) + continue; + + //now both prev and this are in same orderBy group and finite + if (field == Integer.MAX_VALUE) { + //use simpleInterval + //if prev and this resolve to different roundTo, continue to next row + prevRT /= simpleInterval; + thisRT /= simpleInterval; + double prevRint = Math.rint(prevRT); + double thisRint = Math.rint(thisRT); + if (prevRint != thisRint) + continue; + + //now both prev and this are in same group, finite, and roundTo same int + //clear the further of this or previous + //> vs >= is arbitrary + keep.clear(Math.abs(prevRT - prevRint) > Math.abs(thisRT - thisRint)? 
+                    row - 1 : row);
+
+            } else { //month or year
+                //month
+
+                //prev
+                //Finding floor is hard because of BC time and YEAR field being year within era
+                //  (so I'm using getYear(gc), not gc.get(YEAR))
+                //I'm sure there is a more efficient way, but this is quick, easy, correct.
+                //This is only inefficient when intNumber is big, which is unlikely for month and year.
+                GregorianCalendar gc = Calendar2.epochSecondsToGc(prevRT);
+                Calendar2.clearSmallerFields(gc, field);
+                while ((field == Calendar2.YEAR? Calendar2.getYear(gc) : gc.get(field)) % intNumber != 0 ||
+                       Calendar2.gcToEpochSeconds(gc) > prevRT)
+                    gc.add(field, -1);
+                double prevFloor = Calendar2.gcToEpochSeconds(gc);
+                gc.add(field, intNumber);
+                double prevCeil = Calendar2.gcToEpochSeconds(gc);
+                //< vs <= is arbitrary
+                double prevClosest = Math.abs(prevRT - prevFloor) < Math.abs(prevRT - prevCeil)?
+                    prevFloor : prevCeil;
+
+                //this
+                gc = Calendar2.epochSecondsToGc(thisRT);
+                Calendar2.clearSmallerFields(gc, field);
+                //String2.log(">> YEAR=" + Calendar2.getYear(gc));
+                while ((field == Calendar2.YEAR? Calendar2.getYear(gc) : gc.get(field)) % intNumber != 0 ||
+                       Calendar2.gcToEpochSeconds(gc) > thisRT)
+                    gc.add(field, -1);
+                double thisFloor = Calendar2.gcToEpochSeconds(gc);
+                if (debugMode) String2.log(">> this=" + Calendar2.safeEpochSecondsToIsoStringTZ(thisRT, "") + 
+                    " floor=" + Calendar2.safeEpochSecondsToIsoStringTZ(thisFloor, "") + 
+                    " YEAR=" + Calendar2.getYear(gc));
+                gc.add(field, intNumber);
+                double thisCeil = Calendar2.gcToEpochSeconds(gc);
+                //< vs <= is arbitrary
+                double thisClosest = Math.abs(thisRT - thisFloor) < Math.abs(thisRT - thisCeil)?
+                    thisFloor : thisCeil;
+
+                //if prev and this resolve to different roundTo, continue to next row
+                if (prevClosest != thisClosest)
+                    continue;
+
+                //now both prev and this are in same group, finite, and roundTo same int
+                //clear the further of this or previous
+                //> vs >= is arbitrary
+                keep.clear(Math.abs(prevRT - prevClosest) > Math.abs(thisRT - thisClosest)?
+                    row - 1 : row);
+            }
+
+        }
+        //String2.log("\nkeep=" + keep.toString() + "\n" + dataToString());
+        justKeep(keep);
+
+    }
+
+    /**
+     * This tests orderByClosest. 
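+     * A usage sketch (using the test's own columns below):
+     *   table.orderByClosest("sar, dar, 2 minutes");
+     * sorts by sar then dar, and within each sar group keeps just the row
+     * whose dar value is closest to each 2-minute (120 seconds) mark.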
+ */ + public static void testOrderByClosest() throws Exception { + String2.log("\n*** Table.testOrderByClosest()"); + + //regular: 2 minutes + String2.log("\nTest 2 minutes"); + StringArray sar = new StringArray(new String[]{ + "b", "b", "b", "b", "b", "b", "c", "a", "d", "a"}); + DoubleArray dar = new DoubleArray(new double[]{ + -121,-100, Double.NaN, 110, 132, -2, 1e30, 132, 1e30, 125}); + IntArray iar = new IntArray(new int[] { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + Table table = new Table(); + table.addColumn("iar", iar); + table.addColumn("sar", sar); + table.addColumn("dar", dar); + table.orderByClosest("sar, dar, 2 minutes"); + String results = table.dataToString(); + String expected = +"iar,sar,dar\n" + +"9,a,125.0\n" + +"0,b,-121.0\n" + +"5,b,-2.0\n" + +"3,b,110.0\n" + +"6,c,1.0E30\n" + +"8,d,1.0E30\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + //2 months: + //note that Jan is the 0th month: so 2 months rounds to Jan 1, Mar 1, May 1 + String2.log("\nTest 2 months"); + sar = new StringArray(new String[]{ + "b", "b", "b", "b", "b", "b", "c", "a", "d", "a"}); + String sa[] = { + "-0002-08-28", //0 b -121 + "-0002-09-28", //1 b -100 + "", //2 b NaN + "2014-06-25", //3 b 110, + "2014-07-25", //4 b 132, + "-912345-12-28", //5 b -2, + "2010-04-05", //6 c 82, + "2016-09-25", //7 a 132, + "2010-04-05", //8 d 82, + "2016-09-10"}; //9 a 125}); + dar = new DoubleArray(); + for (int i = 0; i < 10; i++) + dar.add(Calendar2.safeIsoStringToEpochSeconds(sa[i])); + iar = new IntArray(new int[] { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + table.clear(); + table.addColumn("iar", iar); + table.addColumn("sar", sar); + table.addColumn("dar", dar); + table.orderByClosest("sar, dar, 2 months"); + StringArray sar2 = new StringArray(); + for (int i = 0; i < dar.size(); i++) + sar2.add(Calendar2.safeEpochSecondsToIsoStringTZ(dar.get(i), "")); + table.setColumn(2, sar2); + results = table.dataToString(); + expected = +"iar,sar,dar\n" + +"9,a,2016-09-10T00:00:00Z\n" + +"5,b,-912345-12-28T00:00:00Z\n" + +"0,b,-0002-08-28T00:00:00Z\n" + +"3,b,2014-06-25T00:00:00Z\n" + +"6,c,2010-04-05T00:00:00Z\n" + +"8,d,2010-04-05T00:00:00Z\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + //10 years: beware BC AD transition, see Calendar2.getYear + String2.log("\nTest 10 years"); + sar = new StringArray(new String[]{ + "b", "b", "b", "b", "b", "b", "c", "a", "d", "a"}); + sa = new String[] { + "-0002-12-30", //0 b -121 + "0004-01-14", //1 b -100 + "", //2 b NaN + "2018-06-25", //3 b 110, + "2024-07-25", //4 b 132, + "-912345-12-28",//5 b -2, + "0211-04-05", //6 c 82, + "2024-09-25", //7 a 132, + "0211-04-05", //8 d 82, + "2023-09-10"}; //9 a 125}); + dar = new DoubleArray(); + for (int i = 0; i < 10; i++) + dar.add(Calendar2.safeIsoStringToEpochSeconds(sa[i])); + iar = new IntArray(new int[] { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + table.clear(); + table.addColumn("iar", iar); + table.addColumn("sar", sar); + table.addColumn("dar", dar); + table.orderByClosest("sar, dar, 10 years"); + sar2 = new StringArray(); + for (int i = 0; i < dar.size(); i++) + sar2.add(Calendar2.safeEpochSecondsToIsoStringTZ(dar.get(i), "")); + table.setColumn(2, sar2); + results = table.dataToString(); + expected = +"iar,sar,dar\n" + +"9,a,2023-09-10T00:00:00Z\n" + +"5,b,-912345-12-28T00:00:00Z\n" + +"0,b,-0002-12-30T00:00:00Z\n" + +"3,b,2018-06-25T00:00:00Z\n" + +"6,c,0211-04-05T00:00:00Z\n" + +"8,d,0211-04-05T00:00:00Z\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + } + + /** + * This is a 
higher level orderByLimit that takes the csv string
+     * with the names of the orderBy columns (may be none) plus the limitN
+     * (e.g., "10").
+     */
+    public void orderByLimit(String orderByCSV) throws Exception {
+
+        if (orderByCSV == null || orderByCSV.trim().length() == 0)
+            throw new SimpleException(QUERY_ERROR + ORDER_BY_LIMIT_ERROR + 
+                " (no csv)");
+        String csv[] = String2.split(orderByCSV, ',');
+        if (csv.length == 0)
+            throw new SimpleException(QUERY_ERROR + ORDER_BY_LIMIT_ERROR + 
+                " (csv.length=0)");
+
+        int nKeyCols = csv.length - 1;
+        int keyCols[] = new int[nKeyCols];
+        for (int k = 0; k < nKeyCols; k++) {
+            keyCols[k] = findColumnNumber(csv[k]);
+            if (keyCols[k] < 0)
+                throw new SimpleException(QUERY_ERROR + ORDER_BY_LIMIT_ERROR + 
+                    " (unknown orderBy column=" + csv[k] + ")");
+        }
+
+        orderByLimit(keyCols, String2.parseInt(csv[nKeyCols]));
+    }
+
+    /**
+     * This is a higher level orderByLimit.
+     */
+    public void orderByLimit(String orderBy[], int limitN) throws Exception {
+
+        int nKeyCols = orderBy.length;
+        int keyCols[] = new int[nKeyCols];
+        for (int k = 0; k < nKeyCols; k++) {
+            keyCols[k] = findColumnNumber(orderBy[k]);
+            if (keyCols[k] < 0)
+                throw new SimpleException(QUERY_ERROR + ORDER_BY_LIMIT_ERROR + 
+                    " (unknown orderBy column=" + orderBy[k] + ")");
+        }
+
+        orderByLimit(keyCols, limitN);
+    }
+
+    /**
+     * This sorts by keyColumnNames (may be none) and then just keeps at most
+     * limitN rows from each group.
+     *
+     * @param keyColumns 0 or more column numbers (0..).
+     * @param limitN a positive integer, e.g., 10
+     * @throws Exception if trouble (e.g., a keyColumnName not found)
+     */
+    public void orderByLimit(int keyColumns[], int limitN) throws Exception {
+
+        //limitN
+        if (limitN <= 0 || limitN == Integer.MAX_VALUE)
+            throw new IllegalArgumentException(QUERY_ERROR + ORDER_BY_LIMIT_ERROR + 
+                " (limitN=" + limitN + " must be a positive integer)");
+
+        //just 0 or 1 rows?
+        int nRows = nRows();
+        if (nRows <= 1)
+            return;
+
+        //sort based on keys
+        int nKeyColumns = keyColumns.length;
+        if (nKeyColumns > 0)
+            ascendingSort(keyColumns);
+
+        //walk through the table, within a group:
+        //  keep at most limitN rows from each group
+        BitSet keep = new BitSet();  //all false
+        keep.set(0, nRows);          //now keep all / all true
+        PrimitiveArray keyCols[] = new PrimitiveArray[nKeyColumns];
+        for (int kc = 0; kc < nKeyColumns; kc++)
+            keyCols[kc] = getColumn(keyColumns[kc]);
+        int count = 1; //since starting on row 1
+        ROW:
+        for (int row = 1; row < nRows; row++) {  //1 because I'm looking backwards
+
+            //are we at start of a new group / did keyColumns 0 ... n-1 change?
+            //  If so, continue to next row (no changes to current or previous row's keep value)
+            //work backwards since last most likely to have changed
+            for (int kc = nKeyColumns - 1; kc >= 0; kc--) {
+                if (keyCols[kc].compare(row - 1, row) != 0) {
+                    count = 1;    //this is first row in new group
+                    continue ROW; //use a label because we are in a local loop
+                }
+            }
+
+            //is count > limitN?
+            if (++count > limitN)
+                keep.clear(row);
+        }
+        //String2.log("\nkeep=" + keep.toString() + "\n" + dataToString());
+        justKeep(keep);
+
+    }
+
+    /**
+     * This tests orderByLimit. 
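+     * A usage sketch (using the test's own columns below):
+     *   table.orderByLimit("sar, 2");
+     * sorts by sar, then keeps at most 2 rows from each sar group.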
+ */ + public static void testOrderByLimit() throws Exception { + String2.log("\n*** Table.testOrderByLimit()"); + + StringArray sar = new StringArray(new String[]{ + "b", "b", "b", "b", "b", "b", "c", "a", "d", "a"}); + IntArray iar = new IntArray(new int[] { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); + Table table = new Table(); + table.addColumn("iar", iar); + table.addColumn("sar", sar); + table.orderByLimit("sar, 2"); + String results = table.dataToString(); + String expected = +"iar,sar\n" + +"7,a\n" + +"9,a\n" + +"0,b\n" + +"1,b\n" + +"6,c\n" + +"8,d\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + } + /** * This is like the other saveAsMatlab, but writes to a file. @@ -20613,7 +21977,7 @@ public static void testSaveAsMatlab() throws Exception { * @param dimensionName the name for the rows dimension, * e.g., usually "time", "station", "observation", "trajectory", "row", or ...? *

OBSOLETE [To conform to the Unidata Observation Dataset Conventions - * (http://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html): + * (https://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html): * This sets the global attribute observationDimension={dimensionName}.] * @throws Exception */ @@ -20636,13 +22000,13 @@ public void saveAsFlatNc(String fullName, String dimensionName) throws Exception *
If no exception is thrown, the file was successfully created. *
!!!The file must have at least one row, or an Exception will be thrown * (nc dimensions can't be 0 length). - *
LongArray columns are saved as StringArray. + *
LongArray columns are saved as DoubleArray. * * @param fullName The full file name (dir + name + ext (usually .nc)) * @param dimensionName the name for the rows dimension, * e.g., usually "time", "station", "observation", "trajectory", "row", or ...? *

OBSOLETE [To conform to the Unidata Observation Dataset Conventions - * (http://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html): + * (https://www.unidata.ucar.edu/software/netcdf-java/formats/UnidataObsConvention.html): [GONE!] * This sets the global attribute observationDimension={dimensionName}.] * @param convertToFakeMissingValues if true, * NaN's are converted to DataHelper.FAKE_MISSING_VALUE temporarily. @@ -20662,6 +22026,7 @@ public void saveAsFlatNc(String fullName, String dimensionName, //open the file (before 'try'); if it fails, no temp file to delete NetcdfFileWriter nc = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, fullName + randomInt); + boolean nc3Mode = true; try { Group rootGroup = nc.addGroup(null, ""); nc.setFill(false); @@ -20689,12 +22054,6 @@ public void saveAsFlatNc(String fullName, String dimensionName, PrimitiveArray pa = getColumn(col); tPA[col] = pa; Class type = pa.elementClass(); - if (type == long.class) { - pa = new StringArray(pa); - tPA[col] = pa; - type = pa.elementClass(); //String.class - } - if (type == String.class) { int max = Math.max(1, ((StringArray)pa).maxStringLength()); //nc libs want at least 1; 0 happens if no data Dimension lengthDimension = nc.addDimension(rootGroup, @@ -20718,15 +22077,24 @@ public void saveAsFlatNc(String fullName, String dimensionName, globalAttributes.set("id", File2.getNameNoExtension(fullName)); //set the globalAttributes - NcHelper.setAttributes(rootGroup, globalAttributes); + NcHelper.setAttributes(nc3Mode, rootGroup, globalAttributes); for (int col = 0; col < nColumns; col++) { //convert to fake MissingValues (in time to write attributes) - if (convertToFakeMissingValues && - tPA[col].elementClass() != String.class) //type check sidesteps long->String columns + Class tc = tPA[col].elementClass(); + if (convertToFakeMissingValues) convertToFakeMissingValues(col); - NcHelper.setAttributes(colVars[col], columnAttributes(col)); + Attributes tAtts = new Attributes(columnAttributes(col)); //use a copy + //String2.log(">> saveAsFlatNc col=" + tPA[col].elementClassString() + " enc=" + tAtts.getString(String2.ENCODING)); + if (tc == String.class && + tAtts.getString(String2.ENCODING) == null) //don't change if already specified + tAtts.add(String2.ENCODING, String2.ISO_8859_1); +// disabled until there is a standard +// else if (tc == char.class) +// tAtts.add(String2.CHARSET, String2.ISO_8859_1); + + NcHelper.setAttributes(nc3Mode, colVars[col], tAtts); } //leave "define" mode @@ -20737,8 +22105,7 @@ public void saveAsFlatNc(String fullName, String dimensionName, nc.write(colVars[col], NcHelper.get1DArray(tPA[col].toObjectArray())); //convert back to standard MissingValues - if (convertToFakeMissingValues && - tPA[col].elementClass() != String.class) //type check sidesteps long->String columns + if (convertToFakeMissingValues) convertToStandardMissingValues(col); } @@ -20813,6 +22180,7 @@ public void saveAsFlatNc(String fullName, String dimensionName, //create the file file = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, fileName); + boolean nc3Mode = true; rootGroup = file.addGroup(null, ""); NcHelper.setAttributes(rootGroup, globalAttributes()); @@ -20827,7 +22195,7 @@ public void saveAsFlatNc(String fullName, String dimensionName, for (int col = 0; col < nCols; col++) { String colName = getColumnName(col); PrimitiveArray pa = column(col); - Attributes atts = columnAttributes(col); + Attributes atts = new Attributes(columnAttributes(col)); //use a copy if 
(pa.elementClass() == String.class) { //create a string variable int strlen = atts.getInt("strlen"); @@ -20848,6 +22216,10 @@ public void saveAsFlatNc(String fullName, String dimensionName, NcHelper.getDataType(pa.elementClass()), dims); } + if (pa.elementClass() == char.class) + atts.add(String2.CHARSET, String2.ISO_8859_1); + else if (pa.elementClass() == String.class) + atts.add(String2.ENCODING, String2.ISO_8859_1); NcHelper.setAttributes(colVars[col], atts); } @@ -21030,6 +22402,7 @@ public void saveAs4DNc(String fullName, int xColumn, int yColumn, NetcdfFileWriter nc = NetcdfFileWriter.createNew( NetcdfFileWriter.Version.netcdf3, fullName + randomInt); long make4IndicesTime = -1; + boolean nc3Mode = true; try { Group rootGroup = nc.addGroup(null, ""); @@ -21162,9 +22535,17 @@ public void saveAs4DNc(String fullName, int xColumn, int yColumn, globalAttributes.set("id", File2.getNameNoExtension(fullName)); //write Attributes (after adding variables since mv's and related attributes adjusted) - NcHelper.setAttributes(rootGroup, globalAttributes); - for (int col = 0; col < nColumns; col++) - NcHelper.setAttributes(colVars[col], columnAttributes(col)); + NcHelper.setAttributes(nc3Mode, rootGroup, globalAttributes); + for (int col = 0; col < nColumns; col++) { + Attributes tAtts = new Attributes(columnAttributes(col)); //use a copy + if (getColumn(col).elementClass() == String.class) + tAtts.add(String2.ENCODING, String2.ISO_8859_1); +// disabled until there is a standard +// else if (getColumn(col).elementClass() == char.class) +// tAtts.add(String2.CHARSET, String2.ISO_8859_1); + + NcHelper.setAttributes(nc3Mode, colVars[col], tAtts); + } //create the stringVariable Variable stringVar = null; @@ -21178,7 +22559,10 @@ public void saveAs4DNc(String fullName, int xColumn, int yColumn, Arrays.asList(lengthDimension)); //save the attributes - NcHelper.setAttributes(stringVar, stringVariableAttributes); + Attributes tAtts = new Attributes(stringVariableAttributes); //use a copy + tAtts.add(String2.ENCODING, String2.ISO_8859_1); + + NcHelper.setAttributes(nc3Mode, stringVar, tAtts); } //leave "define" mode @@ -21769,7 +23153,7 @@ public static void testMdb() throws Exception { String query = "SELECT * FROM " + tableName; Table table = new Table(); table.readSql(con, query); - String2.log(table.dataToCSVString(5)); + String2.log(table.dataToString(5)); } @@ -22177,7 +23561,7 @@ public static void testSql() throws Exception { * @throws Exception */ public void saveAsTabbedASCII(String fullFileName) throws Exception { - saveAsTabbedASCII(fullFileName, "ISO-8859-1"); + saveAsTabbedASCII(fullFileName, String2.ISO_8859_1); } /** @@ -22234,11 +23618,11 @@ public void saveAsTabbedASCII(String fullFileName, String charset) throws Except * @throws Exception */ public void saveAsTabbedASCII(OutputStream outputStream) throws Exception { - saveAsSeparatedAscii(outputStream, null, "\t", false); + saveAsSeparatedAscii(outputStream, null, "\t"); } public void saveAsTabbedASCII(OutputStream outputStream, String charset) throws Exception { - saveAsSeparatedAscii(outputStream, charset, "\t", false); + saveAsSeparatedAscii(outputStream, charset, "\t"); } /** @@ -22305,11 +23689,11 @@ public String saveAsCsvASCIIString() throws Exception { * @throws Exception */ public void saveAsCsvASCII(OutputStream outputStream) throws Exception { - saveAsSeparatedAscii(outputStream, null, ",", true); + saveAsSeparatedAscii(outputStream, null, ","); } public void saveAsCsvASCII(OutputStream outputStream, String charset) throws 
Exception {
-        saveAsSeparatedAscii(outputStream, charset, ",", true);
+        saveAsSeparatedAscii(outputStream, charset, ",");
     }
 
     /**
@@ -22324,11 +23708,10 @@ public void saveAsCsvASCII(OutputStream outputStream, String charset) throws Exc
      *    Afterwards, it is flushed, not closed.
      * @param charset e.g., ISO-8859-1 (default, used if charset is null or "") or UTF-8.
      * @param separator usually a tab or a comma
-     * @param quoted if true, strings will be quoted if needed (see String2.quote).
      * @throws Exception 
      */
     public void saveAsSeparatedAscii(OutputStream outputStream, String charset, 
-        String separator, boolean quoted) throws Exception {
+        String separator) throws Exception {
 
         //ensure there is data
         if (nRows() == 0) {
@@ -22338,17 +23721,17 @@ public void saveAsSeparatedAscii(OutputStream outputStream, String charset,
 
         long time = System.currentTimeMillis();
         if (charset == null || charset.length() == 0)
-            charset = "ISO-8859-1";
+            charset = String2.ISO_8859_1;
         BufferedWriter writer = new BufferedWriter(
             new OutputStreamWriter(outputStream, charset));
 
         //write the column names
+        boolean tabMode = separator.equals("\t");
         int nColumns = nColumns();
         if (columnNames != null && columnNames.size() == nColumns) { //isn't this always true???
             for (int col = 0; col < nColumns; col++) {
-                //quoteIfNeeded converts carriageReturns/newlines to (char)166 //'¦' (#166)
-                String s = String2.quoteIfNeeded(quoted, getColumnName(col));
-                writer.write(s);
+                writer.write(tabMode? columnNames.getTsvString(col) :
+                    columnNames.getNccsvDataString(col));
                 writer.write(col == nColumns -1? "\n" : separator);
             }
         }
@@ -22356,9 +23739,8 @@ public void saveAsSeparatedAscii(OutputStream outputStream, String charset,
         //write the units
         for (int col = 0; col < nColumns; col++) {
             String s = columnAttributes(col).getString("units");
-            if (s == null) s = "";
-            //quoteIfNeeded converts carriageReturns/newlines to (char)166) //'¦' (#166)
-            writer.write(String2.quoteIfNeeded(quoted, s));
+            writer.write(tabMode? String2.toTsvString(s) :
+                String2.toNccsvDataString(s));
             writer.write(col == nColumns -1? "\n" : separator);
         }
 
@@ -22372,15 +23754,8 @@ public void saveAsSeparatedAscii(OutputStream outputStream, String charset,
         int nRows = nRows();
         for (int row = 0; row < nRows; row++) {
             for (int col = 0; col < nColumns; col++) {
-                String s = getColumn(col).getString(row);
-                if (isString[col]) {
-                    if (s == null)
-                        s = "null";
-                    //quoteIfNeeded converts carriageReturns/newlines to (char)166; //'¦' (#166)
-                    writer.write(String2.quoteIfNeeded(quoted, s));
-                } else {
-                    writer.write(s.length() == 0? "NaN" : s);
-                }
+                writer.write(tabMode? getColumn(col).getTsvString(row) :
+                    getColumn(col).getNccsvDataString(row));
                 writer.write(col == nColumns -1? 
"\n" : separator); } } @@ -22454,7 +23829,7 @@ public String saveAsJsonString(int timeColumn, boolean writeUnits) throws Except public void saveAsJson(OutputStream outputStream, int timeColumn, boolean writeUnits) throws Exception { - BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8")); + BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(outputStream, String2.UTF_8)); saveAsJson(writer, timeColumn, writeUnits); } @@ -22473,13 +23848,15 @@ public void saveAsJson(Writer writer, int timeColumn, //write the column names int nColumns = nColumns(); int nRows = nRows(); - boolean isString[] = new boolean[nColumns]; + boolean isCharOrString[] = new boolean[nColumns]; writer.write( "{\n" + " \"table\": {\n" + //begin main structure " \"columnNames\": ["); for (int col = 0; col < nColumns; col++) { - isString[col] = getColumn(col).elementClass() == String.class; + isCharOrString[col] = + getColumn(col).elementClass() == char.class || + getColumn(col).elementClass() == String.class; writer.write(String2.toJson(getColumnName(col))); writer.write(col == nColumns - 1? "],\n" : ", "); } @@ -22516,7 +23893,7 @@ public void saveAsJson(Writer writer, int timeColumn, String s = Double.isNaN(d)? "null" : "\"" + Calendar2.epochSecondsToIsoStringT(d) + "Z\""; writer.write(s); - } else if (isString[col]) { + } else if (isCharOrString[col]) { String s = getStringData(col, row); writer.write(String2.toJson(s)); } else { @@ -22553,7 +23930,7 @@ public void saveAsJson(Writer writer, int timeColumn, * @throws Exception if trouble */ public void readJson(String fileName) throws Exception { - String results[] = String2.readFromFile(fileName, "UTF-8", 2); + String results[] = String2.readFromFile(fileName, String2.UTF_8, 2); if (results[0].length() > 0) throw new Exception(results[0]); readJson(fileName, results[1]); @@ -22672,7 +24049,8 @@ else if (s2.endsWith("]")) if (sar.length != nCol) throw new IOException( errorInMethod + "JSON syntax error (incorrect number of data values?) on data row #" + pas[0].size() + "."); for (int col = 0; col < nCol; col++) { - String ts = sar[col]; + String ts = String2.fromJson(sar[col]); + //String2.log(">> col=" + col + " ts=" + String2.annotatedString(ts)); if (isUTC[col]) pas[col].addDouble(Calendar2.safeIsoStringToEpochSeconds(ts)); //returns NaN if trouble @@ -22722,266 +24100,11 @@ else if (s2.endsWith("]")) }*/ //String2.log(" place3 nColumns=" + nColumns() + " nRows=" + nRows() + " nCells=" + (nColumns() * nRows())); - //String2.log(toString("row", 10)); - if (verbose) String2.log(" Table.readJson done. nColumns=" + nColumns() + - " nRows=" + nRows() + " TIME=" + (System.currentTimeMillis() - time)); - } - - /* * - * This reads data from json table (of the type written by saveAsJson). - *

- *    <ul>
- *    <li> If no exception is thrown, the file was successfully read.
- *    <li> If "columnTypes" is not supplied, the columns are simplified.
- *    <li> If there is a String column with units="UTC", the ISO 8601 values
- *      in the column are converted to doubles (seconds since 1970-01-01).
- *    </ul>
- * - * @param fileName for diagnostic messages only - * @param source the json info - * @throws Exception if trouble - */ -/* //This retired 2010-10-13. It works, but new version above is faster - public void readJson(String fileName, String source) throws Exception { - - //validate parameters - if (verbose) String2.log("Table.readJson " + fileName); - long time = System.currentTimeMillis(); - String errorInMethod = String2.ERROR + " in Table.readJson(" + fileName + "):\n"; - - //clear everything - clear(); - - //for now, do it the simple but memory-expensive way - //There will be 3 copies of data in memory: source String, json objects, Table! - //Math2.gcAndWait(); String2.log("readJson start " + Math2.memoryString()); - //long tTime = System.currentTimeMillis(); - JSONObject mainObject = new JSONObject(source); - //String2.log(" json main time=" + (System.currentTimeMillis() - tTime)); //all the JSON time is here: ~8000ms - //Math2.gcAndWait(); String2.log(" place1 " + Math2.memoryString()); //max memory usage reached here - //tTime = System.currentTimeMillis(); - JSONObject tableObject = mainObject.getJSONObject("table"); - //String2.log(" json table time=" + (System.currentTimeMillis() - tTime)); - //Math2.gcAndWait(); String2.log(" place2 " + Math2.memoryString()); - - //read the parts of the table - JSONArray tNames = tableObject.getJSONArray("columnNames"); - int nColumns = tNames.length(); - JSONArray tTypes = tableObject.optJSONArray("columnTypes"); - JSONArray tUnits = tableObject.optJSONArray("columnUnits"); - //tTime = System.currentTimeMillis(); - JSONArray tData = tableObject.getJSONArray("rows"); - //String2.log(" json rows time=" + (System.currentTimeMillis() - tTime)); - int nRows = tData.length(); - - //create the table - boolean isStringCol[] = new boolean[nColumns]; - //Math2.gcAndWait(); String2.log(" place3 " + Math2.memoryString()); - for (int col = 0; col < nColumns; col++) { - addColumn(tNames.getString(col), - tTypes == null? new StringArray(nRows, false) : - PrimitiveArray.factory(PrimitiveArray.elementStringToClass(tTypes.getString(col)), nRows, false)); - isStringCol[col] = getColumn(col) instanceof StringArray; - if (tUnits != null) { - String ttUnits = tUnits.isNull(col)? 
null : tUnits.getString(col); - if (ttUnits != null && ttUnits.length() > 0) - columnAttributes(col).add("units", ttUnits); - } - } - - //read the data - //long rTime = System.currentTimeMillis(); - for (int row = 0; row < nRows; row++) { - JSONArray rowData = tData.getJSONArray(row); - //if (row % 1000 == 0) { - // String2.log("row=" + row + " time=" + (System.currentTimeMillis() - rTime) + " ms"); - // rTime = System.currentTimeMillis(); - //} - - for (int col = 0; col < nColumns; col++) { - if (rowData.isNull(col)) { //apparently, you have to ask or you get String value "null" - if (isStringCol[col]) - getColumn(col).addString(""); - else getColumn(col).addDouble(Double.NaN); - } else { - if (isStringCol[col]) - getColumn(col).addString(rowData.getString(col)); - else getColumn(col).addDouble(rowData.getDouble(col)); - } - } - } - //Math2.gcAndWait(); String2.log(" place4 " + Math2.memoryString()); - - //simplify - if (tTypes == null) - simplify(); - - //convert times to epoch seconds (after simplify, so dates are still Strings) - for (int col = 0; col < nColumns; col++) { - String ttUnits = columnAttributes(col).getString("units"); - if ((getColumn(col) instanceof StringArray) && - ttUnits != null && ttUnits.equals("UTC")) { - StringArray sa = (StringArray)getColumn(col); - DoubleArray da = new DoubleArray(nRows, false); - for (int row = 0; row < nRows; row++) { - String iso = sa.get(row); - da.add((iso == null || iso.length() == 0)? - Double.NaN : - Calendar2.isoStringToEpochSeconds(iso)); - } - setColumn(col, da); - columnAttributes(col).set("units", Calendar2.SECONDS_SINCE_1970); - } - } - //String2.log(" place3 nColumns=" + nColumns() + " nRows=" + nRows() + " nCells=" + (nColumns() * nRows())); - //String2.log(toString("row", 10)); + //String2.log(toString(10)); if (verbose) String2.log(" Table.readJson done. nColumns=" + nColumns() + " nRows=" + nRows() + " TIME=" + (System.currentTimeMillis() - time)); - } -*/ - /* * - * This reads data from json table (of the type written by saveAsJson). - *
- *    <ul>
- *    <li> If no exception is thrown, the table was successfully read.
- *    <li> If "columnTypes" is not supplied, the columns are simplified.
- *    <li> If there is a String column with units="UTC", the ISO 8601 values
- *      in the column are converted to doubles (seconds since 1970-01-01).
- *    </ul>
- * - * @param fileName for diagnostic messages only - * @param in the json info - * @throws Exception if trouble - */ -/* public void readJson(String fileName, Reader in) throws Exception { -/ * -{ - "table": { - "columnNames": ["longitude", "latitude", "time", "sea_surface_temperature"], - "columnTypes": ["float", "float", "String", "float"], - "columnUnits": ["degrees_east", "degrees_north", "UTC", "degree_C"], - "rows": [ - [180.099, 0.032, "2007-10-04T12:00:00Z", 27.66], - [180.099, 0.032, null, null], - [189.971, -7.98, "2007-10-04T12:00:00Z", 29.08] - ] - } -} -* / - //validate parameters - if (verbose) String2.log("Table.readJson " + fileName); - long time = System.currentTimeMillis(); - String errorInMethod = String2.ERROR + " in Table.readJson(" + fileName + "):\n"; - //clear everything - clear(); - boolean isStringCol[]; - boolean isIntCol[]; - int nColumns = -1; - - // - ParseJSON parseJson = new ParseJSON(in); - parseJson.readExpected('{'); - parseJson.readExpected('"'); - Test.ensureEqual(parseJson.readString('"'), "table", errorInMethod + "Missing \"table\"." + parseJson.onLine()); - parseJson.readExpected(':'); - parseJson.readExpected('{'); -not finished -touble: because table is JsonObject, info may not be in expected order - //it will for tables I write, but technically, not all valid tables - int chi = parseJson.readNonWhiteChar(); - while (chi == '"') { - String what = parseJson.readString('"'); - if (what.equals("columnNames")) { - parseJson.readExpected(':'); - ArrayList tNames = parseJson.readPrimitiveArray(); - for (int col = 0; col < tNames.size(); col++) { - addColumn(tNames.get(col).toString(), new StringArray()); - } - isStringCol = new boolean[tNames.size()]; - Arrays.fill(isStringCol, true); - - } else if (what.equals("columnTypes")) { - parseJson.readExpected(':'); - ArrayList tTypes = parseJson.readPrimitiveArray(); - for (int col = 0; col < nColumns; col++) { - addColumn(tNames.get(col).toString(), - PrimitiveArray.factory(PrimitiveArray.elementStringToClass(tTypes.get(col).toString()), 8, false)); - } - isStringCol[col] = getColumn(col) instanceof StringArray; - - } else if (what.equals("columnUnits")) { - parseJson.readExpected(':'); - ArrayList al = parseJson.readPrimitiveArray(); - for (int col = 0; col < nColumns; col++) { - if (sar[col] != null && sar[col].length() > 0) { - columnAttributes(col).add("units", sar[col]); - } - } - } else if (what.equals("rows")) { - Test.ensureEqual(parseJson.readNonWhiteChar(), '[', - errorInMethod + "Missing '[' after \"rows\"" + parseJson.onLine()); - chi = parseJson.readNonWhiteChar(); - while (chi == '[') { - //process a row - - //next char is ',' or ']' - chi = parseJson.readNonWhiteChar(); - if (chi != ',' && chi != ']') - throw new Exception("',' or ']' expected" + parseJson.onLine()); - - } - Test.ensureEqual(chi, ']', errorInMethod + "Missing ']' after last row of \"rows\"."); - - } else { //skip - } - - chi = parseJson.readNonWhiteChar(); - } - Test.ensureEqual(chi, '}', errorInMethod + "Missing '}' to close \"table\":{ ." + parseJson.onLine()); - Test.ensureEqual(parseJson.readNonWhiteChar(), '}', errorInMethod + "Missing final '}'." 
+ parseJson.onLine()); - - //read the data - for (int row = 0; row < nRows; row++) { - JSONArray rowData = tData.getJSONArray(row); - for (int col = 0; col < nColumns; col++) { - if (rowData.isNull(col)) { //apparently, you have to ask or you get String value "null" - if (isStringCol[col]) - getColumn(col).addString(""); - else getColumn(col).addDouble(Double.NaN); - } else { - if (isStringCol[col]) - getColumn(col).addString(rowData.getString(col)); - else getColumn(col).addDouble(rowData.optDouble(col, Double.NaN)); - } - } - } - - //simplify - if (tTypes == null) - simplify(); - - //convert times to epoch seconds (after simplify, so dates are still Strings) - for (int col = 0; col < nColumns; col++) { - String ttUnits = columnAttributes(col).getString("units"); - if ((getColumn(col) instanceof StringArray) && - ttUnits != null && ttUnits.equals("UTC")) { - StringArray sa = (StringArray)getColumn(col); - DoubleArray da = new DoubleArray(nRows, false); - for (int row = 0; row < nRows; row++) { - String iso = sa.get(row); - da.add((iso == null || iso.length() == 0)? - Double.NaN : - Calendar2.isoStringToEpochSeconds(iso)); - } - setColumn(col, da); - columnAttributes(col).set("units", Calendar2.SECONDS_SINCE_1970); - } - } - if (verbose) String2.log(" Table.readJson done. nColumns=" + nColumns() + - " nRows=" + nRows() + " TIME=" + (System.currentTimeMillis() - time)); - - } -*/ /** * This returns an Igor-safe column name which doesn't match a name @@ -23029,22 +24152,23 @@ public static void writeIgorWave(Writer writer, String colName, double colMin = stats[PrimitiveArray.STATS_MIN]; //may be NaN double colMax = stats[PrimitiveArray.STATS_MAX]; //may be NaN - boolean isString = pa instanceof StringArray; + boolean isCharOrString = pa instanceof CharArray || + pa instanceof StringArray; String safeColName = String2.encodeMatlabNameSafe(colName); if (dimInfo == null) dimInfo = ""; writer.write( "WAVES/" + //byte short char int long float double string - //promote char to int, long to double, - "BWIIDSDT".charAt(pa.elementClassIndex()) + + //promote char to string, long to double, + "BWTIDSDT".charAt(pa.elementClassIndex()) + dimInfo + //don't use /O to overwrite existing waves. If conflict, user will be asked. 
" " + safeColName + IgorEndOfLine + "BEGIN" + IgorEndOfLine); //write the data - if (isString) { + if (isCharOrString) { for (int row = 0; row < nRows; row++) { //String data written as json strings (in double quotes with \ encoded chars) writer.write(String2.toJson(pa.getString(row))); @@ -23075,7 +24199,7 @@ public static void writeIgorWave(Writer writer, String colName, writer.write("END" + IgorEndOfLine); //SetScale - if (!isString) { + if (!isCharOrString) { if (units == null) units = ""; if (isTimeStamp) { units = "dat"; //special case in igor @@ -23548,27 +24672,26 @@ public static void testJson() throws Exception { //SSR.displayInBrowser("file://" + fileName); //read it from the file - String results[] = String2.readFromFile(fileName); - Test.ensureEqual(results[0], "", ""); - Test.ensureEqual(results[1], + String results = String2.directReadFromUtf8File(fileName); + Test.ensureEqual(results, "{\n" + " \"table\": {\n" + -" \"columnNames\": [\"Time\", \"Longitude\", \"Latitude\", \"Double Data\", \"Long Data\", \"Int Data\", \"Short Data\", \"Byte Data\", \"String Data\"],\n" + -" \"columnTypes\": [\"String\", \"int\", \"float\", \"double\", \"long\", \"int\", \"short\", \"byte\", \"String\"],\n" + -" \"columnUnits\": [\"UTC\", \"degrees_east\", \"degrees_north\", \"doubles\", \"longs\", \"ints\", \"shorts\", \"bytes\", \"Strings\"],\n" + +" \"columnNames\": [\"Time\", \"Longitude\", \"Latitude\", \"Double Data\", \"Long Data\", \"Int Data\", \"Short Data\", \"Byte Data\", \"Char Data\", \"String Data\"],\n" + +" \"columnTypes\": [\"String\", \"int\", \"float\", \"double\", \"long\", \"int\", \"short\", \"byte\", \"char\", \"String\"],\n" + +" \"columnUnits\": [\"UTC\", \"degrees_east\", \"degrees_north\", \"doubles\", \"longs\", \"ints\", \"shorts\", \"bytes\", \"chars\", \"Strings\"],\n" + " \"rows\": [\n" + -" [\"1970-01-01T00:00:00Z\", -3, 1.0, -1.0E300, -2000000000000000, -2000000000, -32000, -120, \"a\"],\n" + -" [\"2005-08-31T16:01:02Z\", -2, 1.5, 3.123, 2, 2, 7, 8, \"bb\"],\n" + -" [\"2005-11-02T18:04:09Z\", -1, 2.0, 1.0E300, 2000000000000000, 2000000000, 32000, 120, \"ccc\"],\n" + -" [null, null, null, null, null, null, null, null, \"\"]\n" + +" [\"1970-01-01T00:00:00Z\", -3, 1.0, -1.0E300, -2000000000000000, -2000000000, -32000, -120, \",\", \"a\"],\n" + +" [\"2005-08-31T16:01:02Z\", -2, 1.5, 3.123, 2, 2, 7, 8, \"\\\"\", \"bb\"],\n" + +" [\"2005-11-02T18:04:09Z\", -1, 2.0, 1.0E300, 2000000000000000, 2000000000, 32000, 120, \"\\u20ac\", \"ccc\"],\n" + +" [null, null, null, null, null, null, null, null, \"\", \"\"]\n" + " ]\n" + " }\n" + "}\n", - results[1]); + results); //read it Table table2 = new Table(); - table2.readJson(fileName, String2.readFromFile(fileName)[1]); + table2.readJson(fileName); Test.ensureTrue(table.equals(table2), ""); //finally @@ -23583,7 +24706,7 @@ public static void testJson() throws Exception { String ncHeader = table.getNCHeader("row"); Test.ensureEqual(table.globalAttributes.getString("cdm_data_type"), "TimeSeries", ncHeader); Test.ensureEqual(table.globalAttributes.getString("title"), - "TAO/TRITON, RAMA, and PIRATA Buoys, Daily, Sea Surface Temperature", + "TAO/TRITON, RAMA, and PIRATA Buoys, Daily, 1977-present, Sea Surface Temperature", ncHeader); Test.ensureEqual(table.globalAttributes.get("history").size(), 3, ncHeader); Test.ensureEqual(table.globalAttributes.get("history").getString(0), @@ -23671,11 +24794,12 @@ public void saveAs(String fullFileName, * This reads an input table file (or 1- or 2-level opendap sequence) * and saves it in 
a file (optionally zipped). * A test which reads data from an opendap 1-level sequence and writes it to an .nc file: - * convert("http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month=\"5\"", 2, testDir + "convert.nc", 1, false); + * convert("https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month=\"5\"", 2, testDir + "convert.nc", 1, false); * * @param inFullName the full name of the file (with the extension .zip * if it is zipped) or opendap sequence url (optionally with a query). *
    + *
  • 2016-12-07: With versions of Tomcat somewhere after 8.0, the url must be strongly percent-encoded. *
  • If it is zipped, the data file should be the only file in the .zip file * and the data file's name should be inFullName minus the directory * and the ".zip" at the end. @@ -23754,12 +24878,12 @@ public static void testConvert() throws Exception { //the original test from Roy //This is used as an example in various documentation. //If url changes, do search and replace to change all references to it. - url = "http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month=\"5\""; + url = "https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month=\"5\""; String2.log("\ntesting Table.convert \n url=" + url); fileName = testDir + "convertOriginal.nc"; convert(url, READ_OPENDAP_SEQUENCE, fileName, SAVE_AS_FLAT_NC, "row", false); table.readFlatNc(fileName, null, 0); //should be already unpacked - String2.log(table.toString("row", 3)); + String2.log(table.toString(3)); Test.ensureEqual(table.nColumns(), 2, ""); Test.ensureEqual(table.nRows(), 190, ""); Test.ensureEqual(table.getColumnName(0), "t0", ""); @@ -23784,7 +24908,7 @@ public static void testConvert() throws Exception { fileName = testDir + "convertOSU.nc"; convert(url, READ_OPENDAP_SEQUENCE, fileName, SAVE_AS_FLAT_NC, "row", false); table.readFlatNc(fileName, null, 0); //should be already unpacked - String2.log(table.toString("row", 3)); + String2.log(table.toString(3)); Test.ensureEqual(table.nColumns(), 5, ""); Test.ensureEqual(table.nRows(), 446, ""); Test.ensureEqual(table.getColumnName(0), "yearday", ""); @@ -23817,7 +24941,7 @@ public static void testConvert() throws Exception { fileName = testDir + "convertCIMT.nc"; convert(url, READ_OPENDAP_SEQUENCE, fileName, SAVE_AS_FLAT_NC, "row", false); table.readFlatNc(fileName, null, 0); //should be already unpacked - String2.log(table.toString("row", 3)); + String2.log(table.toString(3)); Test.ensureEqual(table.nColumns(), 5, ""); //Test.ensureEqual(table.nRows(), 1407, ""); //this changes; file is growing Test.ensureEqual(table.getColumnName(0), "latitude", ""); @@ -23865,37 +24989,41 @@ public static void testSortColumnsByName() { Test.ensureEqual(table.getColumnName(0), "Byte Data", ""); Test.ensureEqual(table.columnAttributes(0).getString("units"), "bytes", ""); + //char + Test.ensureEqual(table.getColumnName(1), "Char Data", ""); + Test.ensureEqual(table.columnAttributes(1).getString("units"), "chars", ""); + //double - Test.ensureEqual(table.getColumnName(1), "Double Data", ""); - Test.ensureEqual(table.columnAttributes(1).getString("units"), "doubles", ""); + Test.ensureEqual(table.getColumnName(2), "Double Data", ""); + Test.ensureEqual(table.columnAttributes(2).getString("units"), "doubles", ""); //int - Test.ensureEqual(table.getColumnName(2), "Int Data", ""); - Test.ensureEqual(table.columnAttributes(2).getString("units"), "ints", ""); + Test.ensureEqual(table.getColumnName(3), "Int Data", ""); + Test.ensureEqual(table.columnAttributes(3).getString("units"), "ints", ""); //Lat - Test.ensureEqual(table.getColumnName(3), "latitude", ""); - Test.ensureEqual(table.columnAttributes(3).getString("units"), "degrees_north", ""); + Test.ensureEqual(table.getColumnName(4), "latitude", ""); + Test.ensureEqual(table.columnAttributes(4).getString("units"), "degrees_north", ""); //long - Test.ensureEqual(table.getColumnName(4), "Long Data", ""); - Test.ensureEqual(table.columnAttributes(4).getString("units"), "longs", ""); + Test.ensureEqual(table.getColumnName(5), "Long Data", ""); + Test.ensureEqual(table.columnAttributes(5).getString("units"), "longs", 
""); //Lon - Test.ensureEqual(table.getColumnName(5), "Longitude", ""); - Test.ensureEqual(table.columnAttributes(5).getString("units"), "degrees_east", ""); + Test.ensureEqual(table.getColumnName(6), "Longitude", ""); + Test.ensureEqual(table.columnAttributes(6).getString("units"), "degrees_east", ""); //short - Test.ensureEqual(table.getColumnName(6), "Short Data", ""); - Test.ensureEqual(table.columnAttributes(6).getString("units"), "shorts", ""); + Test.ensureEqual(table.getColumnName(7), "Short Data", ""); + Test.ensureEqual(table.columnAttributes(7).getString("units"), "shorts", ""); //String - Test.ensureEqual(table.getColumnName(7), "String Data", ""); - Test.ensureEqual(table.columnAttributes(7).getString("units"), "Strings", ""); + Test.ensureEqual(table.getColumnName(8), "String Data", ""); + Test.ensureEqual(table.columnAttributes(8).getString("units"), "Strings", ""); //Time - Test.ensureEqual(table.getColumnName(8), "Time", ""); - Test.ensureEqual(table.columnAttributes(8).getString("units"), Calendar2.SECONDS_SINCE_1970, ""); + Test.ensureEqual(table.getColumnName(9), "Time", ""); + Test.ensureEqual(table.columnAttributes(9).getString("units"), Calendar2.SECONDS_SINCE_1970, ""); } @@ -24241,6 +25369,11 @@ public static Table getTestTable(boolean includeLongs, boolean includeStrings) { //8=String if (includeStrings) { + char[] ac = {',', '"', '\u20ac', '\uffff'}; + col = table.addColumn("Char Data", new CharArray(ac)); + table.columnAttributes(col).set("units", "chars"); + table.columnAttributes(col).set("char_att2", new CharArray(new char[]{',', '"', '\u00fc', '\u20ac'})); + String[] aS = {"a", "bb", "ccc", ""}; col = table.addColumn("String Data", new StringArray(aS)); table.columnAttributes(col).set("units", "Strings"); @@ -24265,14 +25398,19 @@ public static void testASCII() throws Exception { //generate some data Table table = getTestTable(true, true); + Table table1 = getTestTable(true, true); + table1.removeRow(3); //remove the empty row at the end + //write it to a file String fileName = testDir + "tempTable.asc"; table.saveAsTabbedASCII(fileName); - String2.log(fileName + "=\n" + String2.readFromFile(fileName)[1]); + String2.log(fileName + "=\n" + String2.directReadFrom88591File(fileName)); //read it from the file Table table2 = new Table(); + //debugMode = true; table2.readASCII(fileName); + //debugMode = false; //check units on 1st data row Test.ensureEqual(table2.getStringData(1, 0), "degrees_east", ""); @@ -24283,13 +25421,14 @@ public static void testASCII() throws Exception { table2.simplify(); //are they the same (but column types may be different)? 
- Test.ensureTrue(table.equals(table2, false), - "\ntable=" + table.toCSVString() + "\ntable2=" + table2.toCSVString()); + Test.ensureTrue(table1.equals(table2, false), + "\ntable=" + table.toString() + "\ntable2=" + table2.toString()); //test simplification: see if column types are the same as original table int n = table.nColumns(); for (int col = 2; col < n; col++) //skip first 2 columns which are intentionally initially stored in bigger type - if (col != 4) //LongArray -> StringArray + if (col != 4 && //LongArray -> StringArray + col != 8) //CharArray -> StringArray Test.ensureEqual(table.columns.get(col).getClass(), table2.getColumn(col).getClass(), "test type of col#" + col); @@ -24298,7 +25437,7 @@ public static void testASCII() throws Exception { //read 2nd row from the file table2 = new Table(); - table2.readASCII(fileName, 0, 1, + table2.readASCII(fileName, 0, 1, "", new String[]{"Int Data"}, new double[]{0}, new double[]{4}, new String[]{"Short Data", "String Data"}); Test.ensureEqual(table2.nColumns(), 2, ""); @@ -24313,15 +25452,17 @@ public static void testASCII() throws Exception { String2.log("\n***** Table.testASCII read subset with no column names"); //read 3rd row from the file table2 = new Table(); - table2.readASCII(fileName, -1, 1, //-1=no column names + table2.readASCII(fileName, -1, 1, "", //-1=no column names new String[]{"Column#5"}, new double[]{0}, new double[]{4}, - new String[]{"Column#6", "Column#8"}); - Test.ensureEqual(table2.nColumns(), 2, ""); + new String[]{"Column#6", "Column#8", "Column#9"}); + Test.ensureEqual(table2.nColumns(), 3, ""); Test.ensureEqual(table2.nRows(), 1, ""); Test.ensureEqual(table2.getColumnName(0), "Column#6", ""); Test.ensureEqual(table2.getColumnName(1), "Column#8", ""); + Test.ensureEqual(table2.getColumnName(2), "Column#9", ""); Test.ensureEqual(table2.getDoubleData(0, 0), 7, ""); - Test.ensureEqual(table2.getStringData(1, 0), "bb", ""); + Test.ensureEqual(table2.getStringData(1, 0), "\"", ""); + Test.ensureEqual(table2.getStringData(2, 0), "bb", ""); //** finally File2.delete(fileName); @@ -24351,12 +25492,12 @@ public static void testReadStandardTabbedASCII() throws Exception { //read it from lines table.readStandardTabbedASCII("tFileName", lines, null, true); String2.log("nRows=" + table.nRows() + " nCols=" + table.nColumns()); - Test.ensureEqual(table.dataToCSVString(), + Test.ensureEqual(table.dataToString(), "colA,colB,colC\n" + "1a,1b,1c\n" + - "\"2\\na\",\"2\\nb\",2c\n" + + "2\\na,2\\nb,2c\n" + "3a,3b,3c\n", - "tFileName toCSVString=\n" + table.dataToCSVString()); + "tFileName toCSVString=\n" + table.dataToString()); //write it to a file String fileName = testDir + "tempTable.asc"; @@ -24366,23 +25507,23 @@ public static void testReadStandardTabbedASCII() throws Exception { Table table2 = new Table(); table2.readStandardTabbedASCII(fileName, null, true); String2.log("nRows=" + table2.nRows() + " nCols=" + table2.nColumns()); - Test.ensureEqual(table2.dataToCSVString(), + Test.ensureEqual(table2.dataToString(), "colA,colB,colC\n" + "1a,1b,1c\n" + - "\"2\\na\",\"2\\nb\",2c\n" + + "2\\na,2\\nb,2c\n" + "3a,3b,3c\n", - "table2 toCSVString=\n" + table2.dataToCSVString()); + "table2 toCSVString=\n" + table2.dataToString()); //just read cols B and C from the file table2 = new Table(); table2.readStandardTabbedASCII(fileName, new String[]{"colB", "colC"}, true); String2.log("nRows=" + table2.nRows() + " nCols=" + table2.nColumns()); - Test.ensureEqual(table2.dataToCSVString(), + Test.ensureEqual(table2.dataToString(), 
"colB,colC\n" + "1b,1c\n" + - "\"2\\nb\",2c\n" + + "2\\nb,2c\n" + "3b,3c\n", - "table2 toCSVString=\n" + table2.dataToCSVString()); + "table2 toCSVString=\n" + table2.dataToString()); //** finally File2.delete(fileName); @@ -24406,13 +25547,12 @@ public static void testHtml() throws Exception { String fileName = testDir + "tempTable.html"; table.saveAsHtml(fileName, "preTextHtml\n
    \n", "postTextHtml\n
    ", null, BGCOLOR, 1, true, 0, true, false); - //String2.log(fileName + "=\n" + String2.readFromFile(fileName)[1]); + //String2.log(fileName + "=\n" + String2.directReadFromUtf8File(fileName)); SSR.displayInBrowser("file://" + fileName); //read it from the file - String results[] = String2.readFromFile(fileName); - Test.ensureEqual(results[0], "", ""); - Test.ensureEqual(results[1], + String results = String2.directReadFromUtf8File(fileName); + Test.ensureEqual(results, "\n" + "\n" + @@ -24438,6 +25578,7 @@ public static void testHtml() throws Exception { "Int Data\n" + "Short Data\n" + "Byte Data\n" + +"Char Data\n" + "String Data\n" + "\n" + "\n" + @@ -24449,6 +25590,7 @@ public static void testHtml() throws Exception { "ints\n" + "shorts\n" + "bytes\n" + +"chars\n" + "Strings\n" + "\n" + "\n" + @@ -24460,6 +25602,7 @@ public static void testHtml() throws Exception { "-2000000000\n" + "-32000\n" + "-120\n" + +",\n" + "a\n" + "\n" + "\n" + @@ -24471,6 +25614,7 @@ public static void testHtml() throws Exception { "2\n" + "7\n" + "8\n" + +""\n" + "bb\n" + "\n" + "\n" + @@ -24482,6 +25626,7 @@ public static void testHtml() throws Exception { "2000000000\n" + "32000\n" + "120\n" + +"€\n" + "ccc\n" + "\n" + "\n" + @@ -24494,44 +25639,46 @@ public static void testHtml() throws Exception { " \n" + " \n" + " \n" + +" \n" + "\n" + "\n" + "postTextHtml\n" + "
    \n" + "\n", - results[1]); + results); //test readHtml - treat 2nd row as data Table table2 = new Table(); - table2.readHtml(fileName, results[1], 0, + table2.readHtml(fileName, results, 0, false, true); //secondRowHasUnits, simplify - String csv = String2.annotatedString(table2.dataToCSVString()); + String csv = String2.annotatedString(table2.dataToString()); Test.ensureEqual(csv, //so units appear here as a row of data -"Time,Longitude,Latitude,Double Data,Long Data,Int Data,Short Data,Byte Data,String Data[10]\n" + -"UTC,degrees_east,degrees_north,doubles,longs,ints,shorts,bytes,Strings[10]\n" + -"1970-01-01T00:00:00Z,-3,1.0,-1.0E300,-2000000000000000,-2000000000,-32000,-120,a[10]\n" + -"2005-08-31T16:01:02Z,-2,1.5,3.123,2,2,7,8,bb[10]\n" + -"2005-11-02T18:04:09Z,-1,2.0,1.0E300,2000000000000000,2000000000,32000,120,ccc[10]\n" + -"[160],[160],[160],[160],[160],[160],[160],[160],[160][10]\n" + +"Time,Longitude,Latitude,Double Data,Long Data,Int Data,Short Data,Byte Data,Char Data,String Data[10]\n" + +"UTC,degrees_east,degrees_north,doubles,longs,ints,shorts,bytes,chars,Strings[10]\n" + +"1970-01-01T00:00:00Z,-3,1.0,-1.0E300,-2000000000000000,-2000000000,-32000,-120,\",\",a[10]\n" + +"2005-08-31T16:01:02Z,-2,1.5,3.123,2,2,7,8,\"\"\"\",bb[10]\n" + +"2005-11-02T18:04:09Z,-1,2.0,1.0E300,2000000000000000,2000000000,32000,120,\\u20ac,ccc[10]\n" + +",,,,,,,,,[10]\n" + "[end]", csv); //test readHtml - treat 2nd row as units table2 = new Table(); - table2.readHtml(fileName, results[1], 0, + table2.readHtml(fileName, results, 0, true, true); //secondRowHasUnits, simplify - csv = String2.annotatedString(table2.dataToCSVString()); + csv = String2.annotatedString(table2.dataToString()); Test.ensureEqual(csv, //so units correctly stored as units -"Time,Longitude,Latitude,Double Data,Long Data,Int Data,Short Data,Byte Data,String Data[10]\n" + -"1970-01-01T00:00:00Z,-3,1.0,-1.0E300,-2000000000000000,-2000000000,-32000,-120,a[10]\n" + -"2005-08-31T16:01:02Z,-2,1.5,3.123,2,2,7,8,bb[10]\n" + -"2005-11-02T18:04:09Z,-1,2.0,1.0E300,2000000000000000,2000000000,32000,120,ccc[10]\n" + -"[160],[160],[160],[160],[160],[160],[160],[160],[160][10]\n" + +"Time,Longitude,Latitude,Double Data,Long Data,Int Data,Short Data,Byte Data,Char Data,String Data[10]\n" + +"1970-01-01T00:00:00Z,-3,1.0,-1.0E300,-2000000000000000,-2000000000,-32000,-120,\",\",a[10]\n" + +"2005-08-31T16:01:02Z,-2,1.5,3.123,2,2,7,8,\"\"\"\",bb[10]\n" + +"2005-11-02T18:04:09Z,-1,2.0,1.0E300,2000000000000000,2000000000,32000,120,\\u20ac,ccc[10]\n" + +",,,,,,,,,[10]\n" + "[end]", csv); Test.ensureEqual(table2.columnAttributes(0).getString("units"), "UTC", ""); Test.ensureEqual(table2.columnAttributes(1).getString("units"), "degrees_east", ""); - Test.ensureEqual(table2.columnAttributes(8).getString("units"), "Strings", ""); + Test.ensureEqual(table2.columnAttributes(8).getString("units"), "chars", ""); + Test.ensureEqual(table2.columnAttributes(9).getString("units"), "Strings", ""); //** finally Math2.gc(10000); //in a test. Do something useful while browser gets going to display the file. 
@@ -24553,7 +25700,7 @@ public static void testFlatNc() throws Exception { //generate some data Table table = getTestTable(false, true); //falses=.nc doesn't seem to take longs - String2.log("*******table=" + table.toString("row", Integer.MAX_VALUE)); + String2.log("*******table=" + table.toString()); //write it to a file String fileName = testDir + "tempTable.nc"; @@ -24562,16 +25709,26 @@ public static void testFlatNc() throws Exception { //read it from the file Table table2 = new Table(); table2.readFlatNc(fileName, null, 0); - String2.log("*********table2=" + table2.toString("row", Integer.MAX_VALUE)); + String2.log("*********table2=" + table2.toString()); //replace ' ' with '_' in column names for (int i = 0; i < table.columnNames.size(); i++) table.columnNames.set(i, String2.replaceAll(table.columnNames.get(i), " ", "_")); //do the test that the tables are equal - String2.log("testFlatNc table.nColAtt=" + table.columnAttributes.size() + - " table2.nColAtt=" + table2.columnAttributes.size()); - Test.ensureTrue(table.equals(table2), "Test table equality"); + String2.log("testFlatNc table.nColAtt=" + table.columnAttributes.size() + //? why columnAtt? + " table2.nColAtt=" + table2.columnAttributes.size()); + //except char \\u20ac becomes "?" in nc file, so reset it + Test.ensureEqual( + table2.columns.get(7).getString(2), "?", ""); + table2.columns.get(7).setString(2, "\u20ac"); + if (table2.columns.get(7).getString(3).equals("?")) + table2.columns.get(7).setString(3, ""); + try { + Test.ensureTrue(table.equals(table2), "Test table equality"); + } catch (Exception e) { + String2.pressEnterToContinue(MustBe.throwableToString(e)); + } //test if data types are the same int n = table.nColumns(); @@ -24745,15 +25902,15 @@ public static void test4DNc() throws Exception { * This is a test of readOpendapSequence. * Test cases from Roy: * GLOBEC VPT: - * stn_id=loaddods('http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_vpt?stn_id&unique()'); - * abund=loaddods('-F','http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_vpt?abund_m3&stn_id="NH05"'); + * stn_id=loaddods('https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_vpt?stn_id&unique()'); + * abund=loaddods('-F','https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_vpt?abund_m3&stn_id="NH05"'); * GLOBEC Bottle: - * month=loaddods('-F','http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?month&unique()'); - * [oxy temp]=loaddods('-F','http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month="5"'); + * month=loaddods('-F','https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?month&unique()'); + * [oxy temp]=loaddods('-F','https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_bottle?t0,oxygen&month="5"'); * GLOBEC MOC1: - * [abund,lon,lat]=loaddods('-F','http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1?abund_m3,lat,long'); - * [abund1,lon1,lat1]=loaddods('-F','http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1?abund_m3,lat,long&program="MESO_1"'); - * I note that loaddods documentation is at http://www.opendap.org/user/mgui-html/mgui_36.html + * [abund,lon,lat]=loaddods('-F','https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1?abund_m3,lat,long'); + * [abund1,lon1,lat1]=loaddods('-F','https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1?abund_m3,lat,long&program="MESO_1"'); + * I note that loaddods documentation is at https://www.opendap.org/user/mgui-html/mgui_36.html * and -F says to convert all strings to floats. * "unique()" seems to just return unique values. 
* @@ -24776,7 +25933,7 @@ public static void testOpendapSequence() throws Exception { url = "http://ferret.pmel.noaa.gov/pmel/erddap/tabledap/ChukchiSea_454a_037a_fcf4?" + "prof,id,cast,cruise,time,longitude,lon360,latitude&time>=2012-09-04&time<=2012-09-07&distinct()"; table.readOpendapSequence(url, false); //boolean: skipDapSpacerRows - results = table.toString("row", 3); + results = table.toString(3); expected = "{\n" + "dimensions:\n" + @@ -24913,9 +26070,9 @@ public static void testOpendapSequence() throws Exception { "\t\t:WATER_MASS = \"A\" ;\n" + "\t\t:Westernmost_Easting = -174.6603 ;\n" + "}\n" + -" Row prof id cast cruise time longitude lon360 latitude\n" + -" 0 1 aq1201c069 069 aq1201 1346974560 -164.044693 195.955307 56.866001\n" + -" 1 1 aq1201c070 070 aq1201 1346975820 -164.048996 195.951004 56.863998\n"; +"prof,id,cast,cruise,time,longitude,lon360,latitude\n" + +"1.0,aq1201c069,069,aq1201,1.34697456E9,-164.0447,195.9553,56.866\n" + +"1.0,aq1201c070,070,aq1201,1.34697582E9,-164.049,195.951,56.864\n"; int po = results.indexOf(expected.substring(0, 60)); Test.ensureEqual(results.substring(Math.max(0, po)), expected, "results=\n" + results); } catch (Exception e) { @@ -24927,7 +26084,7 @@ public static void testOpendapSequence() throws Exception { url = "http://ferret.pmel.noaa.gov/pmel/erddap/tabledap/ChukchiSea_454a_037a_fcf4?" + "prof,id,cast,cruise,time,longitude,lon360,latitude&time>=2012-09-04&time<=2012-09-07"; table.readOpendapSequence(url, false); //boolean: skipDapSpacerRows - results = table.toString("row", 3); + results = table.toString(3); expected = "{\n" + "dimensions:\n" + @@ -25065,9 +26222,9 @@ public static void testOpendapSequence() throws Exception { "\t\t:WATER_MASS = \"A\" ;\n" + "\t\t:Westernmost_Easting = -174.6603 ;\n" + "}\n" + -" Row prof id cast cruise time longitude lon360 latitude\n" + -" 0 1 aq1201c069 069 aq1201 1346974560 -164.044693 195.955307 56.866001\n" + -" 1 1 aq1201c070 070 aq1201 1346975820 -164.048996 195.951004 56.863998\n"; +"prof,id,cast,cruise,time,longitude,lon360,latitude\n" + +"1.0,aq1201c069,069,aq1201,1.34697456E9,-164.0447,195.9553,56.866\n" + +"1.0,aq1201c070,070,aq1201,1.34697582E9,-164.049,195.951,56.864\n"; int po = results.indexOf(expected.substring(0, 60)); Test.ensureEqual(results.substring(Math.max(0, po)), expected, "results=\n" + results); } catch (Exception e) { @@ -25081,7 +26238,7 @@ public static void testOpendapSequence() throws Exception { //read data from opendap table.readOpendapSequence( "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/erdGlobecMoc1?abund_m3,latitude,longitude", false); - results = table.toString("row", 5); + results = table.toString(5); String2.log(results); nRows = 3763; //2013-0620 was 3779; @@ -25141,20 +26298,22 @@ public static void testOpendapSequence() throws Exception { expected = "\t\t:time_coverage_end = \"2002-05-30T15:22:00Z\" ;\n" + "\t\t:time_coverage_start = \"2000-04-12T04:00:00Z\" ;\n" + -"\t\t:title = \"GLOBEC NEP MOCNESS Plankton (MOC1) Data\" ;\n" + +"\t\t:title = \"GLOBEC NEP MOCNESS Plankton (MOC1) Data, 2000-2002\" ;\n" + "\t\t:Westernmost_Easting = -125.1167 ;\n" + "}\n" + -" Row abund_m3 latitude longitude\n" + +//" Row abund_m3 latitude longitude\n" + //" 0 3.698225E-3 44.651699 -124.650002\n" + 2013-06-20 was //" 1 7.26257E-2 44.651699 -124.650002\n" + //" 2 1.100231E-3 42.504601 -125.011299\n" + //" 3 7.889546E-2 42.501801 -124.705803\n" + //" 4 3.416457 42.5033 -124.845001\n"; -" 0 3.688676E-3 44.651699 -124.650002\n" + -" 1 3.688676E-3 44.651699 -124.650002\n" 
+ -" 2 1.106603E-2 44.651699 -124.650002\n" + -" 3 1.47547E-2 44.651699 -124.650002\n" + -" 4 1.47547E-2 44.651699 -124.650002\n"; +"abund_m3,latitude,longitude\n" + +"0.003688676,44.6517,-124.65\n" + +"0.003688676,44.6517,-124.65\n" + +"0.011066027,44.6517,-124.65\n" + +"0.014754703,44.6517,-124.65\n" + +"0.014754703,44.6517,-124.65\n" + +"...\n"; int po = results.indexOf(expected.substring(0, 19)); Test.ensureEqual(results.substring(Math.max(po, 0)), expected, "results=\n" + results); /* on oceanwatch, was @@ -25188,7 +26347,7 @@ public static void testOpendapSequence() throws Exception { try { url = "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/erdGlobecVpt?station_id&distinct()"; table.readOpendapSequence(url, false); - String2.log(table.toString("row", 3)); + String2.log(table.toString(3)); //source has no global metadata Test.ensureEqual(table.nColumns(), 1, ""); Test.ensureEqual(table.nRows(), 77, ""); @@ -25205,7 +26364,7 @@ public static void testOpendapSequence() throws Exception { try { url = "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/erdGlobecVpt?abund_m3&station_id=\"NH05\""; table.readOpendapSequence(url, false); - String2.log(table.toString("row", 3)); + String2.log(table.toString(3)); //source has no global metadata Test.ensureEqual(table.nColumns(), 1, ""); Test.ensureEqual(table.nRows(), 2400, ""); @@ -25222,7 +26381,7 @@ public static void testOpendapSequence() throws Exception { try { url = "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/erdGlobecBottle?cruise_id&distinct()"; table.readOpendapSequence(url, false); - String2.log(table.toString("row", 1000000)); + String2.log(table.toString(1000000)); //source has no global metadata Test.ensureEqual(table.nColumns(), 1, ""); Test.ensureEqual(table.nRows(), 2, ""); @@ -25239,7 +26398,7 @@ public static void testOpendapSequence() throws Exception { try { url = "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/erdGlobecMoc1?abund_m3,latitude,longitude&program=\"MESO_1\""; table.readOpendapSequence(url, false); - results = table.dataToCSVString(); + results = table.dataToString(); String2.log(results); expected = /* oceanwatch was @@ -25268,7 +26427,7 @@ public static void testOpendapSequence() throws Exception { //nRows=16507 nColumns=28 readTime=5219 ms processTime=94 ms url = "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/erdGlobecVpt"; table.readOpendapSequence(url, false); - results = table.dataToCSVString(5); + results = table.dataToString(5); //on oceanwatch, was // Row datetime datetime_utc datetime_utc_e year program cruise_id cast_no stn_id //lat long lat1000 lon1000 water_depth sample_id min_sample_dep max_sample_dep month_local day_local @@ -25279,12 +26438,12 @@ public static void testOpendapSequence() throws Exception { // -9999 -9999 VPT 0.19635 0.202 14.46 WTP -9999 1.1 611 //8010204# 6118010204 CALANUS_MARSHA 3;_CIII 11.49 expected = -"row,cruise_id,longitude,latitude,time,cast_no,station_id,abund_m3,comments,counter_id,d_n_flag,gear_area,gear_mesh,gear_type,genus_species,life_stage,local_code,max_sample_depth,min_sample_depth,nodc_code,perc_counted,program,sample_id,vol_filt,water_depth\n" + -"0,EL010403,-124.17,44.65,9.88261731E8,0,NH05,11.49,-9999,WTP,-9999,0.19635,0.202,VPT,CALANUS_MARSHALLAE,3;_CIII,6118010204#,55,0,6118010204,1.1,NH,0,14.46,60\n" + -"1,EL010403,-124.17,44.65,9.88261731E8,0,NH05,74.72,-9999,WTP,-9999,0.19635,0.202,VPT,BIVALVIA,Veliger,55V,55,0,55,1.1,NH,0,14.46,60\n" + 
-"2,EL010403,-124.17,44.65,9.88261731E8,0,NH05,57.48,-9999,WTP,-9999,0.19635,0.202,VPT,POLYCHAETA,Larva,5001LV,55,0,5001,1.1,NH,0,14.46,60\n" + -"3,EL010403,-124.17,44.65,9.88261731E8,0,NH05,74.72,-9999,WTP,-9999,0.19635,0.202,VPT,GASTROPODA,Veliger,51V,55,0,51,1.1,NH,0,14.46,60\n" + -"4,EL010403,-124.17,44.65,9.88261731E8,0,NH05,11.49,-9999,WTP,-9999,0.19635,0.202,VPT,CALANUS_MARSHALLAE,1;_CI,6118010204!,55,0,6118010204,1.1,NH,0,14.46,60\n"; +"cruise_id,longitude,latitude,time,cast_no,station_id,abund_m3,comments,counter_id,d_n_flag,gear_area,gear_mesh,gear_type,genus_species,life_stage,local_code,max_sample_depth,min_sample_depth,nodc_code,perc_counted,program,sample_id,vol_filt,water_depth\n" + +"EL010403,-124.17,44.65,9.88261731E8,0,NH05,11.49,-9999,WTP,-9999,0.19635,0.202,VPT,CALANUS_MARSHALLAE,3;_CIII,6118010204#,55,0,6118010204,1.1,NH,0,14.46,60\n" + +"EL010403,-124.17,44.65,9.88261731E8,0,NH05,74.72,-9999,WTP,-9999,0.19635,0.202,VPT,BIVALVIA,Veliger,55V,55,0,55,1.1,NH,0,14.46,60\n" + +"EL010403,-124.17,44.65,9.88261731E8,0,NH05,57.48,-9999,WTP,-9999,0.19635,0.202,VPT,POLYCHAETA,Larva,5001LV,55,0,5001,1.1,NH,0,14.46,60\n" + +"EL010403,-124.17,44.65,9.88261731E8,0,NH05,74.72,-9999,WTP,-9999,0.19635,0.202,VPT,GASTROPODA,Veliger,51V,55,0,51,1.1,NH,0,14.46,60\n" + +"EL010403,-124.17,44.65,9.88261731E8,0,NH05,11.49,-9999,WTP,-9999,0.19635,0.202,VPT,CALANUS_MARSHALLAE,1;_CI,6118010204!,55,0,6118010204,1.1,NH,0,14.46,60\n"; Test.ensureEqual(results.substring(0, expected.length()), expected, "results=\n" + results); } catch (Exception e) { String2.log(MustBe.throwableToString(e)); @@ -25301,7 +26460,7 @@ public static void testOpendapSequence() throws Exception { table = new Table(); table.readOpendapSequence( //resulting url (for asc) is: - // http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1.asc?abund_m3,lat,long&abund_m3>=0.248962651&abund_m3<=0.248962653 + // https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1.asc?abund_m3,lat,long&abund_m3>=0.248962651&abund_m3<=0.248962653 // Opera browser changes > to %3E and < to %3C "http://coastwatch.pfeg.noaa.gov/erddap/tabledap/erdGlobecMoc1", new String[]{"abund_m3"}, @@ -25310,7 +26469,7 @@ public static void testOpendapSequence() throws Exception { new String[]{"abund_m3","latitude","longitude"}, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "abund_m3,latitude,longitude\n" + "0.24896266,44.6517,-124.65\n" + @@ -25651,12 +26810,12 @@ public static void testReadASCIISpeed() throws Exception { table.readASCII(fileName); time = System.currentTimeMillis() - time; - String results = table.dataToCSVString(3); + String results = table.dataToString(3); String expected = -"row,YY,MM,DD,hh,WD,WSPD,GST,WVHT,DPD,APD,MWD,BAR,ATMP,WTMP,DEWP,VIS\n" + -"0,90,01,01,00,161,08.6,10.7,01.50,05.00,04.80,999,1017.2,22.7,22.0,999.0,99.0\n" + -"1,90,01,01,01,163,09.3,11.3,01.50,05.00,04.90,999,1017.3,22.7,22.0,999.0,99.0\n" + -"2,90,01,01,01,164,09.2,10.6,01.60,04.80,04.90,999,1017.3,22.7,22.0,999.0,99.0\n" + +"YY,MM,DD,hh,WD,WSPD,GST,WVHT,DPD,APD,MWD,BAR,ATMP,WTMP,DEWP,VIS\n" + +"90,01,01,00,161,08.6,10.7,01.50,05.00,04.80,999,1017.2,22.7,22.0,999.0,99.0\n" + +"90,01,01,01,163,09.3,11.3,01.50,05.00,04.90,999,1017.3,22.7,22.0,999.0,99.0\n" + +"90,01,01,01,164,09.2,10.6,01.60,04.80,04.90,999,1017.3,22.7,22.0,999.0,99.0\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nColumns(), 16, "nColumns=" + table.nColumns()); @@ -25694,9 +26853,9 @@ public static void 
testReadJsonSpeed() throws Exception { long fileLength = File2.length(fileName); //was 10,166KB Test.ensureTrue(fileLength > 9000000, "fileName=" + fileName + " length=" + fileLength); Table table=new Table(); - table.readJson(fileName, String2.readFromFile(fileName)[1]); + table.readJson(fileName); - String results = table.dataToCSVString(3); + String results = table.dataToString(3); String2.log("results=\n" + results); //row,dirIndex,fileName,lastMod,sortedSpacing,unique_tag_id_min_,unique_tag_id_max_,PI_min_,PI_max_,longitude_min_,longitude_max_,l //atitude_min_,latitude_max_,time_min_,time_max_,bottom_depth_min_,bottom_depth_max_,common_name_min_,common_name_max_,date_public_min @@ -25743,12 +26902,12 @@ public static void testReadNDNcSpeed() throws Exception { table = new Table(); table.readNDNc(fileName, null, null, 0, 0, true); - String results = table.dataToCSVString(3); + String results = table.dataToString(3); String expected = //before 2011-06-14 was 32.31, -75.35 -"row,TIME,DEPTH,LAT,LON,WD,WSPD,GST,WVHT,DPD,APD,MWD,BAR,ATMP,WTMP,DEWP,VIS,PTDY,TIDE,WSPU,WSPV,ID\n" + -"0,1.235556E8,0.0,32.309,-75.483,149,1.5,-9999999.0,-9999999.0,-9999999.0,-9999999.0,,1031.0,15.5,-9999999.0,5.4,-9999999.0,-9999999.0,-9999999.0,-0.8,1.3,41002\n" + -"1,1.235592E8,0.0,32.309,-75.483,145,0.3,-9999999.0,-9999999.0,-9999999.0,-9999999.0,,1031.0,13.9,-9999999.0,7.3,-9999999.0,-9999999.0,-9999999.0,-0.2,0.2,41002\n" + -"2,1.235628E8,0.0,32.309,-75.483,315,1.4,-9999999.0,-9999999.0,-9999999.0,-9999999.0,,1031.0,11.4,-9999999.0,6.5,-9999999.0,-9999999.0,-9999999.0,1.0,-1.0,41002\n" + +"TIME,DEPTH,LAT,LON,WD,WSPD,GST,WVHT,DPD,APD,MWD,BAR,ATMP,WTMP,DEWP,VIS,PTDY,TIDE,WSPU,WSPV,ID\n" + +"1.235556E8,0.0,32.309,-75.483,149,1.5,-9999999.0,-9999999.0,-9999999.0,-9999999.0,,1031.0,15.5,-9999999.0,5.4,-9999999.0,-9999999.0,-9999999.0,-0.8,1.3,41002\n" + +"1.235592E8,0.0,32.309,-75.483,145,0.3,-9999999.0,-9999999.0,-9999999.0,-9999999.0,,1031.0,13.9,-9999999.0,7.3,-9999999.0,-9999999.0,-9999999.0,-0.2,0.2,41002\n" + +"1.235628E8,0.0,32.309,-75.483,315,1.4,-9999999.0,-9999999.0,-9999999.0,-9999999.0,,1031.0,11.4,-9999999.0,6.5,-9999999.0,-9999999.0,-9999999.0,1.0,-1.0,41002\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nColumns(), 21, "nColumns=" + table.nColumns()); @@ -25786,12 +26945,12 @@ public static void testReadOpendapSequenceSpeed() throws Exception { time = System.currentTimeMillis(); Table table = new Table(); table.readOpendapSequence(url); - String results = table.dataToCSVString(3); + String results = table.dataToString(3); String expected = //before 2011-06-14 was -80.17, 28.5 -"row,station,longitude,latitude,time,wd,wspd,gst,wvht,dpd,apd,mwd,bar,atmp,wtmp,dewp,vis,ptdy,tide,wspu,wspv\n" + -"0,41009,-80.166,28.519,9.151488E8,0,1.9,2.7,1.02,11.11,6.49,,1021.0,20.4,24.2,-9999999.0,-9999999.0,-9999999.0,-9999999.0,0.0,-1.9\n" + -"1,41009,-80.166,28.519,9.151524E8,53,1.5,2.8,0.99,11.11,6.67,,1021.0,20.6,24.5,-9999999.0,-9999999.0,-9999999.0,-9999999.0,-1.2,-0.9\n" + -"2,41009,-80.166,28.519,9.15156E8,154,1.0,2.2,1.06,11.11,6.86,,1021.2,20.6,24.6,-9999999.0,-9999999.0,-9999999.0,-9999999.0,-0.4,0.9\n" + +"station,longitude,latitude,time,wd,wspd,gst,wvht,dpd,apd,mwd,bar,atmp,wtmp,dewp,vis,ptdy,tide,wspu,wspv\n" + +"41009,-80.166,28.519,9.151488E8,0,1.9,2.7,1.02,11.11,6.49,,1021.0,20.4,24.2,-9999999.0,-9999999.0,-9999999.0,-9999999.0,0.0,-1.9\n" + 
+"41009,-80.166,28.519,9.151524E8,53,1.5,2.8,0.99,11.11,6.67,,1021.0,20.6,24.5,-9999999.0,-9999999.0,-9999999.0,-9999999.0,-1.2,-0.9\n" + +"41009,-80.166,28.519,9.15156E8,154,1.0,2.2,1.06,11.11,6.86,,1021.2,20.6,24.6,-9999999.0,-9999999.0,-9999999.0,-9999999.0,-0.4,0.9\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -25902,12 +27061,12 @@ public static void testOpendap() throws Exception { Table table = new Table(); int nRows = 3779; table.readFlatNc( - //read all via ascii: "http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1.asc?abund_m3,lat,long", null); - //or "http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1.asc?MOC1.abund_m3,MOC1.lat,MOC1.long", null); - "http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1", + //read all via ascii: "https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1.asc?abund_m3,lat,long", null); + //or "https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1.asc?MOC1.abund_m3,MOC1.lat,MOC1.long", null); + "https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1", new String[]{"MOC1.abund_m3", "MOC1.lat", "MOC1.long"}, //but "MOC1." is required here 2); //2=unpack to doubles - String2.log(table.toString("row", 5)); + String2.log(table.toString(5)); Test.ensureEqual(table.nColumns(), 3, ""); Test.ensureEqual(table.nRows(), nRows, ""); @@ -25929,10 +27088,10 @@ public static void testOpendap() throws Exception { //can it read with list of variables? table.readFlatNc( - "http://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1?abund_m3,lat,long", + "https://oceanwatch.pfeg.noaa.gov/opendap/GLOBEC/GLOBEC_MOC1?abund_m3,lat,long", null, //read all variables 2); //2=unpack to doubles - String2.log(table.toString("row", 5)); + String2.log(table.toString(5)); Test.ensureEqual(table.nColumns(), 3, ""); Test.ensureEqual(table.nRows(), nRows, ""); @@ -25966,13 +27125,13 @@ public static void testLittleMethods() { Table table = getTestTable(true, true); table.ensureValid(); //throws Exception if not Test.ensureEqual(table.findColumnNumber("Time"), 0, ""); - Test.ensureEqual(table.findColumnNumber("String Data"), 8, ""); + Test.ensureEqual(table.findColumnNumber("String Data"), 9, ""); Test.ensureEqual(table.findColumnNumber("zz"), -1, ""); //toString table = getTestTable(true, true); - String2.log("toString: " + table.toString("row", Integer.MAX_VALUE)); + String2.log("toString: " + table.toString()); //ensureEqual Table table2 = getTestTable(true, true); @@ -26116,7 +27275,7 @@ public static void testJoin() { lut.columnAttributes(2).add("missing_value", -9999999L); //test lut before join - String results = lut.toCSVString(); + String results = lut.toString(); String expectedLut = "{\n" + "dimensions:\n" + @@ -26132,17 +27291,17 @@ public static void testJoin() { "\n" + "// global attributes:\n" + "}\n" + -"row,aa,bb,cc\n" + -"0,10,11,111\n" + -"1,20,22,222\n" + -"2,30,33,333\n" + -"3,40,44,444\n"; +"aa,bb,cc\n" + +"10,11,111\n" + +"20,22,222\n" + +"30,33,333\n" + +"40,44,444\n"; Test.ensureEqual(results, expectedLut, "lut results=\n" + results); //do the join table.join(1, 1, "10", lut); - results = table.toCSVString(); + results = table.toString(); String expected = "{\n" + "dimensions:\n" + @@ -26164,17 +27323,17 @@ public static void testJoin() { "\n" + "// global attributes:\n" + "}\n" + -"row,zero,one,bb,cc,two\n" + -"0,a,40,44,444,aa\n" + -"1,b,10,11,111,bb\n" + -"2,c,12,,-9999999,cc\n" + -"3,d,30,33,333,dd\n" + -"4,,,11,111,\n" + -"5,e,20,22,222,ee\n"; +"zero,one,bb,cc,two\n" + +"a,40,44,444,aa\n" + 
+"b,10,11,111,bb\n" + +"c,12,,-9999999,cc\n" + +"d,30,33,333,dd\n" + +",,11,111,\n" + +"e,20,22,222,ee\n"; Test.ensureEqual(results, expected, "join 1 results=\n" + results); //ensure lut unchanged - results = lut.toCSVString(); + results = lut.toString(); Test.ensureEqual(results, expectedLut, "lut 1 results=\n" + results); @@ -26193,7 +27352,7 @@ public static void testJoin() { //do the join table.join(2, 1, "10\t11", lut); - results = table.toCSVString(); + results = table.toString(); expected = "{\n" + "dimensions:\n" + @@ -26215,17 +27374,17 @@ public static void testJoin() { "\n" + "// global attributes:\n" + "}\n" + -"row,zero,one,two,cc,three\n" + -"0,a,40,44,444,aaa\n" + -"1,b,10,bad,-9999999,bbb\n" + -"2,c,12,1212,-9999999,ccc\n" + -"3,d,30,33,333,ddd\n" + -"4,,,,111,\n" + -"5,e,20,22,222,eee\n"; +"zero,one,two,cc,three\n" + +"a,40,44,444,aaa\n" + +"b,10,bad,-9999999,bbb\n" + +"c,12,1212,-9999999,ccc\n" + +"d,30,33,333,ddd\n" + +",,,111,\n" + +"e,20,22,222,eee\n"; Test.ensureEqual(results, expected, "join 2 results=\n" + results); //ensure lut unchanged - results = lut.toCSVString(); + results = lut.toString(); Test.ensureEqual(results, expectedLut, "lut 2 results=\n" + results); } @@ -26247,7 +27406,7 @@ public static void testUpdate() throws Exception { otherTable.addColumn("five", PrimitiveArray.csvFactory(int.class, " 1, 2, 3, 4")); int nMatched = table.update(new String[]{"zero", "one"}, otherTable); - String results = table.dataToCSVString(); + String results = table.dataToString(); String expected = "zero,one,two,three\n" + "a,10,111,1.1\n" + @@ -26355,7 +27514,7 @@ public static void testLastRowWithData() throws Exception { da.add(-99); da.add(-99); da.add(Double.NaN); da.add(5); da.add(-99); sa.add("hi"); sa.add("99"); sa.add(""); sa.add(""); sa.add("there"); table.removeRowsWithoutData(); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "i,f,d,s\n" + "1,2.0,3.0,4\n" + @@ -26401,10 +27560,10 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E else table.readMultidimNc(fileName, null, null, //read default dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, StringArray.fromCSV("time"), StringArray.fromCSV(">"), StringArray.fromCSV("3426.69")); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(); + results = table.dataToString(); expected = //EEK! I don't think they should be different. 
//I think readMultidimNc is correct, because the -99999.0 values are from @@ -26432,10 +27591,10 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E table.readMultidimNc(fileName, StringArray.fromCSV("station,latitude,longitude"), null, //dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, null, null, null); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station,latitude,longitude\n" + "1463500.0,40.22166667,-74.7780556\n" + @@ -26470,10 +27629,10 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E table.readMultidimNc(fileName, StringArray.fromCSV("station,latitude,longitude"), null, //dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, StringArray.fromCSV("latitude"), StringArray.fromCSV("<"), StringArray.fromCSV("39.1")); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station,latitude,longitude\n" + "1484085.0,39.05830556,-75.3976111\n" + @@ -26492,10 +27651,10 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E table.readMultidimNc(fileName, StringArray.fromCSV("time,discharge"), null, //dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, StringArray.fromCSV("discharge"), StringArray.fromCSV(">"), StringArray.fromCSV("5400")); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "time,discharge\n" + "2076.5,5408.517777\n" + @@ -26523,10 +27682,10 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E table.readMultidimNc(fileName, StringArray.fromCSV("station,latitude,longitude,time,discharge"), null, //dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, StringArray.fromCSV("station"), StringArray.fromCSV("="), StringArray.fromCSV("1463500.0")); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(); + results = table.dataToString(); expected = //EEK! Again, readMultidimNc has additional rows with discharge=MV. //I think readMultidimNc is correct. @@ -26563,35 +27722,35 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E else table.readMultidimNc(fileName, null, null, //read all dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, null, null, null); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(10); + results = table.dataToString(10); expected = readAsNcCF? 
-"row,discharge,station,time,longitude,latitude\n" + -"0,92.02975275,1463500.0,638.1666666666279,-74.7780556,40.22166667\n" + -"1,92.02975275,1463500.0,638.1770833332557,-74.7780556,40.22166667\n" + -"2,92.02975275,1463500.0,638.1875,-74.7780556,40.22166667\n" + -"3,92.87925815999999,1463500.0,638.1979166666279,-74.7780556,40.22166667\n" + -"4,93.72876357,1463500.0,638.2083333332557,-74.7780556,40.22166667\n" + -"5,93.72876357,1463500.0,638.21875,-74.7780556,40.22166667\n" + -"6,94.86143745,1463500.0,638.2291666666279,-74.7780556,40.22166667\n" + -"7,95.71094286,1463500.0,638.2395833332557,-74.7780556,40.22166667\n" + -"8,95.71094286,1463500.0,638.25,-74.7780556,40.22166667\n" + -"9,95.71094286,1463500.0,638.2604166666279,-74.7780556,40.22166667\n" + +"discharge,station,time,longitude,latitude\n" + +"92.02975275,1463500.0,638.1666666666279,-74.7780556,40.22166667\n" + +"92.02975275,1463500.0,638.1770833332557,-74.7780556,40.22166667\n" + +"92.02975275,1463500.0,638.1875,-74.7780556,40.22166667\n" + +"92.87925815999999,1463500.0,638.1979166666279,-74.7780556,40.22166667\n" + +"93.72876357,1463500.0,638.2083333332557,-74.7780556,40.22166667\n" + +"93.72876357,1463500.0,638.21875,-74.7780556,40.22166667\n" + +"94.86143745,1463500.0,638.2291666666279,-74.7780556,40.22166667\n" + +"95.71094286,1463500.0,638.2395833332557,-74.7780556,40.22166667\n" + +"95.71094286,1463500.0,638.25,-74.7780556,40.22166667\n" + +"95.71094286,1463500.0,638.2604166666279,-74.7780556,40.22166667\n" + "...\n" : -"row,discharge,station,time,longitude,latitude\n" + -"0,92.02975275,1463500.0,638.1666666666279,-74.7780556,40.22166667\n" + -"1,92.02975275,1463500.0,638.1770833332557,-74.7780556,40.22166667\n" + -"2,92.02975275,1463500.0,638.1875,-74.7780556,40.22166667\n" + -"3,92.87925815999999,1463500.0,638.1979166666279,-74.7780556,40.22166667\n" + -"4,93.72876357,1463500.0,638.2083333332557,-74.7780556,40.22166667\n" + -"5,-99999.0,1463500.0,638.2083333333721,-74.7780556,40.22166667\n" + -"6,-99999.0,1463500.0,638.2125000000233,-74.7780556,40.22166667\n" + -"7,-99999.0,1463500.0,638.2166666666744,-74.7780556,40.22166667\n" + -"8,93.72876357,1463500.0,638.21875,-74.7780556,40.22166667\n" + -"9,-99999.0,1463500.0,638.2208333333256,-74.7780556,40.22166667\n" + +"discharge,station,time,longitude,latitude\n" + +"92.02975275,1463500.0,638.1666666666279,-74.7780556,40.22166667\n" + +"92.02975275,1463500.0,638.1770833332557,-74.7780556,40.22166667\n" + +"92.02975275,1463500.0,638.1875,-74.7780556,40.22166667\n" + +"92.87925815999999,1463500.0,638.1979166666279,-74.7780556,40.22166667\n" + +"93.72876357,1463500.0,638.2083333332557,-74.7780556,40.22166667\n" + +"-99999.0,1463500.0,638.2083333333721,-74.7780556,40.22166667\n" + +"-99999.0,1463500.0,638.2125000000233,-74.7780556,40.22166667\n" + +"-99999.0,1463500.0,638.2166666666744,-74.7780556,40.22166667\n" + +"93.72876357,1463500.0,638.21875,-74.7780556,40.22166667\n" + +"-99999.0,1463500.0,638.2208333333256,-74.7780556,40.22166667\n" + "...\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureEqual(table.nRows(), readAsNcCF? 
2315617 : 7539127, "wrong nRows"); @@ -26608,10 +27767,10 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E table.readMultidimNc(fileName, StringArray.fromCSV("station,latitude,longitude,time,discharge"), null, //dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, StringArray.fromCSV("discharge"), StringArray.fromCSV(">"), StringArray.fromCSV("5400")); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station,latitude,longitude,time,discharge\n" + "1463500.0,40.22166667,-74.7780556,2076.5,5408.517777\n" + @@ -26639,10 +27798,10 @@ public static void testReadNcCFMATimeSeriesReversed(boolean readAsNcCF) throws E table.readMultidimNc(fileName, StringArray.fromCSV("station,latitude,longitude,time,discharge"), null, //dimensions - true, true, true, //getMetadata, trimStrings, removeMVRows, + true, true, //getMetadata, removeMVRows, StringArray.fromCSV("station,discharge"), StringArray.fromCSV("=,>"), StringArray.fromCSV("1463500.0,5400")); String2.log("time=" + (System.currentTimeMillis() - time)); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "station,latitude,longitude,time,discharge\n" + "1463500.0,40.22166667,-74.7780556,2076.5,5408.517777\n" + @@ -26673,12 +27832,15 @@ public static void test() throws Exception { verbose = true; reallyVerbose = true; -/* */ + /* */ testLittleMethods(); testReorderColumns(); testSortColumnsByName(); testLastRowWithData(); + testEnhancedFlatNcFile(); testOrderByMinMax(); + testOrderByClosest(); + testOrderByLimit(); testGetDapQueryParts(); testParseDapQuery(); testSubsetViaDapQuery(); @@ -26688,6 +27850,7 @@ public static void test() throws Exception { testASCII(); testReadAsciiCsvFile(); testReadAsciiSsvFile(); + testNccsv(); testHtml(); testJson(); testFlatNc(); diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TableDataSet4DNc.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TableDataSet4DNc.java index 9032516e9..52f2d1ce2 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TableDataSet4DNc.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TableDataSet4DNc.java @@ -25,13 +25,9 @@ import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. - * Get slf4j-jdk14.jar from - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar - * and copy it to /WEB-INF/lib. - * Put both of these .jar files in the classpath for the compiler and for Java. + * Put it in the classpath for the compiler and for Java. */ import ucar.nc2.*; import ucar.nc2.dataset.NetcdfDataset; diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TwoTable.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TwoTable.java index f7cc957cf..8f10306f1 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TwoTable.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/pointdata/TwoTable.java @@ -31,13 +31,9 @@ import java.util.Vector; /** - * Get netcdf-X.X.XX.jar from - * http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/index.html + * Get netcdfAll-......jar from ftp://ftp.unidata.ucar.edu/pub * and copy it to /WEB-INF/lib renamed as netcdf-latest.jar. 
 - * Get slf4j-jdk14.jar from
 - * ftp://ftp.unidata.ucar.edu/pub/netcdf-java/slf4j-jdk14.jar
 - * and copy it to /WEB-INF/lib.
 - * Put both of these .jar files in the classpath for the compiler and for Java.
 + * Put it in the classpath for the compiler and for Java.
   */
  import ucar.nc2.*;
  import ucar.nc2.dataset.NetcdfDataset;
diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/sgt/CompoundColorMap.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/sgt/CompoundColorMap.java
index d718ec7b3..d5aa8483e 100644
--- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/sgt/CompoundColorMap.java
+++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/sgt/CompoundColorMap.java
@@ -31,7 +31,7 @@
 /**
  * This class mimics the behavior of a GMT Color Palette Table (.cpt) file.
- * http://gmt.soest.hawaii.edu/gmt/html/GMT_Docs.html#x1-720004.15
+ * https://gmt.soest.hawaii.edu/gmt/html/GMT_Docs.html#x1-720004.15
  * Note that log ranges can be simulated by a series of ranges
  * (each of which is actually linearly interpolated).
  */
diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/FileVisitorDNLS.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/FileVisitorDNLS.java
index 1c76c9db8..22b4863d5 100644
--- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/FileVisitorDNLS.java
+++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/FileVisitorDNLS.java
@@ -223,9 +223,9 @@ public FileVisitResult visitFileFailed(Path file, IOException exc) {
         return FileVisitResult.CONTINUE;
     }
-    /** table.dataToCSVString(); */
+    /** table.dataToString(); */
     public String resultsToString() {
-        return table.dataToCSVString();
+        return table.dataToString();
     }
     /**
@@ -376,6 +376,9 @@ public static Table oneStep(String tDir, String tFileNameRegex, boolean tRecursi
         matcher = HYRAX_PATTERN.matcher(tDir);
         if (matcher.matches()) {
             try {
+                if (verbose)
+                    String2.log("FileVisitorDNLS.oneStep getting info from Hyrax at" +
+                        "\nURL=" + tDir);
                 Table table = makeEmptyTable();
                 StringArray directoryPA = (StringArray)table.getColumn(DIRECTORY);
                 StringArray namePA = (StringArray)table.getColumn(NAME);
@@ -404,6 +407,9 @@ public static Table oneStep(String tDir, String tFileNameRegex, boolean tRecursi
         matcher = THREDDS_PATTERN.matcher(tDir);
         if (matcher.matches()) {
             try {
+                if (verbose)
+                    String2.log("FileVisitorDNLS.oneStep getting info from THREDDS at" +
+                        "\nURL=" + tDir);
                 Table table = makeEmptyTable();
                 StringArray directoryPA = (StringArray)table.getColumn(DIRECTORY);
                 StringArray namePA = (StringArray)table.getColumn(NAME);
@@ -430,6 +436,9 @@ public static Table oneStep(String tDir, String tFileNameRegex, boolean tRecursi
         //default: Apache-style WAF
         try {
+            if (verbose)
+                String2.log("FileVisitorDNLS.oneStep getting info from Apache-style WAF at" +
+                    "\nURL=" + tDir);
             Table table = makeEmptyTable();
             StringArray directorySA = (StringArray)table.getColumn(DIRECTORY);
             StringArray nameSA = (StringArray)table.getColumn(NAME);
@@ -529,12 +538,36 @@ public static Table oneStepDouble(Table tTable) throws IOException {
         return tTable;
     }
+    /**
+     * This is like oneStep (a convenience method for using this class)
+     * but returns a url column instead of a directory column.
+     *
+     * @param tDir The starting directory, with \\ or /, with or without trailing slash,
+     *    which will be removed.
+     * @param startOfUrl usually EDStatic.erddapUrl(loggedInAs) + "/files/" + datasetID() + "/"
+     *    which will be prepended.
+     * @return a table with URL, NAME,
+     *    LASTMODIFIED (double epochSeconds), and SIZE (double) columns
+     *    (this delegates to oneStepDoubleWithUrlsNotDirs, below).
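+     *
+     * <p>A hypothetical sketch of a call (the dir, regexes, and startOfUrl are
+     *    illustrative, patterned on the tests in this file):
+     * <pre>
+     * Table table = FileVisitorDNLS.oneStepWithUrlsNotDirs(
+     *     "/u00/data/myDataset/", ".*\\.nc", true, ".*",  //tDir, tFileNameRegex, tRecursive, tPathRegex
+     *     "http://localhost:8080/erddap/files/myDatasetID/");  //startOfUrl
+     * </pre>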
+ */ + public static Table oneStepWithUrlsNotDirs(String tDir, + String tFileNameRegex, boolean tRecursive, String tPathRegex, String startOfUrl) + throws IOException { + + tDir = File2.addSlash(String2.replaceAll(tDir, "\\", "/")); //ensure forward/ and trailing/ + return oneStepDoubleWithUrlsNotDirs( + oneStep(tDir, tFileNameRegex, tRecursive, tPathRegex, false), //tDirectoriesToo + tDir, + startOfUrl); + } + /** * This is like oneStepDouble (a convenience method for using this class) * but returns a url column instead of a directory column. * - * @param tDir The starting directory, with \\ or /, with or without trailing slash. + * @param tDir The starting directory, with \\ or /, with or without trailing slash, + * which will be removed. * @param startOfUrl usually EDStatic.erddapUrl(loggedInAs) + "/files/" + datasetID() + "/" + * which will be prepended. * @return a table with columns with DIRECTORY (always "/"), NAME, * LASTMODIFIED (double epochSeconds), and SIZE (doubles) columns. */ @@ -630,6 +663,13 @@ public static String getSampleFileName(String tFileDir, String tFileNameRegex, //[DIR] 0 public final static Pattern wafDirPattern = Pattern.compile( ".* alt=\"\\[DIR\\]\".*>.*(.*?).*"); +//https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/ +// doc/ +// 14-Mar-2017 08:34 -   + public final static Pattern wafDirPattern2 = Pattern.compile( + ".*href=\"(.*?/)\">" + + ".*>\\d{2}-[a-zA-Z]{3}-\\d{4} \\d{2}:\\d{2}(|:\\d{2})" + //date, note internal () + ".*"); //inport: //[   ] @@ -643,6 +683,13 @@ public static String getSampleFileName(String tFileDir, String tFileNameRegex, ".* alt=\"\\[.*?\\]\".*>.*(.*?)" + //name ".*(\\d{2}-[a-zA-Z]{3}-\\d{4} \\d{2}:\\d{2}(|:\\d{2}))" + //date, note internal () ".*\\W(\\d{1,15}\\.?\\d{0,10}[KMGTP]?).*"); //size +//https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/ +// gpcp_1dd_v1.2_p1d.199610 +// 18-Sep-2012 12:55 7.7M  + public final static Pattern wafFilePattern2 = Pattern.compile( + ".* href=\"(.*?)\">" + //name + ".*>(\\d{2}-[a-zA-Z]{3}-\\d{4} \\d{2}:\\d{2}(|:\\d{2}))" + //date, note internal () + ".*>(\\d{1,15}\\.?\\d{0,10}[KMGTP]?).*"); //size public static String[] getUrlsFromWAF(String startUrl, String fileNameRegex, @@ -709,7 +756,7 @@ public static boolean addToWAFUrlList(String url, String fileNameRegex, //All non-ASCII chars should be entities. //But use common Linux to be consistent. 
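             //(A sketch of the strategy below: each listing line is tried against the
             // original wafDirPattern/wafFilePattern first and, if those fail, against
             // the wafDirPattern2/wafFilePattern2 added above for NCEI-style listings;
             // in a file match, group(1) is the name, group(2) the dd-MMM-yyyy HH:mm
             // date, and the last group the size.)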
             is = SSR.getUrlInputStream(url);
-            in = new BufferedReader(new InputStreamReader(is, "ISO-8859-1"));
+            in = new BufferedReader(new InputStreamReader(is, String2.ISO_8859_1));
             String s;
             //look for header line
@@ -722,11 +769,16 @@ public static boolean addToWAFUrlList(String url, String fileNameRegex,
             while ((s = in.readLine()) != null) {
                 //look for dirs before files (since dirs match filePattern, too)
+                if (s.indexOf("Parent Directory") >= 0)
+                    continue;
                 Matcher matcher = wafDirPattern.matcher(s);
-                if (matcher.matches()) {
+                boolean matched = matcher.matches();
+                if (!matched) {
+                    matcher = wafDirPattern2.matcher(s);
+                    matched = matcher.matches();
+                }
+                if (matched) {
                     String name = XML.decodeEntities(matcher.group(1));
-                    if ("Parent Directory".equals(name))
-                        continue;
                     String tUrl = File2.addSlash(url + name);
                     if (tUrl.matches(pathRegex)) {
                         if (dirsToo) {
@@ -747,7 +799,12 @@ public static boolean addToWAFUrlList(String url, String fileNameRegex,
                 //look for files
                 matcher = wafFilePattern.matcher(s);
-                if (matcher.matches()) {
+                matched = matcher.matches();
+                if (!matched) {
+                    matcher = wafFilePattern2.matcher(s);
+                    matched = matcher.matches();
+                }
+                if (matched) {
                     String name = XML.decodeEntities(matcher.group(1));
                     if (name.matches(fileNameRegex)) {
@@ -777,6 +834,8 @@ public static boolean addToWAFUrlList(String url, String fileNameRegex,
                         names.add(name);
                         lastModified.add(millis);
                         size.add(lSize);
+                    } else if (debugMode) {
+                        String2.log("name matches=false: " + name);
+                    }
                     continue;
                 } else {
@@ -816,7 +875,7 @@ public static void testWAF() throws Throwable {
         String url = "http://coastwatch.pfeg.noaa.gov/erddap/files/fedCalLandings/";
         String tFileNameRegex = "194\\d\\.nc";
         boolean tRecursive = true;
-        String tPathRegex = ".*/(3|4)/.*";
+        String tPathRegex = ".*/(3|4)/.*";
         boolean tDirsToo = true;
         Table table = makeEmptyTable();
         StringArray dirs = (StringArray)table.getColumn(0);
@@ -831,7 +890,7 @@ public static void testWAF() throws Throwable {
             addToWAFUrlList(url, tFileNameRegex, tRecursive, tPathRegex,
                 tDirsToo, dirs, names, lastModifieds, sizes), "");
-        results = table.dataToCSVString();
+        results = table.dataToString();
         expected =
"directory,name,lastModified,size\n" +
"http://coastwatch.pfeg.noaa.gov/erddap/files/fedCalLandings/3/,,,\n" +
@@ -863,7 +922,7 @@ public static void testWAF() throws Throwable {
         //test via oneStep
         tTable = oneStep(url, tFileNameRegex, tRecursive, tPathRegex, tDirsToo);
-        results = tTable.dataToCSVString();
+        results = tTable.dataToString();
         Test.ensureEqual(results, expected, "results=\n" + results);
@@ -873,7 +932,7 @@ public static void testWAF() throws Throwable {
             addToWAFUrlList(url, tFileNameRegex, tRecursive, tPathRegex,
                 false, dirs, names, lastModifieds, sizes), "");
-        results = table.dataToCSVString();
+        results = table.dataToString();
         expected =
"directory,name,lastModified,size\n" +
"http://coastwatch.pfeg.noaa.gov/erddap/files/fedCalLandings/3/,1940.nc,1262881740000,48128\n" +
@@ -900,7 +959,7 @@ public static void testWAF() throws Throwable {
         //test via oneStep
         tTable = oneStep(url, tFileNameRegex, tRecursive, tPathRegex, false);
-        results = tTable.dataToCSVString();
+        results = tTable.dataToString();
         Test.ensureEqual(results, expected, "results=\n" + results);
@@ -911,7 +970,7 @@ public static void testWAF() throws Throwable {
             tFileNameRegex, tRecursive, tPathRegex, tDirsToo,
             dirs, names, lastModifieds, sizes), "");
-        results = table.dataToCSVString();
+        results = table.dataToString();
         expected =
"directory,name,lastModified,size\n" + "http://coastwatch.pfeg.noaa.gov/erddap/files/fedCalLandings/3/,1940.nc,1262881740000,48128\n" + @@ -928,7 +987,7 @@ public static void testWAF() throws Throwable { //test via oneStep tTable = oneStep(url + "3", tFileNameRegex, tRecursive, tPathRegex, tDirsToo); - results = tTable.dataToCSVString(); + results = tTable.dataToString(); Test.ensureEqual(results, expected, "results=\n" + results); @@ -939,7 +998,7 @@ public static void testWAF() throws Throwable { "zztop", tRecursive, tPathRegex, tDirsToo, dirs, names, lastModifieds, sizes), ""); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + "http://coastwatch.pfeg.noaa.gov/erddap/files/fedCalLandings/3/,,,\n" + @@ -948,7 +1007,7 @@ public static void testWAF() throws Throwable { //test via oneStep tTable = oneStep(url, "zztop", tRecursive, tPathRegex, tDirsToo); - results = tTable.dataToCSVString(); + results = tTable.dataToString(); Test.ensureEqual(results, expected, "results=\n" + results); @@ -959,12 +1018,12 @@ public static void testWAF() throws Throwable { tFileNameRegex, false, tPathRegex, tDirsToo, dirs, names, lastModifieds, sizes), ""); - results = table.dataToCSVString(); + results = table.dataToString(); Test.ensureEqual(results, expected, "results=\n" + results); //test via oneStep tTable = oneStep(url, tFileNameRegex, false, tPathRegex, tDirsToo); - results = tTable.dataToCSVString(); + results = tTable.dataToString(); Test.ensureEqual(results, expected, "results=\n" + results); //* Test InPort WAF @@ -973,22 +1032,48 @@ public static void testWAF() throws Throwable { addToWAFUrlList("https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/", "22...\\.xml", //pre 2016-03-04 I tested NWFSC/inport/xml, but it has been empty for a month! - true, ".*/NMFS/(|NEFSC/)(|inport/)(|xml/)", //tricky! + true, ".*/NMFS/(|NEFSC/)(|inport/)(|xml/)", //tricky! 
//should be slashStar, no space true, //tDirsToo, dirs, names, lastModifieds, sizes), ""); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + "https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/,,,\n" + "https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/,,,\n" + "https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,,,\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22560.xml,1455948120000,21504\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22561.xml,1455948120000,21504\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22562.xml,1455948120000,19456\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22563.xml,1455948120000,21504\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22564.xml,1456553280000,23552\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22565.xml,1455948120000,25600\n"; +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22560.xml,1489504860000,307200\n" + +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22561.xml,1489504860000,307200\n" + +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22562.xml,1489504860000,305152\n" + +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22563.xml,1489504920000,307200\n" + +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22564.xml,1489504920000,309248\n" + +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/,22565.xml,1489504920000,311296\n"; + Test.ensureEqual(results, expected, "results=\n" + results); + + //* Test ncei WAF + table.removeAllRows(); + tTable = oneStep( + "https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/", + "gpcp_1dd_v1\\.2_p1d\\.1997[0-9]{2}", + true, ".*", true); //tDirsToo, + results = tTable.dataToString(); + expected = +"directory,name,lastModified,size\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,,,\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199701,1347972900000,8074035\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199702,1347972900000,7235174\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199703,1347972900000,8074035\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199704,1347972900000,7759462\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199705,1347972900000,8074035\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199706,1347972900000,7759462\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199707,1347972900000,8074035\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199708,1347972720000,8074035\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199709,1347972720000,7759462\n" + 
+"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199710,1347972720000,8074035\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199711,1347972720000,7759462\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/access/,gpcp_1dd_v1.2_p1d.199712,1347972720000,8074035\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/doc/,,,\n" + +"https://www.ncei.noaa.gov/data/global-precipitation-climatology-project-gpcp-daily/src/,,,\n"; Test.ensureEqual(results, expected, "results=\n" + results); } catch (Throwable t) { @@ -1010,7 +1095,7 @@ public static void testWAF() throws Throwable { * can be added to see a hyrax catalog) e.g., http://dods.jpl.nasa.gov/opendap/ocean_wind/ccmp/L3.5a/data/flk/1988/ or - http://podaac-opendap.jpl.nasa.gov/opendap/hyrax/allData/avhrr/L4/reynolds_er/v3b/monthly/netcdf/2014/ + https://opendap.jpl.nasa.gov/opendap/hyrax/allData/avhrr/L4/reynolds_er/v3b/monthly/netcdf/2014/ * @param fileNameRegex e.g., "pentad.*flk\\.nc\\.gz" * @param recursive @@ -1086,11 +1171,20 @@ public static boolean addToHyraxUrlList(String url, String fileNameRegex, //skip header line and parent directory int po = responseLC.indexOf("parent directory"); //Lower Case - if (po < 0 ) { - if (reallyVerbose) String2.log("ERROR: \"parent directory\" not found in Hyrax response."); - return false; + if (po >= 0) { + po += 18; + } else { + if (verbose) String2.log("ERROR: \"parent directory\" not found in Hyrax response."); + po = responseLC.indexOf("", po); //Lower Case @@ -1101,9 +1195,9 @@ public static boolean addToHyraxUrlList(String url, String fileNameRegex, boolean diagnosticMode = false; while (true) { - //EXAMPLE http://data.nodc.noaa.gov/opendap/wod/monthly/ No longer available + //EXAMPLE https://data.nodc.noaa.gov/opendap/wod/monthly/ No longer available - //EXAMPLE http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/M07 + //EXAMPLE https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/M07 //(reformatted: look for tags, not formatting /* month_19870701_v11l35flk.nc.gz @@ -1236,10 +1330,15 @@ public static boolean addToHyraxUrlList(String url, String fileNameRegex, */ public static void testHyrax() throws Throwable { String2.log("\n*** FileVisitorDNLS.testHyrax()\n"); + //reallyVerbose = true; + //debugMode=true; try { - - String url = "http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/"; //contents.html + //podaac-opendap causes the + //"javax.net.ssl.SSLProtocolException: handshake alert: unrecognized_name" error + // String url = "https://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/"; //contents.html + //so use domain name shown on digital certificate: opendap.jpl.nasa.gov + String url = "https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/"; //contents.html String fileNameRegex = "month_198(8|9).*flk\\.nc\\.gz"; boolean recursive = true; String pathRegex = null; @@ -1266,59 +1365,59 @@ public static void testHyrax() throws Throwable { table.addColumn("URL", childUrls); table.addColumn("lastModified", lastModified); table.addColumn("size", size); - String results = table.dataToCSVString(); + String results = table.dataToString(); String expected = "URL,lastModified,size\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/,,\n" + 
-"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880101_v11l35flk.nc.gz,1.336863115E9,4981045\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880201_v11l35flk.nc.gz,1.336723222E9,5024372\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880301_v11l35flk.nc.gz,1.336546575E9,5006043\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880401_v11l35flk.nc.gz,1.336860015E9,4948285\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880501_v11l35flk.nc.gz,1.336835143E9,4914250\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880601_v11l35flk.nc.gz,1.336484405E9,4841084\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880701_v11l35flk.nc.gz,1.336815079E9,4837417\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880801_v11l35flk.nc.gz,1.336799789E9,4834242\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880901_v11l35flk.nc.gz,1.336676042E9,4801865\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881001_v11l35flk.nc.gz,1.336566352E9,4770289\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881101_v11l35flk.nc.gz,1.336568382E9,4769160\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881201_v11l35flk.nc.gz,1.336838712E9,4866335\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890101_v11l35flk.nc.gz,1.336886548E9,5003981\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890201_v11l35flk.nc.gz,1.336268373E9,5054907\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890301_v11l35flk.nc.gz,1.336605483E9,4979393\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890401_v11l35flk.nc.gz,1.336350339E9,4960865\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890501_v11l35flk.nc.gz,1.336551575E9,4868541\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890601_v11l35flk.nc.gz,1.336177278E9,4790364\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890701_v11l35flk.nc.gz,1.336685187E9,4854943\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890801_v11l35flk.nc.gz,1.336534686E9,4859216\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890901_v11l35flk.nc.gz,1.33622953E9,4838390\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891001_v11l35flk.nc.gz,1.336853599E9,4820645\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891101_v11l35flk.nc.gz,1.336882933E9,4748166\n" + 
-"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891201_v11l35flk.nc.gz,1.336748115E9,4922858\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1990/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1991/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1992/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1993/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1994/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1995/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1996/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1997/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1998/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1999/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2000/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2001/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2002/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2003/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2004/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2005/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2006/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2007/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2008/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2009/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2010/,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2011/,,\n"; +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880101_v11l35flk.nc.gz,1.336863115E9,4981045\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880201_v11l35flk.nc.gz,1.336723222E9,5024372\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880301_v11l35flk.nc.gz,1.336546575E9,5006043\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880401_v11l35flk.nc.gz,1.336860015E9,4948285\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880501_v11l35flk.nc.gz,1.336835143E9,4914250\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880601_v11l35flk.nc.gz,1.336484405E9,4841084\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880701_v11l35flk.nc.gz,1.336815079E9,4837417\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880801_v11l35flk.nc.gz,1.336799789E9,4834242\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880901_v11l35flk.nc.gz,1.336676042E9,4801865\n" + 
+"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881001_v11l35flk.nc.gz,1.336566352E9,4770289\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881101_v11l35flk.nc.gz,1.336568382E9,4769160\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881201_v11l35flk.nc.gz,1.336838712E9,4866335\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890101_v11l35flk.nc.gz,1.336886548E9,5003981\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890201_v11l35flk.nc.gz,1.336268373E9,5054907\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890301_v11l35flk.nc.gz,1.336605483E9,4979393\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890401_v11l35flk.nc.gz,1.336350339E9,4960865\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890501_v11l35flk.nc.gz,1.336551575E9,4868541\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890601_v11l35flk.nc.gz,1.336177278E9,4790364\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890701_v11l35flk.nc.gz,1.336685187E9,4854943\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890801_v11l35flk.nc.gz,1.336534686E9,4859216\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890901_v11l35flk.nc.gz,1.33622953E9,4838390\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891001_v11l35flk.nc.gz,1.336853599E9,4820645\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891101_v11l35flk.nc.gz,1.336882933E9,4748166\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891201_v11l35flk.nc.gz,1.336748115E9,4922858\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1990/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1991/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1992/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1993/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1994/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1995/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1996/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1997/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1998/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1999/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2000/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2001/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2002/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2003/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2004/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2005/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2006/,,\n" + 
+"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2007/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2008/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2009/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2010/,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/2011/,,\n"; Test.ensureEqual(results, expected, "results=\n" + results); Test.ensureTrue(allOk, ""); @@ -1326,37 +1425,37 @@ public static void testHyrax() throws Throwable { String resultsAr[] = getUrlsFromHyraxCatalog(url, fileNameRegex, recursive, pathRegex); String expectedAr[] = new String[]{ -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880101_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880201_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880301_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880401_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880501_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880601_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880701_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880801_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880901_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881001_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881101_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881201_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890101_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890201_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890301_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890401_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890501_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890601_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890701_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890801_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890901_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891001_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891101_v11l35flk.nc.gz", -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891201_v11l35flk.nc.gz"}; +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880101_v11l35flk.nc.gz", 
+"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880201_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880301_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880401_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880501_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880601_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880701_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880801_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19880901_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881001_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881101_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1988/month_19881201_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890101_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890201_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890301_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890401_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890501_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890601_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890701_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890801_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19890901_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891001_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891101_v11l35flk.nc.gz", +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1989/month_19891201_v11l35flk.nc.gz"}; Test.ensureEqual(resultsAr, expectedAr, "results=\n" + results); //different test of addToHyraxUrlList childUrls = new StringArray(); lastModified = new DoubleArray(); LongArray fSize = new LongArray(); - url = "http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/"; //startUrl, + url = "https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/"; //startUrl, fileNameRegex = "month_[0-9]{8}_v11l35flk\\.nc\\.gz"; //fileNameRegex, recursive = true; addToHyraxUrlList(url, fileNameRegex, recursive, pathRegex, dirsToo, @@ -1364,13 +1463,13 @@ public static void testHyrax() throws Throwable { results = childUrls.toNewlineString(); expected = -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19870701_v11l35flk.nc.gz\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19870801_v11l35flk.nc.gz\n" + 
-"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19870901_v11l35flk.nc.gz\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19871001_v11l35flk.nc.gz\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19871101_v11l35flk.nc.gz\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19871201_v11l35flk.nc.gz\n"; +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19870701_v11l35flk.nc.gz\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19870801_v11l35flk.nc.gz\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19870901_v11l35flk.nc.gz\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19871001_v11l35flk.nc.gz\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19871101_v11l35flk.nc.gz\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/month_19871201_v11l35flk.nc.gz\n"; Test.ensureEqual(results, expected, "results=\n" + results); results = lastModified.toString(); @@ -1379,29 +1478,29 @@ public static void testHyrax() throws Throwable { //test via oneStep -- dirs table = oneStep(url, fileNameRegex, recursive, pathRegex, true); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,,,\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870701_v11l35flk.nc.gz,1336609915,4807310\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870801_v11l35flk.nc.gz,1336785444,4835774\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870901_v11l35flk.nc.gz,1336673639,4809582\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871001_v11l35flk.nc.gz,1336196561,4803285\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871101_v11l35flk.nc.gz,1336881763,4787239\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871201_v11l35flk.nc.gz,1336705731,4432696\n"; +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,,,\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870701_v11l35flk.nc.gz,1336609915,4807310\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870801_v11l35flk.nc.gz,1336785444,4835774\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870901_v11l35flk.nc.gz,1336673639,4809582\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871001_v11l35flk.nc.gz,1336196561,4803285\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871101_v11l35flk.nc.gz,1336881763,4787239\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871201_v11l35flk.nc.gz,1336705731,4432696\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test via oneStep -- no dirs table = oneStep(url, fileNameRegex, recursive, pathRegex, false); - results = 
table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870701_v11l35flk.nc.gz,1336609915,4807310\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870801_v11l35flk.nc.gz,1336785444,4835774\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870901_v11l35flk.nc.gz,1336673639,4809582\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871001_v11l35flk.nc.gz,1336196561,4803285\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871101_v11l35flk.nc.gz,1336881763,4787239\n" + -"http://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871201_v11l35flk.nc.gz,1336705731,4432696\n"; +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870701_v11l35flk.nc.gz,1336609915,4807310\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870801_v11l35flk.nc.gz,1336785444,4835774\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19870901_v11l35flk.nc.gz,1336673639,4809582\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871001_v11l35flk.nc.gz,1336196561,4803285\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871101_v11l35flk.nc.gz,1336881763,4787239\n" + +"https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/1987/,month_19871201_v11l35flk.nc.gz,1336705731,4432696\n"; Test.ensureEqual(results, expected, "results=\n" + results); } catch (Throwable t) { @@ -1423,7 +1522,7 @@ public static void testHyrax() throws Throwable { * @param url the url of the current Thredds directory * (which usually includes /thredds/catalog/) * to which catalog.html will be added, e.g., - *
    http://data.nodc.noaa.gov/thredds/catalog/pathfinder/Version5.1_CloudScreened/5day/FullRes/ + *
    https://data.nodc.noaa.gov/thredds/catalog/pathfinder/Version5.1_CloudScreened/5day/FullRes/ * (If url has a file name, it must be catalog.html or catalog.xml.) * @param fileNameRegex e.g., ".*\\.hdf" * @param recursive @@ -1485,7 +1584,7 @@ public static boolean addToThreddsUrlList(String url, String fileNameRegex, while (true) { /* EXAMPLE from TDS 4.2.10 at -http://data.nodc.noaa.gov/thredds/catalog/pathfinder/Version5.1_CloudScreened/5day/FullRes/1981/catalog.html +https://data.nodc.noaa.gov/thredds/catalog/pathfinder/Version5.1_CloudScreened/5day/FullRes/1981/catalog.html ... @@ -1622,7 +1721,7 @@ public static void testThredds() throws Throwable { reallyVerbose = true; try { - String url = "http://data.nodc.noaa.gov/thredds/catalog/aquarius/nodc_binned_V3.0/monthly/"; //catalog.html + String url = "https://data.nodc.noaa.gov/thredds/catalog/aquarius/nodc_binned_V3.0/monthly/"; //catalog.html String fileNameRegex = "sss_binned_L3_MON_SCI_V3.0_\\d{4}\\.nc"; boolean recursive = true; String pathRegex = null; @@ -1648,12 +1747,12 @@ public static void testThredds() throws Throwable { String results = childUrls.toNewlineString(); String expected = -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2011.nc\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2012.nc\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2013.nc\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2014.nc\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2015.nc\n"; +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2011.nc\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2012.nc\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2013.nc\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2014.nc\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/sss_binned_L3_MON_SCI_V3.0_2015.nc\n"; Test.ensureEqual(results, expected, "results=\n" + results); results = lastModified.toString(); @@ -1669,27 +1768,27 @@ public static void testThredds() throws Throwable { //test via oneStep -- dirs Table table = oneStep(url, fileNameRegex, recursive, pathRegex, true); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,,,\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2011.nc,1405495932,2723152\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2012.nc,1405492834,6528434\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2013.nc,1405483892,6528434\n" + 
-"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2014.nc,1429802008,6528434\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2015.nc,1429867829,1635779\n"; +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,,,\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2011.nc,1405495932,2723152\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2012.nc,1405492834,6528434\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2013.nc,1405483892,6528434\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2014.nc,1429802008,6528434\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2015.nc,1429867829,1635779\n"; Test.ensureEqual(results, expected, "results=\n" + results); //test via oneStep -- no dirs table = oneStep(url, fileNameRegex, recursive, pathRegex, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2011.nc,1405495932,2723152\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2012.nc,1405492834,6528434\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2013.nc,1405483892,6528434\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2014.nc,1429802008,6528434\n" + -"http://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2015.nc,1429867829,1635779\n"; +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2011.nc,1405495932,2723152\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2012.nc,1405492834,6528434\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2013.nc,1405483892,6528434\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2014.nc,1429802008,6528434\n" + +"https://data.nodc.noaa.gov/thredds/fileServer/aquarius/nodc_binned_V3.0/monthly/,sss_binned_L3_MON_SCI_V3.0_2015.nc,1429867829,1635779\n"; Test.ensureEqual(results, expected, "results=\n" + results); } catch (Throwable t) { @@ -1715,18 +1814,18 @@ public static void testLocal(boolean doBigTest) throws Throwable { //recursive and dirToo and test \\ separator table = oneStep("c:\\erddapTest\\fileNames", ".*\\.png", true, tPathRegex, true); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + -"c:\\erddapTest\\fileNames\\,jplMURSST20150103090000.png,1421276044628,46482\n" + -"c:\\erddapTest\\fileNames\\,jplMURSST20150104090000.png,1420669338436,46586\n" + -"c:\\erddapTest\\fileNames\\sub\\,,1420735700318,0\n" + -"c:\\erddapTest\\fileNames\\sub\\,jplMURSST20150105090000.png,1420669304917,46549\n"; 
+"c:\\\\erddapTest\\\\fileNames\\\\,jplMURSST20150103090000.png,1421276044628,46482\n" + +"c:\\\\erddapTest\\\\fileNames\\\\,jplMURSST20150104090000.png,1420669338436,46586\n" + +"c:\\\\erddapTest\\\\fileNames\\\\sub\\\\,,1420735700318,0\n" + +"c:\\\\erddapTest\\\\fileNames\\\\sub\\\\,jplMURSST20150105090000.png,1420669304917,46549\n"; Test.ensureEqual(results, expected, "results=\n" + results); //recursive and !dirToo and test // separator table = oneStep(String2.unitTestDataDir + "fileNames", ".*\\.png", true, tPathRegex, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + String2.unitTestDataDir + "fileNames/,jplMURSST20150103090000.png,1421276044628,46482\n" + @@ -1736,7 +1835,7 @@ public static void testLocal(boolean doBigTest) throws Throwable { //!recursive and dirToo table = oneStep(String2.unitTestDataDir + "fileNames", ".*\\.png", false, tPathRegex, true); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + String2.unitTestDataDir + "fileNames/,jplMURSST20150103090000.png,1421276044628,46482\n" + @@ -1746,7 +1845,7 @@ public static void testLocal(boolean doBigTest) throws Throwable { //!recursive and !dirToo table = oneStep(String2.unitTestDataDir + "fileNames", ".*\\.png", false, tPathRegex, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + String2.unitTestDataDir + "fileNames/,jplMURSST20150103090000.png,1421276044628,46482\n" + @@ -1757,7 +1856,7 @@ public static void testLocal(boolean doBigTest) throws Throwable { //*** //oneStepDouble table = oneStepDouble(String2.unitTestDataDir + "fileNames", ".*\\.png", true, tPathRegex, true); - results = table.toCSVString(); + results = table.toString(); expected = "{\n" + "dimensions:\n" + @@ -1782,11 +1881,11 @@ public static void testLocal(boolean doBigTest) throws Throwable { "\n" + "// global attributes:\n" + "}\n" + -"row,directory,name,lastModified,size\n" + -"0," + String2.unitTestDataDir + "fileNames/,jplMURSST20150103090000.png,1.421276044628E9,46482.0\n" + -"1," + String2.unitTestDataDir + "fileNames/,jplMURSST20150104090000.png,1.420669338436E9,46586.0\n" + -"2," + String2.unitTestDataDir + "fileNames/sub/,,1.420735700318E9,0.0\n" + -"3," + String2.unitTestDataDir + "fileNames/sub/,jplMURSST20150105090000.png,1.420669304917E9,46549.0\n"; +"directory,name,lastModified,size\n" + +String2.unitTestDataDir + "fileNames/,jplMURSST20150103090000.png,1.421276044628E9,46482.0\n" + +String2.unitTestDataDir + "fileNames/,jplMURSST20150104090000.png,1.420669338436E9,46586.0\n" + +String2.unitTestDataDir + "fileNames/sub/,,1.420735700318E9,0.0\n" + +String2.unitTestDataDir + "fileNames/sub/,jplMURSST20150105090000.png,1.420669304917E9,46549.0\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -1795,7 +1894,7 @@ public static void testLocal(boolean doBigTest) throws Throwable { table = oneStepDoubleWithUrlsNotDirs(String2.unitTestDataDir + "fileNames", ".*\\.png", true, tPathRegex, "http://localhost:8080/cwexperimental/files/testFileNames/"); - results = table.toCSVString(); + results = table.toString(); expected = "{\n" + "dimensions:\n" + @@ -1820,10 +1919,10 @@ public static void testLocal(boolean doBigTest) throws Throwable { "\n" + "// global attributes:\n" + "}\n" + -"row,url,name,lastModified,size\n" + 
-"0,http://localhost:8080/cwexperimental/files/testFileNames/jplMURSST20150103090000.png,jplMURSST20150103090000.png,1.421276044628E9,46482.0\n" + -"1,http://localhost:8080/cwexperimental/files/testFileNames/jplMURSST20150104090000.png,jplMURSST20150104090000.png,1.420669338436E9,46586.0\n" + -"2,http://localhost:8080/cwexperimental/files/testFileNames/sub/jplMURSST20150105090000.png,jplMURSST20150105090000.png,1.420669304917E9,46549.0\n"; +"url,name,lastModified,size\n" + +"http://localhost:8080/cwexperimental/files/testFileNames/jplMURSST20150103090000.png,jplMURSST20150103090000.png,1.421276044628E9,46482.0\n" + +"http://localhost:8080/cwexperimental/files/testFileNames/jplMURSST20150104090000.png,jplMURSST20150104090000.png,1.420669338436E9,46586.0\n" + +"http://localhost:8080/cwexperimental/files/testFileNames/sub/jplMURSST20150105090000.png,jplMURSST20150105090000.png,1.420669304917E9,46549.0\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -1908,7 +2007,7 @@ public static void testAWSS3() throws Throwable { //recursive and dirToo table = oneStep(parent, ".*\\.nc", true, pathRegex, true); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + "http://nasanex.s3.amazonaws.com/NEX-DCP30/BCSD/rcp26/mon/atmos/tasmin/r1i1p1/v1.0/,,,\n" + @@ -1922,7 +2021,7 @@ public static void testAWSS3() throws Throwable { //recursive and !dirToo table = oneStep(parent, ".*\\.nc", true, pathRegex, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + "http://nasanex.s3.amazonaws.com/NEX-DCP30/BCSD/rcp26/mon/atmos/tasmin/r1i1p1/v1.0/CONUS/,tasmin_amon_BCSD_rcp26_r1i1p1_CONUS_bcc-csm1-1_200601-201012.nc,1380652638000,1368229240\n" + @@ -1934,7 +2033,7 @@ public static void testAWSS3() throws Throwable { //!recursive and dirToo table = oneStep(parent + child, ".*\\.nc", false, pathRegex, true); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + "http://nasanex.s3.amazonaws.com/NEX-DCP30/BCSD/rcp26/mon/atmos/tasmin/r1i1p1/v1.0/CONUS/,,,\n" + @@ -1947,7 +2046,7 @@ public static void testAWSS3() throws Throwable { //!recursive and !dirToo table = oneStep(parent + child, ".*\\.nc", false, pathRegex, false); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "directory,name,lastModified,size\n" + "http://nasanex.s3.amazonaws.com/NEX-DCP30/BCSD/rcp26/mon/atmos/tasmin/r1i1p1/v1.0/CONUS/,tasmin_amon_BCSD_rcp26_r1i1p1_CONUS_bcc-csm1-1_200601-201012.nc,1380652638000,1368229240\n" + @@ -2010,8 +2109,8 @@ public static Table sync(String remoteDir, String localDir, tPathRegex, false); //dir too rTable.leftToRightSort(2); //lexical sort, so can walk through below lTable.leftToRightSort(2); //lexical sort, so can walk through below - //String2.log("\nremote table (max of 5)\n" + rTable.dataToCSVString(5) + - // "\nlocal table (max of 5)\n" + lTable.dataToCSVString(5)); + //String2.log("\nremote table (max of 5)\n" + rTable.dataToString(5) + + // "\nlocal table (max of 5)\n" + lTable.dataToString(5)); StringArray rDir = (StringArray)rTable.getColumn(DIRECTORY); StringArray lDir = (StringArray)lTable.getColumn(DIRECTORY); @@ -2161,19 +2260,19 @@ public static void testSync() throws Throwable { Table table = sync(rDir, lDir, fileRegex, recursive, pathRegex, doIt); String2.pressEnterToContinue("\nCheck above to ensure these numbers:\n" + "\"found nAlready=3 
nToDownload=2 nTooRecent=1 nExtraLocal=1\"\n"); - String results = table.dataToCSVString(); + String results = table.dataToString(); String expected = //the lastModified values change periodically //these are the files which were downloaded "remote,local,lastModified\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/22560.xml," + String2.unitTestDataDir + "sync/NMFS/NEFSC/inport/xml/22560.xml,1475767320000\n" + -"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/22563.xml," + String2.unitTestDataDir + "sync/NMFS/NEFSC/inport/xml/22563.xml,1475767380000\n"; +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/22560.xml," + String2.unitTestDataDir + "sync/NMFS/NEFSC/inport/xml/22560.xml,1491319260000\n" + +"https://inport.nmfs.noaa.gov/inport-metadata/NOAA/NMFS/NEFSC/inport/xml/22563.xml," + String2.unitTestDataDir + "sync/NMFS/NEFSC/inport/xml/22563.xml,1491319320000\n"; Test.ensureEqual(results, expected, "results=\n" + results); //no changes, do the sync again table = sync(rDir, lDir, fileRegex, recursive, pathRegex, doIt); String2.pressEnterToContinue("\nCheck above to ensure these numbers:\n" + "\"found nAlready=5 nToDownload=0 nTooRecent=1 nExtraLocal=1\"\n"); - results = table.dataToCSVString(); + results = table.dataToString(); expected = "remote,local,lastModified\n"; Test.ensureEqual(results, expected, "results=\n" + results); @@ -2568,7 +2667,7 @@ public static void main(String args[]) throws Throwable { } else { Table table = oneStep(args[0], args[1], true, ".*", true); //tRecursive, tPathRegex, tDirectoriesToo - String2.log(table.dataToCSVString()); + String2.log(table.dataToString()); } System.exit(0); } diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SSR.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SSR.java index 327645e06..b239070ec 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SSR.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SSR.java @@ -141,9 +141,12 @@ public class SSR { public static boolean reallyVerbose = false; public static boolean debugMode = false; + public static String windows7Zip = "c:\\progra~1\\7-Zip\\7z"; //on Bob's computer + private static String contextDirectory; //lazy creation by getContextDirectory private static String tempDirectory; //lazy creation by getTempDirectory + /** * Returns a String which is a substring of the current string. * This checks for and deals with bad first and last values. @@ -460,6 +463,22 @@ public static int shell(String cmd[], PipeTo outPipe, PipeTo errPipe, return exitValue; } + /** + * This zips the contents of a directory (recursively) + * and puts the results in a zip file of the same name. + * + * @param dir with or without trailing slash. Forward or backslashes are okay. + */ + public static void zipADirectory(String dir, int timeOutSeconds) throws Exception { + //remove trailing slash + char slash = dir.indexOf('/') >= 0? '/' : '\\'; + if (dir.endsWith("/") || dir.endsWith("\\")) + dir = dir.substring(0, dir.length() - 1); + + SSR.zip(dir + ".zip", new String[]{dir}, timeOutSeconds, true, + File2.getDirectory(dir)); + } + /** * Put the specified files in a zip file (without directory info). * See http://javaalmanac.com/egs/java.util.zip/CreateZip.html . @@ -468,6 +487,8 @@ public static int shell(String cmd[], PipeTo outPipe, PipeTo errPipe, * @param zipDirName the full name for the .zip file (path + name + ".zip") * @param dirNames the full names of the files to be put in the zip file. 
* These can use forward or backslashes as directory separators. + * If a dirName is a directory, all the files in the directory (recursively) + * will be included. * @param timeOutSeconds (use -1 for no time out) * @throws Exception if trouble */ @@ -485,6 +506,8 @@ public static void zip(String zipDirName, String dirNames[], * @param zipDirName the full name for the .zip file (path + name + ".zip") * @param dirNames the full names of the files to be put in the zip file. * These can use forward or backslashes as directory separators. + * If a dirName is a directory, all the files in the directory (recursively) + * will be included. * @param timeOutSeconds (use -1 for no time out) * @param removeDirPrefix the prefix to be removed from the start of * each dir name (ending with a slash) @@ -497,16 +520,20 @@ public static void zip(String zipDirName, String dirNames[], } /** - * Put the specified files in a zip file (without directory info). + * Put the specified files in a zip file. * See http://javaalmanac.com/egs/java.util.zip/CreateZip.html . * If a file named zipDirName already exists, it is overwritten. * * @param zipDirName the full name for the .zip file (path + name + ".zip") + * Don't include c:. * @param dirNames the full names of the files to be put in the zip file. + * Don't include c:. * These can use forward or backslashes as directory separators. + * If a dirName is a directory, all the files in the directory (recursively) + * will be included. * @param timeOutSeconds (use -1 for no time out) * @param includeDirectoryInfo set this to false if you don't want - * any dir invo stored with the files + * any dir info stored with the files * @param removeDirPrefix if includeDirectoryInfo is true, * this is the prefix to be removed from the start of * each dir name (ending with a slash). @@ -556,23 +583,34 @@ private static void zip(String zipDirName, String dirNames[], //compress the files for (int i = 0; i < dirNames.length; i++) { - FileInputStream in = new FileInputStream(dirNames[i]); - - //add ZIP entry to output stream - String tName = includeDirectoryInfo? - dirNames[i].substring(removeDirPrefix.length()): //already validated above - File2.getNameAndExtension(dirNames[i]); - out.putNextEntry(new ZipEntry(tName)); - - //transfer bytes from the file to the ZIP file - int len; - while ((len = in.read(buf)) > 0) { - out.write(buf, 0, len); + //if directory, get all file names + ArrayList<String> al = new ArrayList(); + if (File2.isDirectory(dirNames[i])) { + RegexFilenameFilter.recursiveFullNameList(al, + dirNames[i], ".*", false); //directoriesToo + } else { + al.add(dirNames[i]); + } + + for (int i2 = 0; i2 < al.size(); i2++) { + FileInputStream in = new FileInputStream(al.get(i2)); + + //add ZIP entry to output stream + String tName = includeDirectoryInfo? + al.get(i2).substring(removeDirPrefix.length()): //already validated above + File2.getNameAndExtension(al.get(i2)); + out.putNextEntry(new ZipEntry(tName)); + + //transfer bytes from the file to the ZIP file + int len; + while ((len = in.read(buf)) > 0) { + out.write(buf, 0, len); + } + + //complete the entry + out.closeEntry(); + in.close(); } - - //complete the entry - out.closeEntry(); - in.close(); } //close the ZIP file @@ -699,6 +737,17 @@ private static void gzip(String gzipDirName, String dirNames[], (System.currentTimeMillis() - tTime) + "\n"); } + /** + * This handles the common case of unzipping a zip file (in place) that + * contains a directory with subdirectories and files.
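The rewritten loop above is the heart of the zip change: a dirNames entry that is a directory is first expanded (recursively) to all the files under it, and each file then goes through the usual ZipEntry copying. A stand-alone sketch of the same technique using only java.util.zip and java.nio (a hypothetical class, not this patch's code, which uses the RegexFilenameFilter and File2 helpers instead):

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.stream.Stream;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;

    public class ZipDirSketch {
        /** Zips every regular file under dir (recursively) into dir + ".zip",
          * storing entry names relative to dir's parent, like zipADirectory above.
          * Assumes dir is not a filesystem root. */
        public static void zipADirectory(String dir) throws IOException {
            Path root = Paths.get(dir).normalize();
            try (ZipOutputStream out =
                     new ZipOutputStream(new FileOutputStream(root + ".zip"));
                 Stream<Path> walk = Files.walk(root)) {
                for (Path p : (Iterable<Path>)walk::iterator) {
                    if (!Files.isRegularFile(p))
                        continue; //zips store files; dirs are implied by entry names
                    out.putNextEntry(new ZipEntry(
                        root.getParent().relativize(p).toString().replace('\\', '/')));
                    Files.copy(p, out); //transfer the file's bytes into this entry
                    out.closeEntry();
                }
            }
        }
    }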
+ */ + public static void unzipADirectory(String fullZipName, int timeOutSeconds, + StringArray resultingFullFileNames) throws Exception { + + unzip(fullZipName, File2.getDirectory(fullZipName), false, timeOutSeconds, + resultingFullFileNames); + } + /** * Extract all of the files from a zip file to * the base directory. @@ -712,7 +761,7 @@ private static void gzip(String gzipDirName, String dirNames[], * If false, new directories will be created as needed. * @param timeOutSeconds (use -1 for no time out) * @param resultingFullFileNames If this isn't null, - * that full names of unzipped files are added to this. + * the full names of unzipped files are added to this. * This method doesn't initially clear this StringArray! * @throws Exception */ @@ -744,9 +793,9 @@ public static void unzip(String fullZipName, String baseDir, } } else { //open an output file - //???do I need to make the directory??? if (ignoreZipDirectories) name = File2.getNameAndExtension(name); //remove dir info + File2.makeDirectory(File2.getDirectory(baseDir + name)); //name may include subdir names OutputStream out = new FileOutputStream(baseDir + name); //transfer bytes from the .zip file to the output file @@ -865,6 +914,45 @@ public static void unGzip(String fullGzName, String baseDir, } + /** + * This decompresses a .tar.gz file on Bob's Windows computer, in a + * directory with the name from the .tar.gz file. + * + * @throws Exception if trouble + */ + public static void windowsDecompressTargz(String sourceFullName, + boolean makeBaseDir, + int timeOutMinutes) throws Exception { + + String sourceDir = File2.getDirectory(sourceFullName); + String sourceTarName = File2.getNameNoExtension(sourceFullName); + String sourceJustName = File2.getNameNoExtension(sourceTarName); + + //extract tar from .gzip + String cmd = windows7Zip + " -y e " + sourceFullName + " -o" + sourceDir + + " -r"; + long cmdTime = System.currentTimeMillis(); + dosShell(cmd, timeOutMinutes*60); + String2.log(" cmd time=" + + Calendar2.elapsedTimeString(System.currentTimeMillis() - cmdTime)); + + //extract from the .tar file //gtspp4_at199001.tar + if (makeBaseDir) + File2.makeDirectory(sourceDir + sourceJustName); + File2.makeDirectory(sourceDir + sourceJustName + "/"); + cmd = windows7Zip + " -y x " + sourceDir + sourceTarName + //xtract with full dir names + " -o" + sourceDir + (makeBaseDir? sourceJustName + "/": "") + + " -r"; + cmdTime = System.currentTimeMillis(); + dosShell(cmd, timeOutMinutes*60); + String2.log(" cmd time=" + + Calendar2.elapsedTimeString(System.currentTimeMillis() - cmdTime)); + + //delete the .tar file + File2.delete(sourceDir + sourceTarName); + } + + /** * Unzip oldDir + oldName.zip (a zip containing one file: oldName) * and rename it to newDir + newName. @@ -1207,8 +1295,8 @@ public static void sendEmail(String smtpHost, int smtpPort, MimeMessage msg = new MimeMessage(session); msg.setFrom(new InternetAddress(fromAddress)); msg.setRecipients(Message.RecipientType.TO, InternetAddress.parse(toAddress, false)); - msg.setSubject(subject, "UTF-8"); - msg.setText(content, "UTF-8"); + msg.setSubject(subject, String2.UTF_8); + msg.setText(content, String2.UTF_8); msg.setHeader("X-Mailer", "msgsend"); msg.setSentDate(new Date()); msg.saveChanges(); //do last.
don't forget this @@ -1344,7 +1432,7 @@ else if (ch < 127) public static String percentEncode(String query) throws Exception { if (query == null) return ""; - return URLEncoder.encode(query, "UTF-8"); + return URLEncoder.encode(query, String2.UTF_8); } /** @@ -1361,7 +1449,7 @@ public static String percentDecode(String query) throws Exception { if (query == null) return ""; //query = String2.replaceAll(query, "+", " "); //URLDecoder doesn't do this. 2010-10-27 Yes it does. - return URLDecoder.decode(query, "UTF-8"); + return URLDecoder.decode(query, String2.UTF_8); /*was StringBuilder sb = new StringBuilder(query); String2.replaceAll(sb, "+", " "); //do first @@ -1436,13 +1524,16 @@ public static InputStream getUncompressedUrlInputStream(String urlString) * @param urlString The query MUST be already percentEncoded as needed. *
    See https://en.wikipedia.org/wiki/Percent-encoding . *
Note that reserved characters only need to be percent encoded in special circumstances (not always). + * @param charset default is ISO-8859-1. * @return a String with the response. Lines will always be separated by \n only. * @throws Exception if error occurs */ - public static String getUncompressedUrlResponseString(String urlString) throws Exception { + public static String getUncompressedUrlResponseString(String urlString, String charset) throws Exception { + if (!String2.isSomething(charset)) + charset = String2.ISO_8859_1; try { InputStream is = getUncompressedUrlInputStream(urlString); - BufferedReader in = new BufferedReader(new InputStreamReader(is)); + BufferedReader in = new BufferedReader(new InputStreamReader(is, charset)); StringBuilder sb = new StringBuilder(); String s; while ((s = in.readLine()) != null) { @@ -1599,7 +1690,7 @@ public static InputStream getUrlInputStream(String urlString) throws Exception { public static String[] getUrlResponse(String urlString) throws Exception { try { if (!String2.isUrl(urlString)) - return String2.readLinesFromFile(urlString, "ISO-8859-1", 2); + return String2.readLinesFromFile(urlString, String2.ISO_8859_1, 2); InputStream is = getUrlInputStream(urlString); BufferedReader in = new BufferedReader(new InputStreamReader(is)); @@ -2473,7 +2564,7 @@ public static void zipEach(String dir) { * * @param urlString where the content will be sent * @param mimeType e.g., "text/xml" - * @param encoding e.g., "UTF-8" + * @param encoding e.g., String2.UTF_8 * @param content the content to be sent * @throws Exception if trouble */ @@ -2538,7 +2629,7 @@ public static InputStream getPostInputStream(String urlString, "text/xml; subtype=tml/1.0 \n" + "\n"; - copy(getPostInputStream(url, "text/xml", "UTF-8", content), System.out); + copy(getPostInputStream(url, "text/xml", String2.UTF_8, content), System.out); }*/ diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SimpleXMLReader.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SimpleXMLReader.java index 398e96b52..4114112ce 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SimpleXMLReader.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/SimpleXMLReader.java @@ -12,6 +12,7 @@ import com.cohort.util.XML; import java.io.BufferedReader; import java.io.ByteArrayInputStream; +import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; @@ -694,19 +695,27 @@ else if (tTag.equals("/literalLayout")) } } - /** NOT FINISHED - public static void testValidity(String fileName, String firstTagName) throws Throwable { - String2.log("\nSimpleXMLReader.testValidity..."); + /** + * This tests validity of an XML file by running through the file, printing all the tags. + * If there is an error, you can see what the last few tags read were.
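A usage sketch for the finished method (the file path here is hypothetical; rootTag must match the file's top-level tag, as the @param below notes):

    //logs each tag with its line number; throws Throwable if the XML is not well formed
    SimpleXMLReader.testValidity("/programs/_tomcat/content/erddap/datasets.xml", //hypothetical path
        "erddapDatasets");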
+ * + * @param rootTag e.g., "erddapDatasets" + * @throws Throwable if trouble (e.g., file not valid) + */ + public static void testValidity(String fileName, String rootTag) throws Throwable { + String2.log("\n*** SimpleXMLReader.testValidity..."); SimpleXMLReader xmlReader = new SimpleXMLReader( - new FileInputStream(fileName), firstTag); + new FileInputStream(fileName), rootTag); while (true) { xmlReader.nextTag(); - if (xmlReader.stackSize() == 1 && xmlReader.allTags().equals("")) { + String at = xmlReader.allTags(); + String2.log("line=" + xmlReader.lineNumber() + " " + at); + if (xmlReader.stackSize() == 1 && at.equals("</" + rootTag + ">")) { xmlReader.close(); return; } } - } */ + } /** * This performs a unit test of this class. diff --git a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/TestSSR.java b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/TestSSR.java index fb01d519c..c111452bd 100644 --- a/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/TestSSR.java +++ b/WEB-INF/classes/gov/noaa/pfel/coastwatch/util/TestSSR.java @@ -254,7 +254,7 @@ public static void runNonUnixTests() throws Throwable { //future: test various compressed url's String2.log("test getURLResponse"); try { - sar = SSR.getUrlResponse("http://www.pfeg.noaa.gov/"); //"http://www.cohort.com"); + sar = SSR.getUrlResponse("https://www.pfeg.noaa.gov/"); //"http://www.cohort.com"); Test.ensureEqual( String2.lineContaining(sar, "Disclaimer and Privacy Policy") == -1, //"A free RPN scientific calculator applet") == -1, false, "Response=" + String2.toNewlineString(sar)); diff --git a/WEB-INF/classes/gov/noaa/pfel/erddap/ArchiveADataset.java b/WEB-INF/classes/gov/noaa/pfel/erddap/ArchiveADataset.java index aabefafa4..b0d78fd2d 100644 --- a/WEB-INF/classes/gov/noaa/pfel/erddap/ArchiveADataset.java +++ b/WEB-INF/classes/gov/noaa/pfel/erddap/ArchiveADataset.java @@ -5,6 +5,7 @@ package gov.noaa.pfel.erddap; import com.cohort.array.IntArray; +import com.cohort.array.PrimitiveArray; import com.cohort.array.StringArray; import com.cohort.util.Calendar2; import com.cohort.util.File2; @@ -29,13 +30,23 @@ import java.io.Writer; import java.util.GregorianCalendar; +import ucar.nc2.NetcdfFileWriter; /** * This is a command line program to run ArchiveADataset. - * This is geared toward meeting the recommendations for submitting data to NOAA NCEI. + * This is geared toward meeting the IOOS and BagIt recommendations + * for submitting data to NOAA NCEI. * https://sites.google.com/a/noaa.gov/ncei-ioos-archive/cookbook * https://sites.google.com/a/noaa.gov/ncei-ioos-archive/cookbook/data-integrity + * https://en.wikipedia.org/wiki/BagIt + * https://tools.ietf.org/html/draft-kunze-bagit-14 If that changes, also change BagIt-Version below. + * + * Bob has Bagger (GUI program from Library of Congress) downloaded from + * https://github.com/LibraryOfCongress/bagger/releases/ + * To run, double click on /programs/bagger-2.7.4/bagger-2.7.4/bin/bagger.bat + * (I specified JAVA_HOME in the .bat file.) + * To verify a bag: File : Open existing bag + * * @author Bob Simons (bob.simons@noaa.gov) 2015-12-15 */ public class ArchiveADataset { @@ -73,7 +84,7 @@ else if (s.equals("\"\"") || s.equals("nothing") || s.equals("\"nothing\"")) //e * @return the full name of the tgz file.
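With the new prompts added below, the answers (interactive, or given as command line parameters) now begin with the container type and compression. A sketch of a full invocation, using the same argument order and example values as the tests at the end of this file:

    //"default" accepts the default shown in each prompt
    String archiveName = (new ArchiveADataset()).doIt(new String[]{
        "-verbose",
        "BagIt",                //container type: original or BagIt
        "tar.gz",               //compression: zip or tar.gz
        "bob.simons@noaa.gov",  //contact email (written into the archive)
        "cwwcNDBCMet",          //datasetID
        "default",              //data variables (default = all)
        "&station=~\"3.*\"",    //constraints
        "station",              //subset by (tabledap datasets only)
        ".ncCFMA",              //data file type (tabledap datasets only)
        "SHA-256"});            //file digest type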
*/ public String doIt(String args[]) throws Throwable { - GregorianCalendar gcZ = Calendar2.newGCalendarLocal(); + GregorianCalendar gcZ = Calendar2.newGCalendarZulu(); String isoTime = Calendar2.formatAsISODateTimeT(gcZ) + "Z"; String compactTime = Calendar2.formatAsCompactDateTime(gcZ) + "Z"; String aadDir = EDStatic.bigParentDirectory + "ArchiveADataset/"; @@ -95,10 +106,13 @@ public String doIt(String args[]) throws Throwable { String def; //default String error = ""; String digestDefault = "SHA-256"; + String bagitDigestDefault = "SHA-256"; //SHA-1, but NOAA wants SHA-256 String digestPrompt = "Which type of file digest (checksum) do you want\n" + - "(one of " + String2.toCSSVString(String2.FILE_DIGEST_OPTIONS) + ")"; - String digestType, digestExtension; + "(specify one of " + String2.toCSSVString(String2.FILE_DIGEST_OPTIONS) + ")"; + String bagitDigestPrompt = digestPrompt + "\n" + + "(BagIt spec recommends MD5 and SHA-1. NCEI prefers SHA-256.)"; + String digestType, digestExtension, digestExtension1; if (args == null) args = new String[0]; @@ -128,6 +142,9 @@ public String doIt(String args[]) throws Throwable { String newCommandLine = (String2.OSIsWindows? "ArchiveADataset " : "./ArchiveADataset.sh ") + (reallyVerbose? "-verbose " : ""); + String manifestFullFileName = null; + Writer manifestFileWriter = null; + String aadSettings = null; EDD.verbose = true; EDD.reallyVerbose = reallyVerbose; NcHelper.verbose = reallyVerbose; @@ -146,13 +163,13 @@ public String doIt(String args[]) throws Throwable { " Press Enter or enter \"\" (two double quotes) or the word \"nothing\"\n" + " (without quotes) to specify a 0-length string.\n" + " Or, you can put all the answers as parameters on the command line.\n" + - "* Make a series of requests to the dataset and stage the files in\n" + + "* Make a series of requests to the dataset and stage the netcdf-3 files in\n" + " " + aadDir + "\n" + " Each of those files must be <2GB.\n" + - "* Make a .tar.gz file from all of the staged files.\n" + + "* Make related files (e.g., a file with a list of data files).\n" + + "* Make a container (e.g., .zip file) from all of the staged files.\n" + " It may be any size (limited only by disk space).\n" + - "* Make a file (e.g., .sha256) with the digest of the .tar.gz file.\n" + - "* Make an .listOfFiles.txt file with the list of files in the .tar.gz file.\n" + + "* Make a file (e.g., .md5.txt) with the digest of the container.\n" + "* Delete all of the staged files.\n" + "\n" + "Diagnostic information is shown on the screen and put in\n" + @@ -161,8 +178,25 @@ public String doIt(String args[]) throws Throwable { "For detailed information, see\n" + "http://coastwatch.pfeg.noaa.gov/erddap/download/setup.html#ArchiveADataset"); - //get email address + //get bagitMode int whichArg = 0; + String mode = get(args, whichArg++, "BagIt", //default + "Which type of container (original or BagIt)\n" + + "(NCEI prefers BagIt)"); + String modeLC = mode.toLowerCase(); + if (!modeLC.equals("original") && !modeLC.equals("bagit")) + throw new RuntimeException("You must specify 'original' or 'BagIt'."); + boolean bagitMode = modeLC.equals("bagit"); + String textFileEncoding = bagitMode? 
String2.UTF_8 : String2.ISO_8859_1; + + //compression + String compression = get(args, whichArg++, "tar.gz", //default + "Which type of compression (zip or tar.gz)\n" + + "(NCEI prefers tar.gz)").toLowerCase(); + if (!compression.equals("zip") && !compression.equals("tar.gz")) + throw new RuntimeException("You must specify 'zip' or 'tar.gz'."); + + //get email address String contactEmail = get(args, whichArg++, EDStatic.adminEmail, //default "What is a contact email address for this archive\n" + "(it will be written in the READ_ME.txt file in the archive)"); @@ -178,7 +212,7 @@ public String doIt(String args[]) throws Throwable { EDV dataVars[] = edd.dataVariables(); int ndv = dataVars.length; - tgzName = aadDir + datasetID + "_" + compactTime + ".tar.gz"; + tgzName = aadDir + datasetID + "_" + compactTime + "." + compression; String archiveDir = aadDir + datasetID + "_" + compactTime + "/"; String2.log("The files to be archived will be staged in\n " + archiveDir); @@ -246,7 +280,7 @@ public String doIt(String args[]) throws Throwable { "].\n" + "You probably won't be able to archive a large gridded dataset all at once.\n" + "It is too likely that something will go wrong,\n" + - "or the resulting .tar.gz file will be too large to transmit.\n" + + "or the resulting ." + compression + " file will be too large to transmit.\n" + "Instead, try archiving a week or month's worth.\n" + "The default shown below gets everything -- change it.\n" + "What subset do you want to archive"); @@ -264,11 +298,14 @@ public String doIt(String args[]) throws Throwable { String rightConstraints = constraintsString.substring(po + 1); //which type of file digest? - digestType = get(args, whichArg++, digestDefault, digestPrompt); + digestType = get(args, whichArg++, + bagitMode? bagitDigestDefault : digestDefault, + bagitMode? 
bagitDigestPrompt : digestPrompt); int whichDigest = String2.indexOf(String2.FILE_DIGEST_OPTIONS, digestType); if (whichDigest < 0) throw new RuntimeException("Invalid file digest type."); - digestExtension = String2.FILE_DIGEST_EXTENSIONS[whichDigest]; + digestExtension = String2.FILE_DIGEST_EXTENSIONS[whichDigest]; + digestExtension1 = digestExtension.substring(1); //*** write info about this archiving to archiveDir String2.log( @@ -276,33 +313,55 @@ public String doIt(String args[]) throws Throwable { " This may take a long time.\n"); Math2.sleep(5000); - error = String2.writeToFile(archiveDir + "READ_ME.txt", - "This archive was created by the ArchiveADataset script\n" + - "(which is part of ERDDAP) starting at " + isoTime + "\n" + - "based on these settings:\n" + - "Contact email=" + contactEmail + "\n" + - "ERDDAP datasetID=" + datasetID + "\n" + - "Data variables=" + dataVarsCSV + "\n" + - "Constraints=" + constraintsString + "\n" + - "Digest type=" + digestType + "\n", - "ISO-8859-1"); - if (error.length() > 0) - throw new RuntimeException(error); + if (bagitMode) { + manifestFullFileName = archiveDir + "manifest-" + + digestExtension1 + ".txt"; //md5 or sha256 + manifestFileWriter = new OutputStreamWriter( + new FileOutputStream(manifestFullFileName), String2.UTF_8); + + aadSettings = + "ArchiveADataset_container_type: " + mode + "\n" + + "ArchiveADataset_compression: " + compression + "\n" + + "ArchiveADataset_contact_email: " + contactEmail + "\n" + + "ArchiveADataset_ERDDAP_datasetID: " + datasetID + "\n" + + "ArchiveADataset_data_variables: " + dataVarsCSV + "\n" + + "ArchiveADataset_constraints: " + constraintsString + "\n" + + "ArchiveADataset_digest_type: " + digestType + "\n"; + + } else { + error = String2.writeToFile(archiveDir + "READ_ME.txt", + "This archive was created by the ArchiveADataset script\n" + + "(which is part of ERDDAP v" + EDStatic.erddapVersion + + ") starting at " + isoTime + "\n" + + "based on these settings:\n" + + "Container type=" + mode + "\n" + + "Compression=" + compression + "\n" + + "Contact email=" + contactEmail + "\n" + + "ERDDAP datasetID=" + datasetID + "\n" + + "Data variables=" + dataVarsCSV + "\n" + + "Constraints=" + constraintsString + "\n" + + "Digest type=" + digestType + "\n", + textFileEncoding); + if (error.length() > 0) + throw new RuntimeException(error); + + //save .das to archiveDir + resultName = eddGrid.makeNewFileForDapQuery(null, null, "", + archiveDir, datasetID, ".das"); + + //save .dds to archiveDir + resultName = eddGrid.makeNewFileForDapQuery(null, null, "", + archiveDir, datasetID, ".dds"); + } + newCommandLine += + String2.quoteParameterIfNeeded(mode) + " " + String2.quoteParameterIfNeeded(contactEmail) + " " + String2.quoteParameterIfNeeded(datasetID) + " " + String2.quoteParameterIfNeeded(dataVarsCSV) + " " + String2.quoteParameterIfNeeded(constraintsString) + " " + String2.quoteParameterIfNeeded(digestType); - //save .das to archiveDir - resultName = eddGrid.makeNewFileForDapQuery(null, null, "", - archiveDir, datasetID, ".das"); - - //save .dds to archiveDir - resultName = eddGrid.makeNewFileForDapQuery(null, null, "", - archiveDir, datasetID, ".dds"); - //write the data files to archiveDataDir int axis0start = constraints.get(0); int axis0stride = constraints.get(1); @@ -328,17 +387,23 @@ public String doIt(String args[]) throws Throwable { } else { try { String fullName = archiveDataDir + fileName + ".nc"; - eddGrid.saveAsNc(baseRequestUrl + ".nc", query.toString(), + 
eddGrid.saveAsNc(NetcdfFileWriter.Version.netcdf3, + baseRequestUrl + ".nc", query.toString(), fullName, true, 0); //keepUnusedAxes, lonAdjust nDataFilesCreated++; - //make the file digest - error = String2.writeToFile(fullName + digestExtension, - String2.fileDigest(digestType, fullName) + - " " + fileName + ".nc\n", - "ISO-8859-1"); - if (error.length() > 0) - throw new RuntimeException(error); + //write the file digest info + String digest = String2.fileDigest(digestType, fullName); + if (bagitMode) { + manifestFileWriter.write( + digest + " data/" + fileName + ".nc\n"); + } else { + error = String2.writeToFile(fullName + digestExtension, + digest + " " + fileName + ".nc\n", + textFileEncoding); + if (error.length() > 0) + throw new RuntimeException(error); + } } catch (Exception e) { String2.log("ERROR #" + nErrors++ + "\n" + @@ -413,12 +478,12 @@ public String doIt(String args[]) throws Throwable { fileTypeOptions.add(".nc"); fileTypeOptions.add(".csv"); fileTypeOptions.add(".json"); - def = accNcCF && subsetByCSV.length() > 0? ".ncCFMA" : - accNcCF? ".ncCF" : ".nc"; + def = accNcCF && subsetByCSV.length() > 0? ".ncCFMA" : ".nc"; //NCEI prefers .ncCFMA String fileType = ""; while (fileType.length() == 0) { fileType = get(args, whichArg, def, - "Create which file type (" + fileTypeOptions.toString() + ")"); + "Create which file type (" + fileTypeOptions.toString() + ")\n" + + "(NCEI prefers .ncCFMA if it is an option)"); if (fileTypeOptions.indexOf(fileType) < 0) { String msg = "fileType=" + fileType + " is not a valid option."; if (args.length > whichArg) { @@ -432,11 +497,14 @@ public String doIt(String args[]) throws Throwable { whichArg++; //which type of file digest? - digestType = get(args, whichArg++, digestDefault, digestPrompt); + digestType = get(args, whichArg++, + bagitMode? bagitDigestDefault : digestDefault, + bagitMode? 
bagitDigestPrompt : digestPrompt); int whichDigest = String2.indexOf(String2.FILE_DIGEST_OPTIONS, digestType); if (whichDigest < 0) throw new RuntimeException("Invalid file digest type."); digestExtension = String2.FILE_DIGEST_EXTENSIONS[whichDigest]; + digestExtension1 = digestExtension.substring(1); //*** write info about this archiving to archiveDir String2.log( @@ -444,21 +512,53 @@ public String doIt(String args[]) throws Throwable { " This may take a long time.\n"); Math2.sleep(5000); - error = String2.writeToFile(archiveDir + "READ_ME.txt", - "This archive was created by the ArchiveADataset script\n" + - "(which is part of ERDDAP) starting at " + isoTime + "\n" + - "based on these settings:\n" + - "Contact email=" + contactEmail + "\n" + - "ERDDAP datasetID=" + datasetID + "\n" + - "Data variables=" + dataVarsCSV + "\n" + - "Extra constraints=" + extraConstraints + "\n" + - "Subset by=" + subsetByCSV + "\n" + - "Data file type=" + fileType + "\n" + - "Digest type=" + digestType + "\n", - "ISO-8859-1"); - if (error.length() > 0) - throw new RuntimeException(error); + if (bagitMode) { + manifestFullFileName = archiveDir + "manifest-" + + digestExtension1 + ".txt"; //md5 or sha256 + manifestFileWriter = new OutputStreamWriter( + new FileOutputStream(manifestFullFileName), String2.UTF_8); + + aadSettings = + "ArchiveADataset_container_type: " + mode + "\n" + + "ArchiveADataset_compression: " + compression + "\n" + + "ArchiveADataset_contact_email: " + contactEmail + "\n" + + "ArchiveADataset_ERDDAP_datasetID: " + datasetID + "\n" + + "ArchiveADataset_data_variables: " + dataVarsCSV + "\n" + + "ArchiveADataset_extra_constraints: " + extraConstraints + "\n" + + "ArchiveADataset_subset_by: " + subsetByCSV + "\n" + + "ArchiveADataset_data_file_type: " + fileType + "\n" + + "ArchiveADataset_digest_type: " + digestType + "\n"; + + } else { + error = String2.writeToFile(archiveDir + "READ_ME.txt", + "This archive was created by the ArchiveADataset script\n" + + "(which is part of ERDDAP v" + EDStatic.erddapVersion + + ") starting at " + isoTime + "\n" + + "based on these settings:\n" + + "Container type=" + mode + "\n" + + "Compression=" + compression + "\n" + + "Contact email=" + contactEmail + "\n" + + "ERDDAP datasetID=" + datasetID + "\n" + + "Data variables=" + dataVarsCSV + "\n" + + "Extra constraints=" + extraConstraints + "\n" + + "Subset by=" + subsetByCSV + "\n" + + "Data file type=" + fileType + "\n" + + "Digest type=" + digestType + "\n", + textFileEncoding); + if (error.length() > 0) + throw new RuntimeException(error); + + //save .das to archiveDir + resultName = eddTable.makeNewFileForDapQuery(null, null, "", + archiveDir, datasetID, ".das"); + + //save .dds to archiveDir + resultName = eddTable.makeNewFileForDapQuery(null, null, "", + archiveDir, datasetID, ".dds"); + } + newCommandLine += + String2.quoteParameterIfNeeded(mode) + " " + String2.quoteParameterIfNeeded(contactEmail) + " " + String2.quoteParameterIfNeeded(datasetID) + " " + String2.quoteParameterIfNeeded(dataVarsCSV) + " " + @@ -467,14 +567,6 @@ public String doIt(String args[]) throws Throwable { String2.quoteParameterIfNeeded(fileType) + " " + String2.quoteParameterIfNeeded(digestType); - //save .das to archiveDir - resultName = eddTable.makeNewFileForDapQuery(null, null, "", - archiveDir, datasetID, ".das"); - - //save .dds to archiveDir - resultName = eddTable.makeNewFileForDapQuery(null, null, "", - archiveDir, datasetID, ".dds"); - if (subsetBySA.size() == 0) { //deal with all in one file String fileName = 
"allData"; @@ -488,15 +580,21 @@ public String doIt(String args[]) throws Throwable { tQuery, archiveDataDir, datasetID, fileType); nDataFilesCreated++; - //make the .md5file - error = String2.writeToFile( - archiveDataDir + resultName + digestExtension, - String2.fileDigest(digestType, archiveDataDir + resultName) + - " " + resultName + "\n", - "ISO-8859-1"); - if (error.length() > 0) - throw new RuntimeException(error); - + //write the file digest info + String digest = String2.fileDigest( + digestType, archiveDataDir + resultName); + if (bagitMode) { + manifestFileWriter.write( + digest + " data/" + resultName + "\n"); + } else { + error = String2.writeToFile( + archiveDataDir + resultName + digestExtension, + digest + " " + resultName + "\n", + textFileEncoding); + if (error.length() > 0) + throw new RuntimeException(error); + } + } catch (Exception e) { String2.log("ERROR #" + nErrors++ + "\n" + MustBe.throwableToString(e)); @@ -521,7 +619,7 @@ public String doIt(String args[]) throws Throwable { //write the data files to archiveDataDir for (int row = 0; row < nComboRows; row++) { //make directory tree from nComboCols-1 - StringBuilder tDir = new StringBuilder(archiveDataDir); + StringBuilder tDir = new StringBuilder(); StringBuilder tQuery = new StringBuilder(dataVarsCSV + extraConstraints); String fileName = null; for (int col = 0; col < nComboCols; col++) { @@ -533,27 +631,34 @@ public String doIt(String args[]) throws Throwable { tQuery.append("&" + combos.getColumnName(col) + "=" + (isString[col]? String2.toJson(s) : s)); } + String fullDir = archiveDataDir + tDir; //write the file and the .md5 file String2.log("writing data file for combo #" + row + "\n" + - " fileName=" + tDir + fileName + fileType + "\n" + + " fileName=" + fullDir + fileName + fileType + "\n" + " tQuery=" + tQuery); - File2.makeDirectory(tDir.toString()); + File2.makeDirectory(fullDir); if (!dryRun) { try { resultName = eddTable.makeNewFileForDapQuery(null, null, - tQuery.toString(), tDir.toString(), fileName, fileType); + tQuery.toString(), fullDir, fileName, fileType); nDataFilesCreated++; - //make the .md5file - error = String2.writeToFile( - tDir.toString() + resultName + digestExtension, - String2.fileDigest(digestType, tDir.toString() + resultName) + - " " + resultName + "\n", - "ISO-8859-1"); - if (error.length() > 0) - throw new RuntimeException(error); - + //write the file digest info + String digest = String2.fileDigest( + digestType, fullDir + resultName); + if (bagitMode) { + manifestFileWriter.write( + digest + " data/" + tDir + resultName + "\n"); + } else { + error = String2.writeToFile( + fullDir + resultName + digestExtension, + digest + " " + resultName + "\n", + textFileEncoding); + if (error.length() > 0) + throw new RuntimeException(error); + } + } catch (Exception e) { String2.log("ERROR #" + nErrors++ + "\n" + MustBe.throwableToString(e)); @@ -563,27 +668,66 @@ public String doIt(String args[]) throws Throwable { } } - //make the .tgz file - String2.log("\n*** making " + tgzName); - FileVisitorDNLS.makeTgz(archiveDir, ".*", true, ".*", - tgzName); + if (bagitMode) { + //close manifestFileWriter + manifestFileWriter.close(); + manifestFileWriter = null; + + //create required bagit.txt + Writer tw = new OutputStreamWriter( + new FileOutputStream(archiveDir + "bagit.txt"), String2.UTF_8); + tw.write( + "BagIt-Version: 0.97\n" + + "Tag-File-Character-Encoding: UTF-8\n"); + tw.close(); + + //create optional bag-info.txt + tw = new OutputStreamWriter( + new FileOutputStream(archiveDir + 
"bag-info.txt"), String2.UTF_8); + tw.write( + "Contact-Email: " + contactEmail + "\n" + + "Created_By: ArchiveADataset in ERDDAP v" + EDStatic.erddapVersion + "\n" + + aadSettings); + tw.close(); + + //create optional tagmanifest-md5.txt + tw = new OutputStreamWriter( + new FileOutputStream(archiveDir + "tagmanifest-" + digestExtension1 + ".txt"), String2.UTF_8); + tw.write( + String2.fileDigest(digestType, archiveDir + "bag-info.txt") + + " bag-info.txt\n" + + String2.fileDigest(digestType, archiveDir + "bagit.txt") + + " bagit.txt\n" + + String2.fileDigest(digestType, archiveDir + "manifest-" + digestExtension1 + ".txt") + + " manifest-" + digestExtension1 + ".txt\n"); + tw.close(); + } - //make the .md5file of the tgzName - String2.log("\n*** making " + tgzName + digestExtension); - error = String2.writeToFile( tgzName + digestExtension, + //make the zip or .tgz file + String2.log("\n*** making " + tgzName); + Math2.sleep(3000); //take a deep breath, let file system settle down + if (compression.equals("zip")) + SSR.zipADirectory(archiveDir, 30 * 60); //timeoutSeconds: 30 minutes + else FileVisitorDNLS.makeTgz(archiveDir, ".*", true, ".*", tgzName); + + //make the .md5.txt file of the tgzName + String2.log("\n*** making " + tgzName + digestExtension + ".txt"); + error = String2.writeToFile( tgzName + digestExtension + ".txt", String2.fileDigest(digestType, tgzName) + - " " + tgzName + "\n", - "ISO-8859-1"); + " " + File2.getNameAndExtension(tgzName) + "\n", + textFileEncoding); if (error.length() > 0) throw new RuntimeException(error); //make the .listOfFiles.txt of the tgzName - String2.log("\n*** making " + tgzName + ".listOfFiles.txt"); - error = String2.writeToFile( tgzName + ".listOfFiles.txt", - FileVisitorDNLS.oneStepToString(archiveDir, ".*", true, ".*"), - "ISO-8859-1"); - if (error.length() > 0) - throw new RuntimeException(error); + if (!bagitMode) { + String2.log("\n*** making " + tgzName + ".listOfFiles.txt"); + error = String2.writeToFile( tgzName + ".listOfFiles.txt", + FileVisitorDNLS.oneStepToString(archiveDir, ".*", true, ".*"), + textFileEncoding); + if (error.length() > 0) + throw new RuntimeException(error); + } //delete the staged files String2.log("\n*** deleting staged files in " + archiveDir); @@ -597,21 +741,33 @@ public String doIt(String args[]) throws Throwable { "A command line with all of these settings is\n" + newCommandLine + "\n" + "nDataFilesCreated=" + nDataFilesCreated + " nErrors=" + nErrors + - " time=" + Calendar2.elapsedTimeString(System.currentTimeMillis() - startTime)); + " time=" + Calendar2.elapsedTimeString(System.currentTimeMillis() - startTime) + "\n"); } catch (Throwable t) { + if (manifestFileWriter != null) { + try { + manifestFileWriter.close(); + File2.delete(manifestFullFileName); + } catch (Throwable mft) { + String2.log( + "ERROR while closing manifestFile=" + manifestFullFileName + ":\n" + + MustBe.throwableToString(mft)); + } + } + String msg = MustBe.throwableToString(t); if (msg.indexOf("ControlC") >= 0) { String2.flushLog(); return null; } + String2.log(msg); String2.log("\n\n********** ArchiveADataset failed.\n" + "A command line with all of these settings is\n" + newCommandLine + "\n" + "nDataFilesCreated=" + nDataFilesCreated + " nErrors=" + nErrors + - " time=" + Calendar2.elapsedTimeString(System.currentTimeMillis() - startTime)); + " time=" + Calendar2.elapsedTimeString(System.currentTimeMillis() - startTime) + "\n"); throw t; } String2.flushLog(); @@ -619,44 +775,266 @@ public String doIt(String args[]) throws 
Throwable { return tgzName; } - public static void testNcCF() throws Throwable { - String2.log("*** ArchiveADataset.testNcCF()"); + public static void testOriginalNcCF() throws Throwable { + String2.log("*** ArchiveADataset.testOriginalNcCF()"); - //make the tgz - String tgzName = (new ArchiveADataset()).doIt(new String[]{ + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ "-verbose", + "original", + "tar.gz", "bob.simons@noaa.gov", "cwwcNDBCMet", "default", //all data vars "&station=~\"3.*\"", // &station=~"3.*" "nothing", //should be station, but use "nothing" to test save as points - "default", //.ncCF + "default", //.ncCFMA "MD5"}); + Test.ensureTrue(targzName.endsWith(".tar.gz"), "targzName=" + targzName); //display it (in 7zip) - if (tgzName != null) - SSR.displayInBrowser("file://" + tgzName); + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10); - String results = String2.readFromFile(tgzName + ".listOfFiles.txt")[1]; + String ra[] = String2.readFromFile(targzName + ".listOfFiles.txt"); + Test.ensureEqual(ra[0], "", ""); + String results = ra[1]; String expected = -"cwwcNDBCMet.das " + today + "T.{8}Z 1462.\n" + -"cwwcNDBCMet.dds " + today + "T.{8}Z 394\n" + -"READ_ME.txt " + today + "T.{8}Z 300\n" + +"cwwcNDBCMet.das " + today + "T.{8}Z 147..\n" + +"cwwcNDBCMet.dds " + today + "T.{8}Z 3..\n" + +"READ_ME.txt " + today + "T.{8}Z 3..\n" + "data/\n" + -" cwwcNDBCMet.nc " + today + "T.{8}Z 12351220\n" + +" cwwcNDBCMet.nc " + today + "T.{8}Z 14......\n" + " cwwcNDBCMet.nc.md5 " + today + "T.{8}Z 49\n"; Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at external ...tar.gz.md5.txt + ra = String2.readFromFile(targzName + ".md5.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{32} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + + //String2.pressEnterToContinue("\n"); + } + + public static void testBagItNcCF() throws Throwable { + String2.log("*** ArchiveADataset.testBagItNcCF()"); + + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ + "-verbose", + "BagIt", + "default", //zip + "bob.simons@noaa.gov", + "cwwcNDBCMet", + "default", //all data vars + "&station=~\"3.*\"", // &station=~"3.*" + "nothing", //should be station, but use "nothing" as test of ncCFMA + ".ncCF", //default is .ncCFMA + "MD5"}); + Test.ensureTrue(targzName.endsWith(".zip"), "targzName=" + targzName); + + //display it (in 7zip) + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); + + //decompress and look at contents + SSR.unzipADirectory(targzName, 60, null); //timeoutSeconds + String tempDir = targzName.substring(0, targzName.length() - 4) + "/"; + int tempDirLen = tempDir.length(); + Table table = FileVisitorDNLS.oneStepWithUrlsNotDirs(tempDir, ".*", + true, ".*", ""); + table.removeColumn(FileVisitorDNLS.LASTMODIFIED); + table.removeColumn(FileVisitorDNLS.NAME); + String results = table.dataToString(); + String expected = +"url,size\n" + +"bag-info.txt,4..\n" + +"bagit.txt,55\n" + +"manifest-md5.txt,54\n" + +"tagmanifest-md5.txt,142\n" + +"data/cwwcNDBCMet.nc,12351...\n"; //will change periodically + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at manifest + String ra[] = String2.readFromFile(tempDir + "manifest-md5.txt", String2.UTF_8); + 
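Each line of a BagIt manifest-<alg>.txt pairs a hex digest with a path relative to the bag root, which is what the assertions below check. A stand-alone sketch of computing such a line with java.security.MessageDigest (a hypothetical helper; the patch itself goes through String2.fileDigest):

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.MessageDigest;

    public class ManifestLineSketch {
        /** Returns e.g. "9b3a...e1 data/cwwcNDBCMet.nc" for manifest-md5.txt. */
        public static String manifestLine(String bagDir, String relPath) throws Exception {
            MessageDigest md = MessageDigest.getInstance("MD5"); //or "SHA-1", "SHA-256"
            try (InputStream in = Files.newInputStream(Paths.get(bagDir + relPath))) {
                byte buf[] = new byte[8192];
                int n;
                while ((n = in.read(buf)) > 0)
                    md.update(buf, 0, n);
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest())
                hex.append(String.format("%02x", b)); //lowercase hex, per the regexes below
            return hex + " " + relPath;
        }
    }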
Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{32} data/cwwcNDBCMet.nc\n"; //2017-03-07 actual md5 verified by hand + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at bagit.txt + ra = String2.readFromFile(tempDir + "bagit.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"BagIt-Version: 0.97\n" + +"Tag-File-Character-Encoding: UTF-8\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional bag-info.txt + ra = String2.readFromFile(tempDir + "bag-info.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"Contact-Email: bob.simons@noaa.gov\n" + +"Created_By: ArchiveADataset in ERDDAP v" + EDStatic.erddapVersion + "\n" + +"ArchiveADataset_container_type: BagIt\n" + +"ArchiveADataset_compression: zip\n" + +"ArchiveADataset_contact_email: bob.simons@noaa.gov\n" + +"ArchiveADataset_ERDDAP_datasetID: cwwcNDBCMet\n" + +"ArchiveADataset_data_variables: \n" + +"ArchiveADataset_extra_constraints: &station=~\"3.*\"\n" + +"ArchiveADataset_subset_by: \n" + +"ArchiveADataset_data_file_type: .ncCF\n" + +"ArchiveADataset_digest_type: MD5\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional tagmanifest-md5.txt + ra = String2.readFromFile(tempDir + "tagmanifest-md5.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = //2017-03-07 actual md5's verified by hand +"[0-9a-f]{32} bag-info.txt\n" + +"[0-9a-f]{32} bagit.txt\n" + +"[0-9a-f]{32} manifest-md5.txt\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at external cwwcNDBCMet_20170307183959Z.tar.gz.md5.txt + ra = String2.readFromFile(targzName + ".md5.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = //2017-03-07 actual md5 verified by hand +"[0-9a-f]{32} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + + //String2.pressEnterToContinue("\n"); + } + + /** A test of NCEI-preferences */ + public static void testBagItNcCFMA() throws Throwable { + String2.log("*** ArchiveADataset.testBagItNcCFMA()"); + + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ + "-verbose", + "BagIt", + "tar.gz", + "bob.simons@noaa.gov", + "cwwcNDBCMet", + "default", //all data vars + "&station=~\"3.*\"", // &station=~"3.*" + "station", //should be station, but use "nothing" as test of ncCFMA + ".ncCFMA", + "SHA-256"}); + Test.ensureTrue(targzName.endsWith(".tar.gz"), "targzName=" + targzName); + + //display it (in 7zip) + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); + + //decompress and look at contents + SSR.windowsDecompressTargz(targzName, false, 5); //timeout minutes + String tempDir = targzName.substring(0, targzName.length() - 7) + "/"; + int tempDirLen = tempDir.length(); + Table table = FileVisitorDNLS.oneStepWithUrlsNotDirs(tempDir, ".*", + true, ".*", ""); + table.removeColumn(FileVisitorDNLS.LASTMODIFIED); + table.removeColumn(FileVisitorDNLS.NAME); + String results = table.dataToString(); + String expected = +"url,size\n" + +"bag-info.txt,4..\n" + +"bagit.txt,55\n" + +"manifest-sha256.txt,4..\n" + +"tagmanifest-sha256.txt,2..\n" + +"data/31201.nc,21....\n" + +"data/32012.nc,461....\n" + +"data/32301.nc,95....\n" + +"data/32302.nc,548....\n" + +"data/32487.nc,64....\n" + +"data/32488.nc,50....\n"; //will change periodically + 
Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at manifest + String ra[] = String2.readFromFile(tempDir + "manifest-sha256.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} data/31201.nc\n" + +"[0-9a-f]{64} data/32012.nc\n" + +"[0-9a-f]{64} data/32301.nc\n" + +"[0-9a-f]{64} data/32302.nc\n" + +"[0-9a-f]{64} data/32487.nc\n" + +"[0-9a-f]{64} data/32488.nc\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at bagit.txt + ra = String2.readFromFile(tempDir + "bagit.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"BagIt-Version: 0.97\n" + +"Tag-File-Character-Encoding: UTF-8\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional bag-info.txt + ra = String2.readFromFile(tempDir + "bag-info.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"Contact-Email: bob.simons@noaa.gov\n" + +"Created_By: ArchiveADataset in ERDDAP v" + EDStatic.erddapVersion + "\n" + +"ArchiveADataset_container_type: BagIt\n" + +"ArchiveADataset_compression: tar.gz\n" + +"ArchiveADataset_contact_email: bob.simons@noaa.gov\n" + +"ArchiveADataset_ERDDAP_datasetID: cwwcNDBCMet\n" + +"ArchiveADataset_data_variables: \n" + +"ArchiveADataset_extra_constraints: &station=~\"3.*\"\n" + +"ArchiveADataset_subset_by: station\n" + +"ArchiveADataset_data_file_type: .ncCFMA\n" + +"ArchiveADataset_digest_type: SHA-256\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional tagmanifest-sha256.txt + ra = String2.readFromFile(tempDir + "tagmanifest-sha256.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = //2017-03-07 actual sha256's verified by hand +"[0-9a-f]{64} bag-info.txt\n" + +"[0-9a-f]{64} bagit.txt\n" + +"[0-9a-f]{64} manifest-sha256.txt\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at external cwwcNDBCMet_20170307183959Z.tar.gz.sha256.txt + ra = String2.readFromFile(targzName + ".sha256.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + //String2.pressEnterToContinue("\n"); } - public static void testTrajectoryProfile() throws Throwable { - String2.log("*** ArchiveADataset.testTrajectoryProfile()"); + public static void testOriginalTrajectoryProfile() throws Throwable { + String2.log("*** ArchiveADataset.testOriginalTrajectoryProfile()"); - //make the tgz - String tgzName = (new ArchiveADataset()).doIt(new String[]{ + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ //"-verbose", //verbose is really verbose for this test + "original", + "tar.gz", "bob.simons@noaa.gov", "scrippsGliders", "default", //all data vars @@ -664,85 +1042,508 @@ public static void testTrajectoryProfile() throws Throwable { "&trajectory=~\"sp05.*\"&time>=2015-01-01&time<=2015-01-05", "default", "default", //trajectory, .ncCFMA "default"}); //SHA-256 + Test.ensureTrue(targzName.endsWith(".tar.gz"), "targzName=" + targzName); //display it (in 7zip) - if (tgzName != null) - SSR.displayInBrowser("file://" + tgzName); + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10); - String results = String2.readFromFile(tgzName + 
".listOfFiles.txt")[1]; + String ra[] = String2.readFromFile(targzName + ".listOfFiles.txt"); + Test.ensureEqual(ra[0], "", ""); + String results = ra[1]; String expected = -"READ_ME.txt " + today + "T.{8}Z 3..\n" + -"scrippsGliders.das " + today + "T.{8}Z 124..\n" + -"scrippsGliders.dds " + today + "T.{8}Z 6..\n" + +"READ_ME.txt " + today + "T.{8}Z 4..\n" + +"scrippsGliders.das " + today + "T.{8}Z 14...\n" + +"scrippsGliders.dds " + today + "T.{8}Z 7..\n" + "data/\n" + -" sp051-20141112.nc " + today + "T.{8}Z 1337..\n" + +" sp051-20141112.nc " + today + "T.{8}Z 1.....\n" + " sp051-20141112.nc.sha256 " + today + "T.{8}Z 84\n" + -" sp052-20140814.nc " + today + "T.{8}Z 4510..\n" + +" sp052-20140814.nc " + today + "T.{8}Z 4.....\n" + " sp052-20140814.nc.sha256 " + today + "T.{8}Z 84\n"; Test.ensureLinesMatch(results, expected, "results=\n" + results); + //look at external ...tar.gz.sha256.txt + ra = String2.readFromFile(targzName + ".sha256.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + //String2.pressEnterToContinue("\n"); } - public static void testGridAll() throws Throwable { - String2.log("*** ArchiveADataset.testGridAll()"); + public static void testBagItTrajectoryProfile() throws Throwable { + String2.log("*** ArchiveADataset.testBagItTrajectoryProfile()"); - //make the tgz - String tgzName = (new ArchiveADataset()).doIt(new String[]{ + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ + //"-verbose", //verbose is really verbose for this test + "bagit", + "zip", + "bob.simons@noaa.gov", + "scrippsGliders", + "default", //all data vars + // &trajectory=~"sp05.*"&time>=2015-01-01&time<=2015-01-05 + "&trajectory=~\"sp05.*\"&time>=2015-01-01&time<=2015-01-05", + "default", "default", //trajectory, .ncCFMA + "default"}); //SHA-256 + Test.ensureTrue(targzName.endsWith(".zip"), "targzName=" + targzName); + + //display it (in 7zip) + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); + + //decompress and look at contents + SSR.unzipADirectory(targzName, 60, null); //timeoutSeconds + String tempDir = targzName.substring(0, targzName.length() - 4) + "/"; + int tempDirLen = tempDir.length(); + Table table = FileVisitorDNLS.oneStepWithUrlsNotDirs(tempDir, ".*", + true, ".*", ""); + table.removeColumn(FileVisitorDNLS.LASTMODIFIED); + table.removeColumn(FileVisitorDNLS.NAME); + String results = table.dataToString(); + String expected = +"url,size\n" + +"bag-info.txt,4..\n" + +"bagit.txt,55\n" + +"manifest-sha1.txt,130\n" + +"tagmanifest-sha1.txt,167\n" + +"data/sp051-20141112.nc,148...\n" + +"data/sp052-20140814.nc,499...\n"; //will change periodically + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at manifest + String ra[] = String2.readFromFile(tempDir + "manifest-sha1.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{40} data/sp051-20141112.nc\n" + +"[0-9a-f]{40} data/sp052-20140814.nc\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at bagit.txt + ra = String2.readFromFile(tempDir + "bagit.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"BagIt-Version: 0.97\n" + +"Tag-File-Character-Encoding: UTF-8\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional bag-info.txt + ra = 
String2.readFromFile(tempDir + "bag-info.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"Contact-Email: bob.simons@noaa.gov\n" + +"Created_By: ArchiveADataset in ERDDAP v" + EDStatic.erddapVersion + "\n" + +"ArchiveADataset_container_type: bagit\n" + +"ArchiveADataset_compression: zip\n" + +"ArchiveADataset_contact_email: bob.simons@noaa.gov\n" + +"ArchiveADataset_ERDDAP_datasetID: scrippsGliders\n" + +"ArchiveADataset_data_variables: \n" + +"ArchiveADataset_extra_constraints: &trajectory=~\"sp05.*\"&time>=2015-01-01&time<=2015-01-05\n" + +"ArchiveADataset_subset_by: trajectory\n" + +"ArchiveADataset_data_file_type: .ncCFMA\n" + +"ArchiveADataset_digest_type: SHA-1\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional tagmanifest-sha1.txt + ra = String2.readFromFile(tempDir + "tagmanifest-sha1.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{40} bag-info.txt\n" + +"[0-9a-f]{40} bagit.txt\n" + +"[0-9a-f]{40} manifest-sha1.txt\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at external cwwcNDBCMet_20170307183959Z.tar.gz.sha1.txt + ra = String2.readFromFile(targzName + ".sha1.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{40} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + + //String2.pressEnterToContinue("\n"); + } + + public static void testOriginalGridAll() throws Throwable { + String2.log("*** ArchiveADataset.testOriginalGridAll()"); + + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ "-verbose", + "original", + "tar.gz", "bob.simons@noaa.gov", "erdVHNchla8day", //datasetID "default", //dataVarsCSV "default", //constraintsString "SHA-256"}); //SHA-256 + Test.ensureTrue(targzName.endsWith(".tar.gz"), "targzName=" + targzName); + //display it (in 7zip) - if (tgzName != null) - SSR.displayInBrowser("file://" + tgzName); + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10); - String results = String2.readFromFile(tgzName + ".listOfFiles.txt")[1]; + String ra[] = String2.readFromFile(targzName + ".listOfFiles.txt"); + Test.ensureEqual(ra[0], "", ""); + String results = ra[1]; String expected = -"erdVHNchla8day.das " + today + "T.{8}Z 5954\n" + +"erdVHNchla8day.das " + today + "T.{8}Z 6...\n" + "erdVHNchla8day.dds " + today + "T.{8}Z 438\n" + -"READ_ME.txt " + today + "T.{8}Z 307\n" + +"READ_ME.txt " + today + "T.{8}Z 3..\n" + "data/\n" + -" 20150301000000Z.nc " + today + "T.{8}Z 447840428\n" + +" 20150301000000Z.nc " + today + "T.{8}Z 44784....\n" + " 20150301000000Z.nc.sha256 " + today + "T.{8}Z 85\n" + -" 20150302000000Z.nc " + today + "T.{8}Z 447840428\n" + +" 20150302000000Z.nc " + today + "T.{8}Z 44784....\n" + " 20150302000000Z.nc.sha256 " + today + "T.{8}Z 85\n"; Test.ensureLinesMatch(results, expected, "results=\n" + results); + //look at external ...tar.gz.sha256.txt + ra = String2.readFromFile(targzName + ".sha256.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //String2.pressEnterToContinue("\n"); + } + + public static void testBagItGridAll() throws Throwable { + String2.log("*** 
ArchiveADataset.testBagItGridAll()"); + + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ + "-verbose", + "BagIt", + "ZIP", + "bob.simons@noaa.gov", + "erdVHNchla8day", //datasetID + "default", //dataVarsCSV + "default", //constraintsString + "SHA-256"}); //SHA-256 + Test.ensureTrue(targzName.endsWith(".zip"), "targzName=" + targzName); + + //display it (in 7zip) + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); + + //decompress and look at contents + SSR.unzipADirectory(targzName, 60, null); //timeoutSeconds + String tempDir = targzName.substring(0, targzName.length() - 4) + "/"; + int tempDirLen = tempDir.length(); + Table table = FileVisitorDNLS.oneStepWithUrlsNotDirs(tempDir, ".*", + true, ".*", ""); + table.removeColumn(FileVisitorDNLS.LASTMODIFIED); + table.removeColumn(FileVisitorDNLS.NAME); + String results = table.dataToString(); + String expected = +"url,size\n" + +"bag-info.txt,40.\n" + +"bagit.txt,55\n" + +"manifest-sha256.txt,180\n" + +"tagmanifest-sha256.txt,241\n" + +"data/20150301000000Z.nc,447840...\n" + +"data/20150302000000Z.nc,447840...\n"; //will change periodically + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at manifest + String ra[] = String2.readFromFile(tempDir + "manifest-sha256.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} data/20150301000000Z.nc\n" + +"[0-9a-f]{64} data/20150302000000Z.nc\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at bagit.txt + ra = String2.readFromFile(tempDir + "bagit.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"BagIt-Version: 0.97\n" + +"Tag-File-Character-Encoding: UTF-8\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional bag-info.txt + ra = String2.readFromFile(tempDir + "bag-info.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"Contact-Email: bob.simons@noaa.gov\n" + +"Created_By: ArchiveADataset in ERDDAP v" + EDStatic.erddapVersion + "\n" + +"ArchiveADataset_container_type: BagIt\n" + +"ArchiveADataset_compression: zip\n" + +"ArchiveADataset_contact_email: bob.simons@noaa.gov\n" + +"ArchiveADataset_ERDDAP_datasetID: erdVHNchla8day\n" + +"ArchiveADataset_data_variables: \n" + +"ArchiveADataset_constraints: \\[\\(2015-03-01T00:00:00Z\\):\\(2015-03-02T00:00:00Z\\)\\]\\[\\]\\[\\]\\[\\]\n" + +"ArchiveADataset_digest_type: SHA-256\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional tagmanifest-sha256.txt + ra = String2.readFromFile(tempDir + "tagmanifest-sha256.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} bag-info.txt\n" + +"[0-9a-f]{64} bagit.txt\n" + +"[0-9a-f]{64} manifest-sha256.txt\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at external cwwcNDBCMet_20170307183959Z.tar.gz.sha256.txt + ra = String2.readFromFile(targzName + ".sha256.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //String2.pressEnterToContinue("\n"); } - public static void testGridSubset() throws Throwable { - String2.log("*** ArchiveADataset.testGridSubset()"); + public static void testOriginalGridSubset() throws Throwable { + String2.log("*** 
ArchiveADataset.testOriginalGridSubset()"); - //make the tgz - String tgzName = (new ArchiveADataset()).doIt(new String[]{ + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ "-verbose", + "original", + "tar.gz", "bob.simons@noaa.gov", "erdVHNchla8day", //datasetID "default", //dataVarsCSV "[(2015-03-02T00:00:00Z)][][][]", //constraintsString "SHA-1"}); + Test.ensureTrue(targzName.endsWith(".tar.gz"), "targzName=" + targzName); //display it (in 7zip) - if (tgzName != null) - SSR.displayInBrowser("file://" + tgzName); + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); String today = Calendar2.getCurrentISODateTimeStringZulu().substring(0, 10); - String results = String2.readFromFile(tgzName + ".listOfFiles.txt")[1]; + String ra[] = String2.readFromFile(targzName + ".listOfFiles.txt"); + Test.ensureEqual(ra[0], "", ""); + String results = ra[1]; String expected = -"erdVHNchla8day.das " + today + "T.{8}Z 5954\n" + -"erdVHNchla8day.dds " + today + "T.{8}Z 438\n" + -"READ_ME.txt " + today + "T.{8}Z 282\n" + +"erdVHNchla8day.das " + today + "T.{8}Z 6...\n" + +"erdVHNchla8day.dds " + today + "T.{8}Z 4..\n" + +"READ_ME.txt " + today + "T.{8}Z 3..\n" + "data/\n" + -" 20150302000000Z.nc " + today + "T.{8}Z 447840428\n" + +" 20150302000000Z.nc " + today + "T.{8}Z 44784....\n" + " 20150302000000Z.nc.sha1 " + today + "T.{8}Z 61\n"; Test.ensureLinesMatch(results, expected, "results=\n" + results); + //look at external ...tar.gz.sha1.txt + ra = String2.readFromFile(targzName + ".sha1.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{40} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //String2.pressEnterToContinue("\n"); + } + + public static void testBagItGridSubset() throws Throwable { + String2.log("*** ArchiveADataset.testBagItGridSubset()"); + + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ + "-verbose", + "BagIt", + "zip", + "bob.simons@noaa.gov", + "erdVHNchla8day", //datasetID + "default", //dataVarsCSV + "[(2015-03-02T00:00:00Z)][][][]", //constraintsString + "SHA-1"}); + Test.ensureTrue(targzName.endsWith(".zip"), "targzName=" + targzName); + + //display it (in 7zip) + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); + + //decompress and look at contents + SSR.unzipADirectory(targzName, 60, null); //timeoutSeconds + String tempDir = targzName.substring(0, targzName.length() - 4) + "/"; + int tempDirLen = tempDir.length(); + Table table = FileVisitorDNLS.oneStepWithUrlsNotDirs(tempDir, ".*", + true, ".*", ""); + table.removeColumn(FileVisitorDNLS.LASTMODIFIED); + table.removeColumn(FileVisitorDNLS.NAME); + String results = table.dataToString(); + String expected = +"url,size\n" + +"bag-info.txt,3..\n" + +"bagit.txt,55\n" + +"manifest-sha1.txt,66\n" + +"tagmanifest-sha1.txt,167\n" + +"data/20150302000000Z.nc,447840...\n"; //will change periodically + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at manifest + String ra[] = String2.readFromFile(tempDir + "manifest-sha1.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{40} data/20150302000000Z.nc\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at bagit.txt + ra = String2.readFromFile(tempDir + "bagit.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = 
+"BagIt-Version: 0.97\n" + +"Tag-File-Character-Encoding: UTF-8\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional bag-info.txt + ra = String2.readFromFile(tempDir + "bag-info.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"Contact-Email: bob.simons@noaa.gov\n" + +"Created_By: ArchiveADataset in ERDDAP v" + EDStatic.erddapVersion + "\n" + +"ArchiveADataset_container_type: BagIt\n" + +"ArchiveADataset_compression: zip\n" + +"ArchiveADataset_contact_email: bob.simons@noaa.gov\n" + +"ArchiveADataset_ERDDAP_datasetID: erdVHNchla8day\n" + +"ArchiveADataset_data_variables: \n" + +"ArchiveADataset_constraints: \\[\\(2015-03-02T00:00:00Z\\)\\]\\[\\]\\[\\]\\[\\]\n" + +"ArchiveADataset_digest_type: SHA-1\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional tagmanifest-sha1.txt + ra = String2.readFromFile(tempDir + "tagmanifest-sha1.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{40} bag-info.txt\n" + +"[0-9a-f]{40} bagit.txt\n" + +"[0-9a-f]{40} manifest-sha1.txt\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at external ....tar.gz.sha1.txt + ra = String2.readFromFile(targzName + ".sha1.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{40} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + + //String2.pressEnterToContinue("\n"); + } + + public static void testBagItGridSubset2() throws Throwable { + String2.log("*** ArchiveADataset.testBagItGridSubset2()"); + + //make the targz + String targzName = (new ArchiveADataset()).doIt(new String[]{ + "-verbose", + "BagIt", + "tar.gz", + "bob.simons@noaa.gov", + "erdVHNchla8day", //datasetID + "default", //dataVarsCSV + "[(2015-03-01T00:00:00Z):(2015-03-02T00:00:00Z)][][][]", //constraintsString + "SHA-256"}); + Test.ensureTrue(targzName.endsWith(".tar.gz"), "targzName=" + targzName); + + //display it (in 7zip) + Math2.sleep(5000); + SSR.displayInBrowser("file://" + targzName); + Math2.sleep(5000); + + //decompress and look at contents + SSR.windowsDecompressTargz(targzName, false, 5); //timeout minutes + String tempDir = targzName.substring(0, targzName.length() - 7) + "/"; + int tempDirLen = tempDir.length(); + Table table = FileVisitorDNLS.oneStepWithUrlsNotDirs(tempDir, ".*", + true, ".*", ""); + table.removeColumn(FileVisitorDNLS.LASTMODIFIED); + table.removeColumn(FileVisitorDNLS.NAME); + String results = table.dataToString(); + String expected = +"url,size\n" + +"bag-info.txt,4..\n" + +"bagit.txt,55\n" + +"manifest-sha256.txt,1..\n" + +"tagmanifest-sha256.txt,2..\n" + +"data/20150301000000Z.nc,447......\n" + //will change periodically +"data/20150302000000Z.nc,447......\n"; //will change periodically + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at manifest + String ra[] = String2.readFromFile(tempDir + "manifest-sha256.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} data/20150301000000Z.nc\n" + +"[0-9a-f]{64} data/20150302000000Z.nc\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at bagit.txt + ra = String2.readFromFile(tempDir + "bagit.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"BagIt-Version: 0.97\n" + +"Tag-File-Character-Encoding: UTF-8\n"; + 
Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional bag-info.txt + ra = String2.readFromFile(tempDir + "bag-info.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"Contact-Email: bob.simons@noaa.gov\n" + +"Created_By: ArchiveADataset in ERDDAP v" + EDStatic.erddapVersion + "\n" + +"ArchiveADataset_container_type: BagIt\n" + +"ArchiveADataset_compression: tar.gz\n" + +"ArchiveADataset_contact_email: bob.simons@noaa.gov\n" + +"ArchiveADataset_ERDDAP_datasetID: erdVHNchla8day\n" + +"ArchiveADataset_data_variables: \n" + +"ArchiveADataset_constraints: \\[\\(2015-03-01T00:00:00Z\\):\\(2015-03-02T00:00:00Z\\)\\]\\[\\]\\[\\]\\[\\]\n" + +"ArchiveADataset_digest_type: SHA-256\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at optional tagmanifest-sha256.txt + ra = String2.readFromFile(tempDir + "tagmanifest-sha256.txt", String2.UTF_8); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} bag-info.txt\n" + +"[0-9a-f]{64} bagit.txt\n" + +"[0-9a-f]{64} manifest-sha256.txt\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //look at external ....tar.gz.sha256.txt + ra = String2.readFromFile(targzName + ".sha256.txt"); + Test.ensureEqual(ra[0], "", ""); + results = ra[1]; + expected = +"[0-9a-f]{64} " + File2.getNameAndExtension(targzName) + "\n"; + Test.ensureLinesMatch(results, expected, "results=\n" + results); + + //String2.pressEnterToContinue("\n"); } @@ -751,10 +1552,18 @@ public static void test() throws Throwable { String2.log("*** ArchiveADataset.test()"); /* - testNcCF(); -*/ testTrajectoryProfile(); - testGridAll(); - testGridSubset(); + testOriginalNcCF(); + testOriginalTrajectoryProfile(); + testOriginalGridAll(); + testOriginalGridSubset(); + + testBagItNcCF(); + testBagItTrajectoryProfile(); +*/ testBagItGridAll(); + testBagItGridSubset(); + testBagItNcCFMA(); //w NCEI preferences + testBagItGridSubset2(); //w NCEI preferences + /* */ } diff --git a/WEB-INF/classes/gov/noaa/pfel/erddap/DasDds.java b/WEB-INF/classes/gov/noaa/pfel/erddap/DasDds.java index f5724827d..d657819a7 100644 --- a/WEB-INF/classes/gov/noaa/pfel/erddap/DasDds.java +++ b/WEB-INF/classes/gov/noaa/pfel/erddap/DasDds.java @@ -121,20 +121,8 @@ public String doIt(String args[], boolean loop) throws Throwable { return String2.readFromFile(outFileName)[1]; } - //delete the datasetInfo files for this datasetID (in case incorrect info) try { - String dir = EDD.datasetDir(datasetID); - String2.log("dataset dir=" + dir + "\n" + - "dataset n files not deleted = " + - RegexFilenameFilter.regexDelete(dir, ".*", false)); - - } catch (Throwable t) { - String2.log("\n*** An error occurred while deleting the old info for " + datasetID + ":\n" + - MustBe.throwableToString(t)); - } - - try { - printToBoth(EDD.testDasDds(datasetID, verbose)); + printToBoth(EDD.testDasDds(true, datasetID, verbose)); //clearCache } catch (Throwable t) { String2.log( "\n*** An error occurred while trying to load " + datasetID + ":\n" + diff --git a/WEB-INF/classes/gov/noaa/pfel/erddap/Erddap.java b/WEB-INF/classes/gov/noaa/pfel/erddap/Erddap.java index 0cec445e8..8d4137f0b 100644 --- a/WEB-INF/classes/gov/noaa/pfel/erddap/Erddap.java +++ b/WEB-INF/classes/gov/noaa/pfel/erddap/Erddap.java @@ -142,9 +142,18 @@ public class Erddap extends HttpServlet { */ public static String plainFileTypes[] = { //no need for .csvp or .tsvp, because plainFileTypes never write units - ".csv", ".htmlTable", ".itx", 
".json", ".mat", ".nc", ".tsv", ".xhtml"}; + ".csv", ".htmlTable", ".itx", ".json", ".jsonlCSV", ".jsonlKVP", + ".mat", ".nc", ".nccsv", ".tsv", ".xhtml"}; public static String plainFileTypesString = String2.toCSSVString(plainFileTypes); + //version when new file types added + public final static String FILE_TYPES_124[] = + //for old remote erddaps, make .png locally so pngInfo is available + {".csvp", ".tsvp", "odvTxt", ".png"}; + public final static String FILE_TYPES_148[] = + {".csv0", ".tsv0"}; + public final static String FILE_TYPES_176[] = + {".jsonlCSV", ".jsonlKVP", ".nccsv", ".nccsvMetadata"}; // ************** END OF STATIC VARIABLES ***************************** @@ -892,7 +901,7 @@ public void doIndex(HttpServletRequest request, HttpServletResponse response, "
    FGDC Web Accessible Folder (WAF)\n" + - "with FGDC‑STD‑001‑1998" + EDStatic.externalLinkHtml(tErddapUrl) + ""; String isoLink = @@ -1504,7 +1513,11 @@ public void doLogin(HttpServletRequest request, HttpServletResponse response, "onclick='window.location.assign(\"" + loginUrl + "\")'") + //don't say succeeded. It only succeeds if user successfully signed into Google. "\n" + - tProblems); + tProblems + "\n" + + "

    Accessing Private Datasets via Scripts\n" + + "

    For instructions on logging into ERDDAP and accessing private datasets via scripts, see\n" + + "
    Access to Private Datasets in ERDDAP.\n" + + "\n"); } else { //tell user he is logged in @@ -2489,7 +2502,7 @@ public void doDataProviderForm2(HttpServletRequest request, HttpServletResponse "

    +
    Dataset " + EDStatic.htmlTooltipImage(tLoggedInAs, "Optional: This is a list of the actions (one per line) which led to the creation of this data." + "
    Ideally, each line includes a timestamp and a description of the action. (<=500 characters) For example," + - "
    Datafiles are downloaded ASAP from http://oceandata.sci.gsfc.nasa.gov/MODISA/L3SMI/ to NOAA NMFS SWFSC ERD." + + "
    Datafiles are downloaded ASAP from https://oceandata.sci.gsfc.nasa.gov/MODISA/L3SMI/ to NOAA NMFS SWFSC ERD." + "
    NOAA NMFS SWFSC ERD (erd.data@noaa.gov) uses NCML to add the time dimension and slightly modify the metadata.
    ") + " \n" + "
    + + + +
  • The above 3 lines, when run sequentially from a command line, will log you +into ERDDAP. In order to use these in a script you need to capture the id_token +from the first line, feed it to the second line, and write a cookie to be read +by subsequent lines. +
      + +
      • To develop a script, run the first ('https://accounts.google.com') +curl line exactly as it was copied from the developer tools, and capture the response +(you may get a curl error about the flag "--2.0"; just remove that flag). +In PHP it looks like the following: + +
    

    + +

    Log in to Google by executing the second line using $id_token, first removing the +"-H 'Cookie: stuff'" parameter and instead telling curl to write a cookie: + +

    + +

    Log in to ERDDAP, again removing the "-H 'Cookie: stuff'" parameter, and +using the previously written cookie: + +

    + +

    You should now be able to request data from the server, using the same cookie: + +

    + + + + +
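    The same flow can be written in Java (a rough sketch, not code from ERDDAP or from this document; the URLs and the idtoken form-field name are placeholders, so substitute the values you captured with the developer tools). A java.net.CookieManager plays the role of curl's cookie file:

        import java.io.OutputStream;
        import java.net.CookieHandler;
        import java.net.CookieManager;
        import java.net.HttpURLConnection;
        import java.net.URL;
        import java.nio.charset.StandardCharsets;

        public class PrivateDatasetClient {
            public static void main(String[] args) throws Exception {
                //keep the session cookie between requests (like curl's -c/-b options)
                CookieHandler.setDefault(new CookieManager());

                //log in to ERDDAP, POSTing a previously captured id_token
                //(the URL and the idtoken field name are hypothetical placeholders)
                HttpURLConnection login = (HttpURLConnection) new URL(
                    "https://yourErddap.example.com/erddap/login.html").openConnection();
                login.setRequestMethod("POST");
                login.setDoOutput(true);
                byte[] form = ("idtoken=" + args[0]).getBytes(StandardCharsets.UTF_8);
                try (OutputStream out = login.getOutputStream()) { out.write(form); }
                login.getResponseCode(); //complete the request; the cookie is now stored

                //request data; the CookieManager re-sends the session cookie
                HttpURLConnection data = (HttpURLConnection) new URL(
                    "https://yourErddap.example.com/erddap/tabledap/someDatasetID.csv").openConnection();
                System.out.println("data request status: " + data.getResponseCode());
            }
        }
    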

    Contact

    +Questions, comments, suggestions? Please send an email to + bob dot simons at noaa dot gov +and include the ERDDAP URL directly related to your question or comment. + +
      +
    +

    ERDDAP, Version 1.76 +
    Disclaimers | + Privacy Policy +

      +

      + +

  • + + + diff --git a/download/EDDTableFromEML.html b/download/EDDTableFromEML.html index 02c7f6e8f..62f6b4950 100644 --- a/download/EDDTableFromEML.html +++ b/download/EDDTableFromEML.html @@ -380,15 +380,15 @@

    Issues with the EML Files

  • DateTime Format Strings
    EML has a standard way to describe date time formats, but there is considerable variation in its use in EML files. - (I was previously wrong about this. See the EML documentation - for formatString (which appears to match the Java/Joda time format + (I was previously wrong about this. I see the EML documentation + for formatString, which appears to match the Java DateTimeFormatter specification (external link), but which lacks the important guidelines about its use, - with the result that formatString is often/usually improperly used.) + with the result that formatString is often/usually improperly used.) There are several instances with incorrect case, and/or incorrect duplication of a letter, and/or non-standard formatting. @@ -572,7 +572,7 @@ 
    

    Contact


     
    -

    ERDDAP, Version 1.74 +

    ERDDAP, Version 1.76
    Disclaimers | Privacy Policy

      diff --git a/download/NCCSV.html b/download/NCCSV.html new file mode 100644 index 000000000..5e0474205 --- /dev/null +++ b/download/NCCSV.html @@ -0,0 +1,857 @@ + + + +NCCSV + + + + + + +
    +

    NCCSV - +
    A NetCDF-Compatible ASCII CSV File Specification, +
    Version 1.00

    + +
    Bob Simons <bob.simons at noaa.gov> and Steve Hankin +
    "NCCSV" by Bob Simons and Steve Hankin is licensed under +CC BY 4.0
    + +

    Contents

    + + + +

      + +

    Introduction

    + +This document specifies an ASCII CSV text file format that can contain +all of the information (metadata and data) that can be found in a NetCDF .nc +file that contains a CSV-file-like table of data. The file extension for an +ASCII CSV text file following this specification must be .csv so that +it can be read easily and correctly into spreadsheet programs like +Excel and Google Sheets. Bob Simons will write software to convert an +NCCSV file into a NetCDF-3 (and perhaps also a NetCDF-4) .nc file, +and the reverse, with no loss of information. +Bob Simons has modified +ERDDAP +to support reading and writing this type of file. + + +

    The NCCSV format is designed so that spreadsheet software such as Excel +and Google Sheets can import an NCCSV file as a csv file, with all of the +information in the spreadsheet's cells ready for editing. Or, a spreadsheet +can be created from scratch following the NCCSV conventions. +Regardless of the source of the spreadsheet, +if it is then exported as a .csv file, +it will conform to the NCCSV specification and no information will be lost. +The only differences between NCCSV files and the analogous spreadsheet +files which follow these conventions are: + +

      +
    • NCCSV files have values on a line separated by commas. +
      Spreadsheets have values on a line in adjacent cells. +
    • Strings in NCCSV files are often surrounded by double quotes. +
      Strings in spreadsheets are never surrounded by double quotes. +
    • Internal double quotes ( " ) in Strings in NCCSV files appear + as 2 double quotes. +
      Internal double quotes in spreadsheets appear as 1 double quote. +
    +

    See the Spreadsheet + section below for more information. + +

    Streamable - +Like CSV files in general, NCCSV files are streamable. +Thus, if an NCCSV file is generated on-the-fly by a data server such as +ERDDAP, +the server can start to stream data to the requester before all of the data +has been gathered. This is a useful and desirable feature. NetCDF files, +by contrast, are not streamable. +
    

    ERDDAP - +This specification is designed so that NCCSV files and the .nc files +that can be created from them can be used by an +ERDDAP data server +(via the +EDDTableFromNccsvFiles +and +EDDTableFromNcFiles dataset types), +but this specification is external to ERDDAP. +ERDDAP has several required global attributes and many recommended global +and variable attributes, mostly based on CF and ACDD attributes (see +
    https://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#globalAttributes). + +

    Balance - +The design of the NCCSV format is a balance of several requirements: + +

      +
    • The files must contain all of the data and metadata that would be in a +tabular NetCDF file, including specific data types. +
    • The files must be able to be read into and then written out of a +spreadsheet with no loss of information. +
    • The files must be easy for humans to create, edit, read, and understand. +
    • The files must be able to be unambiguously parsed by computer programs. +
    +If some requirement in this document seems odd or picky, +it is probably needed to meet one of these requirements. + +

    Other Specifications - +This specification refers to several other specifications and libraries +that it is designed to work with, but this specification is not a part of +any of those other specifications, nor does it need any changes to them, +nor does it conflict with them. +If a detail related to one of these standards is not specified here, +see the related specification. Notably, this includes: + +

    + +

    Notation - +In this specification, brackets, [ ], denote optional items. + + +

      + +

    File Structure

    +A complete NCCSV file consists of two sections: the metadata section, +followed by the data section. + +

    NCCSV files must contain only 7-bit ASCII characters. +Because of this, the character set or encoding used to write and +read the file may be any character set or encoding which is compatible +with the 7-bit ASCII character set, e.g., ISO-8859-1. +ERDDAP reads and writes NCCSV files with the ISO-8859-1 charset. + +

    NCCSV files may use either newline (\n) +(which is common on Linux and Mac OS X computers) +or carriageReturn plus newline (\r\n) (which is common on Windows computers) +as end-of-line markers, but not both. + +

    .nccsvMetadata - +When both the creator and the reader are expecting it, +it is also possible and sometimes useful to make a variant of an NCCSV file +which contains just the metadata section (including the *END_METADATA* line). +The result provides a complete description of the file's attributes, +variable names, and data types, +thus serving the same purpose as the .das plus .dds responses +from an OPeNDAP server. ERDDAP will return this variation +if you request fileType=.nccsvMetadata from an ERDDAP dataset. + + +

      + +

    The Metadata Section

    + +In an NCCSV file, each line of the metadata section +uses the format +
    variableName,attributeName,value1[,value2][,value3][,value4][...] +
    Spaces before or after items are not allowed because they cause problems +when importing the file into spreadsheet programs. + +

    The +first line of an NCCSV file is the first line of the metadata section and +must have a *GLOBAL* +Conventions attribute listing all of the +conventions used in the file as a String containing a CSV list, for example: +
    *GLOBAL*,Conventions,"COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0" +
    One of the conventions listed must be NCCSV-1.0, +which refers to the current version of this specification. + +

    The +end of the metadata section of an NCCSV file must be denoted by a +line with only +
    *END_METADATA* + +

    It is recommended but not required that all of the attributes for a +given variable appear on adjacent lines of the metadata section. +If an NCCSV file is converted into a NetCDF file, the order that the +variableNames first appear in the metadata section will be the order of +the variables in the NetCDF file. + +

    Optional blank lines are allowed in the metadata section after the required +first line with +*GLOBAL* Conventions +information (see below) +and before the required last line with *END_METADATA*. + +

    If a spreadsheet is created from an NCCSV file, +the metadata section will appear with variable names in column A, +attribute names in column B, and values in column C. +
    

    If a spreadsheet following these conventions is saved as a CSV file, +there will often be extra commas at the end of the lines in the metadata +section. The software that converts NCCSV files into .nc files will ignore +the extra commas. + + +
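    Since every line of the metadata section is a plain CSV line, a reader can split it with a small state machine. Here is a minimal, illustrative Java sketch (not the converter software itself); it honors the double-quote rules described under value below, returning each token with surrounding quotes removed and "" reduced to a single quote:

        import java.util.ArrayList;
        import java.util.List;

        public class NccsvLineSplitter {
            //split one NCCSV line into tokens, honoring double-quoted tokens
            public static List<String> split(String line) {
                List<String> tokens = new ArrayList<String>();
                StringBuilder sb = new StringBuilder();
                boolean inQuotes = false;
                for (int i = 0; i < line.length(); i++) {
                    char c = line.charAt(i);
                    if (c == '"') {
                        if (inQuotes && i + 1 < line.length() && line.charAt(i + 1) == '"') {
                            sb.append('"'); //"" is a literal double quote
                            i++;
                        } else {
                            inQuotes = !inQuotes; //an opening or closing quote
                        }
                    } else if (c == ',' && !inQuotes) {
                        tokens.add(sb.toString()); //spaces around items are not allowed
                        sb.setLength(0);
                    } else {
                        sb.append(c);
                    }
                }
                tokens.add(sb.toString());
                return tokens;
            }
        }

    Applied to the *GLOBAL* Conventions line shown above, this yields the three tokens *GLOBAL*, Conventions, and COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0.
    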

    variableName

    +variableName is the case-sensitive name of a variable in the data file. +All variable names must begin with a 7-bit ASCII letter or underscore and +be composed of 7-bit ASCII letters, underscores, and 7-bit ASCII digits. +The special variableName +*GLOBAL* is used to denote global metadata. + + +

    attributeName

    +attributeName is the case-sensitive name of an attribute +associated with a variable or *GLOBAL*. +All attribute names must begin with a 7-bit ASCII letter or +underscore and be composed of 7-bit ASCII letters, underscores, +and 7-bit ASCII digits. + +

    *SCALAR* - +The special attributeName *SCALAR* +can be used to create a +scalar data variable and define its value. +The data type of the *SCALAR* defines the data type +for the variable, +so do not specify a *DATA_TYPE* attribute for scalar variables. +Note that there must not be data for the scalar variable in the Data Section +of the NCCSV file. + +

    For example, to create a scalar variable named "ship" +with the value "Okeanos Explorer" and a cf_role attribute, use: +
    ship,*SCALAR*,"Okeanos Explorer" +
    ship,cf_role,trajectory_id
    +
    When a scalar data variable is read into ERDDAP, +the scalar value is converted into a column in the data table with the +same value on every row. + + +

    value

    +

    value is the value of the metadata attribute +and must be an array of one or more values of a single data type: +byte, short, int, long, float, double, String, or char. +No other data types are supported. Attributes with no value will be ignored. +If there is more than one sub-value, the sub-values must all be of the +same data type and separated by commas, for example: +
    
    sst,actual_range,0.17f,23.58f +
    If there are multiple String values, use a single String with +\n (newline) characters separating the substrings. + +

    The definitions of the attribute data types are: + +

      +
    • byte + attribute values (8-bit, signed) must be written with the + suffix 'b', e.g., -7b, 0b, 7b . The range of valid byte values is -128 to 127. + A number that looks like a byte but is invalid (e.g., 128b) + will generate an error message. +
        +
    • short + attribute values (16-bit, signed) must be written with the + suffix 's', e.g., -30000s, 0s, 30000s. The range of valid short values is + -32768 to 32767. A number that looks like a short but is invalid + (e.g., 32768s) will generate an error message. +
    
        +
    • int + attribute values (32-bit, signed) must be written as JSON ints + without a decimal point or exponent, but with the suffix 'i', + e.g., -12067978i, 0i, 12067978i. The range of valid int values is + -2147483648 to 2147483647. A number that looks like an int but is invalid + (e.g., 2147483648i) will generate an error message. +
        +
    • long + attribute values (64-bit, signed, currently supported by + NUG and ERDDAP but not yet supported by CF) must be written without + a decimal point and with the suffix 'L', + e.g., -12345678987654321L, 0L, 12345678987654321L . + If you use the converting software to convert an NCCSV file with long + values into a NetCDF-3 file, any long values will be converted to double values. + The range of valid long values is -9223372036854775808 to 9223372036854775807. + A number that looks like a long but is invalid (e.g., 9223372036854775808L) + will generate an error message. +
        +
    • float + attribute values (32-bit) must be written with the suffix 'f' + and may have a decimal point and/or an exponent, + e.g., 0f, 1f, 12.34f, 1e12f, 1.23e+12f, 1.23e12f, 1.87E-7f. + Use NaNf for a float NaN (missing) value. + The range of floats is approximately + +/-3.40282347E+38f (~7 significant decimal digits). + A number that looks like a float but is invalid (e.g., 1.0e39f) + will generate an error message. +
        +
    • double + attribute values (64-bit) must be written + with the suffix 'd' and may have a decimal point and/or an exponent, + e.g., 0d, 1d, 12.34d, 1e12d, 1.23e+12d, 1.23e12d, 1.87E-7d. + Use NaNd for a double NaN (missing) value. + The range of doubles is approximately +/-1.79769313486231570E+308d + (~15 significant decimal digits). + A number that looks like a double but is invalid (e.g., 1.0e309d) + will generate an error message. +
        +
    • String + attribute values are a sequence of UCS-2 characters + (i.e., 2-byte Unicode characters, as in Java), + which must be written as 7-bit ASCII, JSON-like strings + so that non-ASCII characters can be specified. +
    
        +
      • Double quotes (") must be encoded as two double + quotes (""). That's what spreadsheet programs require when + reading .csv files. That's what spreadsheet programs write when + you save a spreadsheet as a .csv file. +
      • The special JSON backslash-encoded characters must be encoded as + in JSON (notably \n (newline), but also \\ (backslash), + \f (formfeed), \t (tab), \r (carriage return) or with the + \uhhhh syntax. In a spreadsheet, + don't use Alt Enter to specify a new line within a text cell; + instead, use \n (2 characters: backslash and 'n') to indicate a new line. +
      • \uhhhh - + All characters less than character #32 or greater than character #126, + and not otherwise encoded, must be encoded with the syntax + \uhhhh, where hhhh is the 4-digit hexadecimal + number of the character, e.g., the Euro sign is \u20AC. + See the code pages referenced at + https://en.wikipedia.org/wiki/Unicode (external link) + to find the hexadecimal numbers associated with specific Unicode characters, + or use a software library. +
      • If the String has a space at the beginning or end, or includes " + (double quote) or a comma, or contains values that would otherwise be + interpreted as some other data type (e.g., an int), the entire String must + be enclosed in double quotes; otherwise, unlike JSON, + the enclosing double quotes are optional. + We recommend: when in doubt, enclose the entire String in double quotes. + Spaces at the beginning or end of a String are strongly discouraged. +
      • For now, the use of characters greater than #255 is discouraged. + ERDDAP supports them internally and in some output file types + (e.g., .json and .nccsv). + NetCDF-3 files do not support such characters because NetCDF files + use 1-byte characters and CF currently does not have a system for + specifying how Unicode characters are encoded in NetCDF Strings (e.g., UTF-8). + This will probably change. +
          +
      +
    • char + attribute values are a single UCS-2 character + (i.e., a 2-byte Unicode character, as in Java), which must be written + as 7-bit ASCII, JSON-like characters so that other characters can be specified + (see the String definition above for encoding of special characters, + with the addition of encoding a single quote as \'). + Char attribute values must be enclosed in single quotes (the inner quotes) + and double quotes (the outer quotes), e.g., "'a'", "'""'" + (a double quote character), + "'\''" (a single quote character), "'\t'" (a tab), + "'\u20AC'" (a Euro character). This system of using + single and double quotes is odd and cumbersome, but it is a way to + distinguish char values from Strings in a way that works with spreadsheets. + A value that looks like a char but is invalid will generate an error message. + As with Strings, the use of characters greater than #255 + is currently discouraged. +
    
    + +

    Suffix - +Note that in the attributes section of an NCCSV file, +all numeric attribute values must have a suffix letter (e.g., 'b') +to identify the numeric data type (e.g., byte). +But in the data section of an NCCSV file, +numeric data values must never have these suffix letters +(with the exception of 'L' for long integers) -- +the data type is specified by the *DATA_TYPE* attribute for the variable. + + +
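    To illustrate the suffix rules (a sketch only; ERDDAP's actual parser is more thorough), an attribute-value token produced by the splitter above can be converted to a typed Java value like this:

        //convert one unquoted attribute-value token to a typed value, per its suffix
        public static Object parseAttributeValue(String s) {
            if (s.equals("NaNf")) return Float.NaN;
            if (s.equals("NaNd")) return Double.NaN;
            //tokens that don't look numeric are String values
            if (s.isEmpty() || !s.matches("[-+.0-9].*")) return s;
            char suffix = s.charAt(s.length() - 1);
            String body = s.substring(0, s.length() - 1);
            try {
                switch (suffix) {
                    case 'b': return Byte.valueOf(body);    //e.g., 127b
                    case 's': return Short.valueOf(body);   //e.g., -32768s
                    case 'i': return Integer.valueOf(body); //e.g., 2147483647i
                    case 'L': return Long.valueOf(body);    //e.g., 0L
                    case 'f': return Float.valueOf(body);   //e.g., 23.58f
                    case 'd': return Double.valueOf(body);  //e.g., 1.87E-7d
                    default:  return s; //no suffix letter: a String value
                }
            } catch (NumberFormatException e) {
                //e.g., 128b or 32768s: looks like a number but is out of range
                throw new RuntimeException("Invalid attribute value: " + s);
            }
        }

    Note how a bare number with no suffix letter falls through to a String; that is why the units value 1 of the testLong variable in the sample file below is correctly read as a String.
    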

    *DATA_TYPE* - +The data type for each non-scalar +variable must be specified by a +*DATA_TYPE* attribute which can have a value of +byte, short, int, long, float, double, String, or char +(case insensitive). For example, +
    qc_flag,*DATA_TYPE*,byte +
    WARNING: Specifying the correct *DATA_TYPE* is your responsibility. +Specifying the wrong data type (e.g., int when you should have +specified float) will not generate an error message and may cause +information to be lost (e.g., float values will be rounded to ints) +when the NCCSV file is read by ERDDAP or converted into a NetCDF file. + + +
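    In the data section the logic is reversed: the type comes from the declared *DATA_TYPE*, not from a suffix letter. A minimal sketch of a per-cell converter (an illustration, not the converter software):

        //convert one data-section cell per the variable's declared *DATA_TYPE*;
        //data cells carry no suffix letters, except 'L' on long values
        //(empty cells, i.e., missing values, are discussed in the Data Section below)
        public static Object convertCell(String dataType, String cell) {
            switch (dataType.toLowerCase()) {
                case "byte":   return Byte.valueOf(cell);
                case "short":  return Short.valueOf(cell);
                case "int":    return Integer.valueOf(cell);
                case "long":   return Long.valueOf(cell.endsWith("L")?
                                   cell.substring(0, cell.length() - 1) : cell);
                case "float":  return cell.equals("NaN")? Float.NaN  : Float.valueOf(cell);
                case "double": return cell.equals("NaN")? Double.NaN : Double.valueOf(cell);
                case "char":   //a fuller version also strips optional single quotes
                               //and decodes backslash escapes like \t and \u20AC
                               return cell.isEmpty()? '\uFFFF' : cell.charAt(0);
                case "string": return cell;
                default: throw new IllegalArgumentException("Unknown *DATA_TYPE*: " + dataType);
            }
        }
    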

    char Discouraged - +The use of char data values is discouraged because they are not widely +supported in other file types. char values may be written in the +data section as single characters or as Strings (notably, if you need to +write a special character). If a String is found, the first character of the +String will be used as the char's value. Zero length Strings and missing values +will be converted to character \uFFFF. Note that NetCDF files only support +single byte chars, so any chars greater than char #255 will be converted to +'?' when writing NetCDF files. Unless a charset attribute is used +to specify a different charset for a char variable, the ISO-8859-1 charset +will be used. + +

    long Discouraged - +Although many file types (e.g., NetCDF-4 and json) and ERDDAP support +long data values, the use of long data values in +NCCSV files is currently discouraged because they are +not supported by Excel, CF, or NetCDF-3 files. +If you want to specify long data values in an NCCSV file +(or in the corresponding Excel spreadsheet), +you must use the suffix 'L' so that Excel doesn't treat the numbers as +floating point numbers with lower precision. +Currently, if an NCCSV file is converted into a NetCDF-3 .nc file, +long data values will be converted into double values, +causing a loss of precision for very large values +(less than -2^53 or greater than 2^53). +
    

    CF, ACDD, and ERDDAP Metadata - +Since it is envisioned that most NCCSV files, or the .nc files +created from them, will be read into ERDDAP, +it is strongly recommended that NCCSV files include the metadata attributes +which are required or recommended by ERDDAP (see +
    https://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#globalAttributes). +The attributes are almost all from the CF and ACDD metadata standards and +serve to properly describe the dataset (who, what, when, where, why, how) +to someone who otherwise knows +nothing about the dataset. Of particular importance, +almost all numeric variables should have a units +attribute with a UDUNITS-compatible value, e.g., +
    sst,units,degrees_C + +

    It is fine to include additional attributes which are not from the +CF or ACDD standards or from ERDDAP. + + +

      + +

    The Data Section

    + +

    Structure

    +The first line of the data section must have a case-sensitive, +comma-separated list of variable names. +All of the variables in this list must be described in the metadata section, +and vice versa (other than *GLOBAL* attributes +and *SCALAR* variables). + +

    The second through the penultimate lines of the data section must +have a comma-separated list of values. +Each row of data must have the same number of values as the comma-separated +list of variable names. +Spaces before or after values are not allowed because they cause problems +when importing the file into spreadsheet programs. +Each column in this section must contain only values of the *DATA_TYPE* +specified for that variable by the *DATA_TYPE* +attribute for that variable. +Unlike in the attributes section, numeric values in the data section +must not have suffix letters to denote the data type. +Unlike in the attributes section, char values in the data section +may omit the enclosing single quotes if they are not needed for disambiguation +(thus, ',' and '\'' must be quoted as shown here). +There may be any number of these data rows in an NCCSV file, +but currently ERDDAP can only read NCCSV files with up to +about 2 billion rows. +In general, it is recommended that you split large datasets into +multiple NCCSV data files with fewer than 1 million rows each. + +

    *END_DATA* - +The end of the data section must be denoted by a line with only +
    *END_DATA* + +

    If there is additional content in the NCCSV file after the +*END_DATA* line, it will be ignored when the NCCSV file is +converted into an .nc file. Such content is therefore discouraged. + +

    In a spreadsheet following these conventions, +the variable names and data values will be in multiple columns. +See the example below. + + +
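    Putting the pieces together, a data-section reader can be sketched as follows (illustrative only; it reuses the NccsvLineSplitter sketch from the metadata section above and tolerates the extra trailing commas that spreadsheet programs add):

        import java.io.BufferedReader;
        import java.util.List;

        //read the data section: a header line of variable names, then data rows
        //until the *END_DATA* line
        public static void readDataSection(BufferedReader in) throws Exception {
            List<String> names = NccsvLineSplitter.split(in.readLine());
            String line;
            while ((line = in.readLine()) != null && !line.equals("*END_DATA*")) {
                List<String> cells = NccsvLineSplitter.split(line);
                //drop empty trailing tokens left by spreadsheet-added commas
                while (cells.size() > names.size() &&
                       cells.get(cells.size() - 1).isEmpty())
                    cells.remove(cells.size() - 1);
                if (cells.size() != names.size())
                    throw new RuntimeException("Row has " + cells.size() +
                        " values; " + names.size() + " were expected.");
                //then convert each cell per its variable's *DATA_TYPE* (see above)
            }
        }
    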

    Missing Values

    + +

    Numeric missing values may be written as a numeric value identified by a +missing_value or _FillValue attribute for that variable. +For example, see the second value on this data row: +
    Bell M. Shimada,99,123.4 +
    This is the recommended way to handle missing values for +byte, short, int, and long variables. + +

    float or double NaN values may be written as NaN. +For example, see the second value on this data row: +
    Bell M. Shimada,NaN,123.4 + +

    String and numeric missing values may be indicated by an empty field. +For example, see the second value on this data row: +
    Bell M. Shimada,,123.4 + +

    For byte, short, int, and long variables, +the NCCSV converter utility and ERDDAP will convert an empty field +into the maximum allowed value for that data type (e.g., 127 for bytes). +If you do this, be sure to add a missing_value or +_FillValue attribute for that variable to identify this value, +e.g., +
    variableName,_FillValue,127b +
    For float and double variables, an empty field will be converted to NaN. + + +
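    A sketch of these rules (the values are exactly those stated above; this is not the converter software itself):

        //the value an empty data-section field is converted to, per *DATA_TYPE*
        public static Object emptyFieldValue(String dataType) {
            switch (dataType.toLowerCase()) {
                case "byte":   return Byte.MAX_VALUE;    //127; add _FillValue,127b
                case "short":  return Short.MAX_VALUE;   //32767
                case "int":    return Integer.MAX_VALUE; //2147483647
                case "long":   return Long.MAX_VALUE;    //9223372036854775807
                case "float":  return Float.NaN;
                case "double": return Double.NaN;
                case "char":   return '\uFFFF';          //see char Discouraged above
                case "string": return "";
                default: throw new IllegalArgumentException("Unexpected *DATA_TYPE*: " + dataType);
            }
        }
    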

    DateTime Values

    +DateTime values (including date values that don't have a time component) +may be represented as numbers or as Strings in NCCSV files. +A given dateTime variable may only have String values or only numeric values, +not both. The NCCSV software will convert String dateTime values into numeric +dateTime values when creating .nc files (as required by CF). +String dateTime values have the advantage of being easily readable by humans. + +

    DateTime values represented as numeric values must have a +units attribute which specifies the "units since dateTime" +as required by CF and specified by UDUNITS, e.g., +
    time,units,seconds since 1970-01-01T00:00:00Z + +

    DateTime values represented as String values must have a +String *DATA_TYPE* attribute and a units attribute +which specifies a dateTime pattern as specified by the +Java DateTimeFormatter class +
    (https://docs.oracle.com/javase/8/docs/api/index.html?java/time/DateTimeFormatter.html (external link)). For example, +
    
    time,units,yyyy-MM-dd'T'HH:mm:ssZ +
    All dateTime values for a given data variable must use the same format. +
    In most cases, the dateTime pattern you need for the units +attribute will be a variation of one of these formats: + +

      +
    • yyyy-MM-dd'T'HH:mm:ss.SSSZ - which is the ISO 8601:2004(E) dateTime format. +You may need a shortened version of this, +e.g., yyyy-MM-dd'T'HH:mm:ssZ (the only recommended format) or yyyy-MM-dd. +If you are changing the format of your dateTime values, +NCCSV strongly recommends that you change to this format (perhaps shortened). +This is the format that ERDDAP will use when it writes NCCSV files. +
    • yyyyMMddHHmmss.SSS - which is the compact version of the ISO 8601:2004 +dateTime format. You may need a shortened version of this, e.g., yyyyMMdd. +
    • M/d/yyyy H:mm:ss.SSS - which handles US-style dates and dateTimes like +"3/23/2017 16:22:03.000". You may need a shortened version of this, +e.g., M/d/yyyy . +
    • yyyyDDDHHmmssSSS - which is the year plus the zero-padded +day of the year (e.g., 001 = Jan 1, 365 = Dec 31 in a non-leap year; +this is sometimes erroneously called the Julian date). +You may need a shortened version of this, e.g., yyyyDDD . +
    
    + +

    Precision - +When a software library converts an .nc file into an NCCSV file, +all dateTime values will be written as Strings with the +ISO 8601:2004(E) dateTime format, e.g., 1970-01-01T00:00:00Z . +You can control the precision with the ERDDAP-specific attribute +time_precision. See +
    https://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#time_precision. + +

    Time Zone - +The default time zone for dateTime values is the Zulu (or GMT) +time zone, which has no daylight savings time periods. +If a dateTime variable has dateTime values from a different time zone, +you must specify this with the ERDDAP-specific attribute time_zone. +This is a requirement for ERDDAP (see +
    https://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#time_zone). + + +
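    As an illustration of these rules (a sketch, not the converter software), a String dateTime value can be converted to "seconds since 1970-01-01T00:00:00Z" with java.time, given the units pattern and the optional time_zone attribute value:

        import java.time.LocalDateTime;
        import java.time.ZoneId;
        import java.time.format.DateTimeFormatter;

        //parse a String dateTime value using the variable's units pattern;
        //timeZone is the time_zone attribute value, or null for the Zulu default
        //(date-only patterns such as yyyy-MM-dd would need LocalDate instead)
        public static long toEpochSeconds(String value, String unitsPattern, String timeZone) {
            DateTimeFormatter formatter = DateTimeFormatter.ofPattern(unitsPattern);
            LocalDateTime local = LocalDateTime.parse(value, formatter);
            ZoneId zone = ZoneId.of(timeZone == null? "Z" : timeZone);
            return local.atZone(zone).toEpochSecond();
        }

    For example, toEpochSeconds("3/23/2017 0:45:00", "M/d/yyyy H:mm:ss", null) returns 1490229900, i.e., 2017-03-23T00:45:00Z.
    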

    Degree Values

    +As required by CF, all degree values (e.g., for longitude and latitude) +must be specified as decimal-degree double values, +not as a degree°min'sec" String or as separate variables for +degrees, minutes, seconds. +The direction designators N, S, E, and W are not allowed. +Use negative values for West longitudes and for South latitudes. + + +
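    If source data arrives in degree-minute-second notation, the conversion is simple arithmetic; a small illustrative sketch:

        //convert degrees, minutes, seconds + a direction designator to the
        //required decimal-degree value; West and South become negative
        public static double toDecimalDegrees(int degrees, int minutes,
                double seconds, char direction) {
            double dd = degrees + minutes / 60.0 + seconds / 3600.0;
            return (direction == 'W' || direction == 'S')? -dd : dd;
        }

    For example, toDecimalDegrees(130, 15, 27, 'W') returns -130.2575.
    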

      + +

    DSG Feature Types

    +An NCCSV file may contain CF Discrete Sampling Geometry +
    (http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#discrete-sampling-geometries (external link)) data. It is the attributes that make this work: + +
      +
    1. As required by CF, the NCCSV file must include a line in the + metadata section identifying the *GLOBAL* + featureType attribute, e.g., +
      *GLOBAL*,featureType,trajectory +
    2. For use in ERDDAP, the NCCSV file must include a line or lines + in the metadata section identifying the cf_role=..._id variables, e.g., +
      ship,cf_role,trajectory_id +
      This is optional for CF, but required in NCCSV. +
    3. For use in ERDDAP, the NCCSV file must include a line or lines + in the metadata section identifying which variables are associated + with each timeSeries, trajectory, or profile as required by ERDDAP (see +
      https://coastwatch.pfeg.noaa.gov/erddap/download/setupDatasetsXml.html#cdm_data_type), e.g., +
      *GLOBAL*,cdm_trajectory_variables,"ship" +
      or +
      *GLOBAL*,cdm_timeseries_variables,"station_id,lat,lon" +
    + +

      + +

    Sample File

    + +Here is a sample file which demonstrates many of the features of an NCCSV file: +
    +*GLOBAL*,Conventions,"COARDS, CF-1.6, ACDD-1.3, NCCSV-1.0"
    +*GLOBAL*,cdm_trajectory_variables,"ship"
    +*GLOBAL*,creator_email,bob.simons@noaa.gov
    +*GLOBAL*,creator_name,Bob Simons
    +*GLOBAL*,creator_type,person
    +*GLOBAL*,creator_url,https://www.pfeg.noaa.gov
    +*GLOBAL*,featureType,trajectory
    +*GLOBAL*,infoUrl,https://coastwatch.pfeg.noaa.gov/erddap/downloads/NCCSV.html
    +*GLOBAL*,institution,"NOAA NMFS SWFSC ERD, NOAA PMEL"
    +*GLOBAL*,license,"""NCCSV Demonstration"" by Bob Simons and Steve Hankin is
    +    licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ ."
    +*GLOBAL*,keywords,"NOAA, sea, ship, sst, surface, temperature, trajectory"
    +*GLOBAL*,standard_name_vocabulary,CF Standard Name Table v29
    +*GLOBAL*,subsetVariables,"ship"
    +*GLOBAL*,summary,"This is a paragraph or two describing the dataset."
    +*GLOBAL*,title,"NCCSV Demonstration"
    +ship,*DATA_TYPE*,String
    +ship,cf_role,trajectory_id
    +time,*DATA_TYPE*,String
    +time,standard_name,time
    +time,units,"M/d/YYYY H:mm:ss"
    +lat,*DATA_TYPE*,double
    +lat,units,degrees_north
    +lon,*DATA_TYPE*,double
    +"lon","units","degrees_east"
    +status,*DATA_TYPE*,char
    +status,comment,"From http://some.url.gov/someProjectDocument , Table C"
    +testLong,*DATA_TYPE*,long
    +testLong,units,1
    +sst,*DATA_TYPE*,float
    +sst,standard_name,sea_surface_temperature
    +sst,actual_range,0.17f,23.58f
    +sst,units,degrees_C
    +sst,missing_value,99f
    +sst,testBytes,-128b,0b,127b
    +sst,testShorts,-32768s,0s,32767s
    +sst,testInts,-2147483648i,0i,2147483647i
    +sst,testLongs,-9223372036854775808L,0L,9223372036854775807L
    +sst,testFloats,-3.40282347e38f,0f,3.40282347E+38f
    +sst,testDoubles,-1.79769313486231570e308d,0d,1.79769313486231570E+308d
    +sst,testChars,"','","'""'","'\u20AC'"
    +sst,testStrings," a~,\n'z""\u20AC"
    +
    +*END_METADATA*
    +ship,time,lat,lon,status,testLong,sst
    +Bell M. Shimada,3/23/2017 0:45:00,28.0002,-130.2576,A,-9223372036854775808L,10.9
    +Bell M. Shimada,3/23/2017 1:45:00,28.0003,-130.3472,\u20AC,-1234567890123456L,
    +"Bell M. Shimada","3/23/2017 2:45:00",28.0001,-130.4305,"'\t'",0L,10.7
    +Bell M. Shimada,3/23/2017 12:45:00,27.9998,-131.5578,"'""'",1234567890123456L,99
    +Bell M. Shimada,3/23/2017 21:45:00,28.0003,-132.0014,\u00fc,9223372036854775806L,10.0
    +Bell M. Shimada,3/23/2017 23:45:00,28.0002,-132.1591,,,NaN
    
    +
    + +

    Notes: +

      +
    • This sample file includes many difficult cases (e.g., char and + long variables and difficult String values). + Most NCCSV files will be much simpler. +
    • The license line is broken into two lines here, but is just + one line in the sample file. +
    • \u20AC is the encoding of the Euro character and + \u00FC is the encoding of ü. +
    • Many Strings in the example are enclosed by double quotes + even though they don't have to be, + e.g., many global attributes including the title, the lon units + attribute, and the 3rd line of data. +
    
    • It would be clearer and better if the units attribute + for the testLong variable were written in double quotes indicating + it is a String value. But the current representation (1, + without quotes) will be interpreted correctly as a String, + not an integer, because there is no 'i' suffix. +
    • Unlike other numeric data types, the long values + in the data section have the suffix ('L') that identifies + their numeric data type. + This is required to prevent spreadsheets from interpreting the + values as floating point numbers and thus losing precision. +
    + + +

      + +

    Spreadsheets

    + +In a spreadsheet, as in an NCCSV file: +
      +
    • Write numeric attribute values as specified for NCCSV files +(e.g., with a suffix letter, such as 'f', to identify the attribute's data type). +
    
    • In Strings, write all characters less than ASCII character #32 or +greater than character #126 as either a JSON-like backslashed character +(e.g., \n for newline) or as the hexadecimal Unicode character number +(case insensitive) with the syntax +\uhhhh +(e.g., \u20AC for the Euro sign). +Use \n (2 characters: backslash and 'n') +to indicate a new line, not Alt Enter. +
    + +

    The only differences between NCCSV files and the analogous spreadsheets +which follow these conventions are: +
    

      +
    • NCCSV files have values on a line separated by commas. +
      Spreadsheets have values on a line in adjacent cells. +
    • Strings in NCCSV files are often surrounded by double quotes. +
      Strings in spreadsheets are never surrounded by double quotes. +
    • Internal double quotes ( " ) in Strings in NCCSV files appear as + 2 double quotes. +
      Internal double quotes in spreadsheets appear as 1 double quote. +
    + +

    If a spreadsheet following these conventions is saved as a CSV file, +there will often be extra commas at the end of many of the lines. +The software that converts NCCSV files into .nc files will ignore the extra commas. + + +

    Excel

    +

    To import an NCCSV file into Excel: +

      +
    1. Choose File : Open . +
    2. Change the file type to Text Files (*.prn; *.txt; *.csv) . +
    3. Search the directories and click on the NCCSV .csv file. +
    4. Click Open . +
    + +

    To create an NCCSV file from an Excel spreadsheet: + +

      +
    1. Choose File : Save As . +
    2. Change the Save as type: to be CSV (Comma delimited) (*.csv) . +
    3. In response to the compatibility warning, click Yes . +
    4. The resulting .csv file will have extra commas at the end + of all the rows other than the CSV rows. You can ignore them. +
    + +

    In Excel, the sample NCCSV file above appears as + +

    +sampleExcel_1.png +
    sampleExcel_2.png + + +

    Google Sheets

    + +To import an NCCSV file into Google Sheets: + +
      +
    1. Choose File : Open . +
    2. Choose to Upload a file and click on + Upload a file from your computer . + Select the file, then click Open . +
      +
      Or, choose My Drive and change the file type drop down + selection to All file types . + Select the file, then click Open . +
    + +

    To create an NCCSV file from a Google Sheets spreadsheet: + +

      +
    1. Choose File : Download as : Comma-separated values (.csv, current sheet) . +
        2. The resulting .csv file will have extra commas at the end of all the rows + other than the CSV rows. Ignore them. +
    
    + + +

      + +

    Problems/Warnings

    +
      +
    • If you create an NCCSV file with a text editor or if you create an + analogous spreadsheet in a spreadsheet program, the text editor + or the spreadsheet program will not check that you followed these + conventions correctly. It is up to you to follow these conventions correctly. +
    • The conversion of a spreadsheet following this convention + into a csv file (thus, an NCCSV file) will lead to extra commas + at the end of all the rows other than the CSV data rows. + Ignore them. + The software that converts NCCSV files into .nc files will ignore them. +
        • If an NCCSV file has excess commas at the end of rows, + you can remove them by converting the NCCSV file into a NetCDF file + and then converting the NetCDF file back into an NCCSV file. +
        • When you try to convert an NCCSV file into a NetCDF file, some errors + will be detected by the software and will generate error messages, + causing the conversion to fail. + Other problems are hard or impossible to catch and will not + generate error messages or warnings. Still other problems + (e.g., excess commas at the end of rows) will simply be ignored. + The file converter will do only minimal checking of the correctness + of the resulting NetCDF file, e.g., with regard to CF compliance. + It is the file creator's and file user's responsibility to check + that the results of the conversion are as desired and correct. + Two ways to check are: + +
    
    + +
    + + + \ No newline at end of file diff --git a/download/changes.html b/download/changes.html index 9cbe50fc1..5aee5c66d 100644 --- a/download/changes.html +++ b/download/changes.html @@ -42,43 +42,326 @@

    ERDDAP Changes

    + - @@ -93,10 +376,6 @@

    Changes in ERDDAP version 1.74 (released 2016-10-0 Previously, the middle of a long title was replaced by " ... ". Thanks to Margaret O'Brien, LTER, and EML.
      -
  • Things ERDDAP Administrators Need to Know and Do:
      @@ -246,7 +525,7 @@

    Changes in ERDDAP version 1.74 (released 2016-10-0 Thanks to John Kerfoot.
     
  • BUG FIX: If <quickRestart> was true in setup.xml and you requested - data from an EDDTableFrom...Files dataset that used , + data from an EDDTableFrom...Files dataset that used <updateEveryNMillis>, the first request to the dataset would fail, but subsequent requests would succeed. Now the first request won't fail. Thanks to John Kerfoot. @@ -306,6 +585,7 @@

    Changes in ERDDAP version 1.74 (released 2016-10-0 In either case, individual variable names may be surrounded by double quotes, e.g., if the name has an internal space. +

  • @@ -552,7 +832,7 @@

    Changes in ERDDAP version 1.70 (released 2016-04-1 you can now tell ERDDAP not to try to subscribe to remote ERDDAP datasets. Thanks to Filipe Rocha Freire.
    In your setup.xml, right before <fontFamily>, please add
    -<!-- Normally, if you have a EDDGridFromErddap or EDDTableFromErddap 
    +<!-- Normally, if you have a EDDGridFromErddap or EDDTableFromErddap 
     dataset in your datasets.xml, it will try to subscribe to the remote 
     ERDDAP dataset so that the local dataset is kept perfectly up-to-date.
     If this ERDDAP is not publicly accessible (http://localhost), or its
    @@ -718,7 +998,7 @@ 

    Changes in ERDDAP version 1.68 (released 2016-02-0 EDDTableCopy, or EDDTableFromEDDGrid has active="false", that child dataset is now skipped. - +

    @@ -3615,7 +3895,7 @@

    Contact


     
    -

    ERDDAP, Version 1.74 +

    ERDDAP, Version 1.76
    Disclaimers | Privacy Policy diff --git a/download/grids.html b/download/grids.html index 853af3b97..a423cb981 100644 --- a/download/grids.html +++ b/download/grids.html @@ -85,7 +85,8 @@

    Heavy Loads / Constraints

    connection, ERDDAP's responses will be constrained by how fast ERDDAP can get data from the data sources and how fast ERDDAP can return data to the clients. The only solution is to get a faster Internet connection. -
  • Memory - If there are many simultaneous requests, ERDDAP can run out of memory +
  • Memory - + If there are many simultaneous requests, ERDDAP can run out of memory and temporarily refuse new requests. (ERDDAP has a couple of mechanisms to avoid this and to minimize the consequences if it does @@ -99,7 +100,8 @@

    Heavy Loads / Constraints

    for ERDDAP/Tomcat. An ERDDAP getting heavy usage on a computer with a 64-bit server with 8GB of memory and -Xmx set to 4000M is rarely, if ever, constrained by memory. -
  • Hard drive bandwidth - Accessing data stored on the server's hard drive +
  • Hard drive bandwidth - + Accessing data stored on the server's hard drive is vastly faster than accessing remote data. Even so, if the ERDDAP server has a very high bandwidth Internet connection, @@ -109,7 +111,8 @@

    Heavy Loads / Constraints

    or SSD drives (if it makes sense cost-wise). Another solution is to store different datasets on different drives, so that the cumulative hard drive bandwidth is much higher. -
  • Too many files in a cache directory - +
  • Too many files + in a cache directory - ERDDAP caches all images, but only caches the data for certain types of data requests. It is possible for the cache directory for a dataset to have a large number of files temporarily. This will slow down requests to see @@ -118,13 +121,21 @@

    Heavy Loads / Constraints

    lets you set how long a file can be in the cache before it is deleted. Setting a smaller number would minimize this problem. -
-• CPU - Making graphs (including maps)
-  is the only thing that takes significant CPU time
-  (roughly 0.2 - 1 second per graph).
-  So if there were many simultaneous unique requests for graphs (WMS clients!),
+• CPU - Only two things take a lot of CPU time:
+  • NetCDF 4 and HDF 5 now support internal compression of data.
+    Decompressing a large compressed NetCDF 4 / HDF 5 data file can take 10
+    or more seconds. So multiple simultaneous requests to datasets with
+    data stored in compressed files can put a severe strain on any server.
+    If this is a problem, the solution is to store popular datasets
+    in uncompressed files, or get a server with a CPU with more cores.
+  • Making graphs (including maps): roughly 0.2 - 1 second per graph.
+    So if there were many simultaneous unique requests for graphs
+    (WMS clients often make 6 simultaneous requests!),
there could be a CPU limitation.
-On a multi-core server, it would take a lot of requests before this became a problem.
+When multiple users are running WMS clients, this becomes a problem.
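One hedged way to store a popular dataset in uncompressed files (a sketch using the nccopy utility from the netCDF-C tools; the file names are hypothetical):

    # rewrite a compressed NetCDF-4 file with deflate level 0 (no compression)
    nccopy -d 0 compressedSST.nc uncompressedSST.nc

This trades disk space for the CPU time that would otherwise be spent decompressing on every request.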
@@ -234,9 +245,13 @@ Grids, Clusters, and Federations

The parts of the grid are:
-A) For every ERDDAP data source that
-   has a high-bandwidth server, use EDDGridFromErddap or
-   EDDTableFromERDDAP to serve the data in the Composite ERDDAP.
+A) For every remote data source that
+   has a high-bandwidth OPeNDAP server, you can connect directly
+   to the remote server.
+   If the remote server is an ERDDAP, use EDDGridFromErddap or
+   EDDTableFromERDDAP to serve the data in the Composite ERDDAP.
+   If the remote server is some other type of DAP server,
+   e.g., THREDDS, Hyrax, or GrADS, use EDDGridFromDap.
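For example, in the composite ERDDAP's datasets.xml an EDDGridFromErddap entry is just a lightweight pointer to the remote dataset (a sketch; the datasetID and sourceUrl are hypothetical):

    <dataset type="EDDGridFromErddap" datasetID="remoteSST" active="true">
        <sourceUrl>https://remoteServer.example.gov/erddap/griddap/remoteSST</sourceUrl>
    </dataset>

Data requests to the composite ERDDAP are then redirected to the remote ERDDAP, so the composite server does very little work per request.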

B) For every ERDDAP-able data source (a data source from which ERDDAP

@@ -260,6 +275,7 @@ Grids, Clusters, and Federations

EDDGridCopy and/or EDDTableCopy.
+• B servers must be publicly accessible.

C) For every ERDDAP-able data source

@@ -272,6 +288,7 @@ Grids, Clusters, and Federations

EDDTableCopy. If several such ERDDAPs aren't getting many requests for data, you can consolidate them into one ERDDAP.
+C servers must be publicly accessible.

D) The composite ERDDAP is a regular ERDDAP except that it just serves data from other ERDDAPs.

@@ -357,31 +374,42 @@ Grids, Clusters, and Federations

Failures - What happens if...

-• There is a burst of requests for one dataset (e.g., all students in a class simultaneously
-  request similar data)? Only the ERDDAP serving that dataset will be overwhelmed and
+• There is a burst of requests for one dataset (e.g., all students in a class
+  simultaneously request similar data)?
+  Only the ERDDAP serving that dataset will be overwhelmed and
slow down or refuse requests. The composite ERDDAP and other ERDDAPs won't be affected. Since the limiting factor for a given dataset within the system is the hard drive with the data (not ERDDAP), the only solution (not immediate) is to make a copy of the dataset on a different server+hardDrive+ERDDAP.
-• An A, B, or C ERDDAP fails (e.g., hard drive failure)? Only the dataset(s) served by that ERDDAP are affected.
+• An A, B, or C ERDDAP fails (e.g., hard drive failure)?
+  Only the dataset(s) served by that ERDDAP are affected.
If the dataset(s) is mirrored on another server+hardDrive+ERDDAP, the effect is minimal. If the problem is a hard drive failure in a level 5 or 6 RAID, you just replace the drive and have the RAID rebuild the data on the drive.
• The composite ERDDAP fails?
-  If you have no hot or warm failover,
-  the system is effectively down until you set up a replacement.
-  That's not good. So if this is a concern, set up multiple composite ERDDAPs
-  or a hot or warm failover. Then the effect is minimal.
-  Failures of the composite ERDDAP should be rare because it has minimal hard drive activity.
+  If you want to make a system with very high availability (external link),
+  you can set up multiple composite ERDDAPs, using something like
+  NGINX (external link) or Traefik (external link) to handle load balancing.
+  Note that a given composite ERDDAP can handle a large number of requests
+  from a large number of users, because
+  requests for metadata are small and are handled by information that is in memory, and
+  requests for data (which may be large) are redirected to the child ERDDAPs.
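A hedged sketch of that load-balancing setup in NGINX (the host names and ports are hypothetical; Traefik would be configured analogously):

    # /etc/nginx/conf.d/erddap.conf -- spread requests over two composite ERDDAPs
    upstream erddap_composites {
        server composite1.example.gov:8080;
        server composite2.example.gov:8080;
    }
    server {
        listen 80;
        location /erddap/ {
            proxy_pass http://erddap_composites;
        }
    }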
-Simple, Scalable - This system is easy to set up and administer,
+Simple, Scalable
+  - This system is easy to set up and administer,
and easily extensible when any part of it becomes over-burdened. The only real limitation for a given data center is the data center's bandwidth.

@@ -435,7 +463,8 @@ Grids, Clusters, and Federations

offer all the Internet bandwidth you need, but don't solve the price problem.
-For general information on designing scalable, high capacity, fault-tolerant systems,
+For general information on designing scalable,
+  high capacity, fault-tolerant systems,
see Michael T. Nygard's book Release It (external link).

Cloud Computing

it is that we are using and buying a lot of the service.)
-• The subsetting problem:
+• The subsetting problem:
The only way to efficiently distribute data from data files is to have the program which is distributing the data (e.g., ERDDAP) running on a server which has the data stored on a local hard drive

@@ -538,29 +567,15 @@ Cloud Computing

has to request the entire file or big chunks of a file from a non-local (hence slower) data storage system and then extract a subset. If the cloud setup doesn't give ERDDAP fast access to byte ranges of the files
-(as with local files),
+(as fast as with local files),
ERDDAP's access to the data will be a severe bottleneck and negate other benefits of using a cloud service.
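For a sense of what "fast access to byte ranges" means, this is the kind of request ERDDAP's underlying libraries issue against HTTP-range-capable storage (a sketch; the URL and offsets are hypothetical):

    # fetch bytes 1048576-2097151 of a remote file via an HTTP Range request
    curl -s -r 1048576-2097151 https://storage.example.com/data/sst.nc -o chunk.bin

If the storage system serves such ranges slowly, or only serves whole objects, every subset request degrades into a bulk transfer.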

-One possible solution is that ERDDAP and the underlying libraries
-could be rewritten to use a MapReduce (external link) approach to the problem,
-i.e., make multiple, simultaneous requests to the file system
-and then merge the responses.
-Unfortunately, that would be a big project for multiple groups.
-It competes with many other projects for the limited development
-resources available for ERDDAP, netcdf-java, and other libraries.
-And it seems like a lot of effort given that the common solution
-(getting local access to the files) already exists.

Thanks -
-Many thanks to Matthew Arrott and his group for their work on putting ERDDAP in
+Many thanks to Matthew Arrott and his group in the original OOI effort
+for their work on putting ERDDAP in
the cloud and the resulting discussions.

@@ -577,7 +592,7 @@ Cloud Computing

    and include the ERDDAP URL directly related to your question or comment.
     
-ERDDAP, Version 1.74
+ERDDAP, Version 1.76
Disclaimers | Privacy Policy

diff --git a/download/sampleExcel_1.png b/download/sampleExcel_1.png
new file mode 100644
index 0000000000000000000000000000000000000000..75f95638c5901bcd59a0bf5a54da9132de8d85fa
GIT binary patch
literal 81801
[binary PNG data omitted]
zdu6Y$$}PWRD_UsK-51vfc)vmPt94=ex`xG|zKV1=6M)+o94oK9r1->?is-`&iQ6dNHuH)=2Oh&FAA2rZ|-GAgv z@A{~Ost^0T47D}E!=E{MUiK^PFdd%`i_PhknY80hMfxc(T+yke1JM0WZO#6$V(n=F zA4Y05QeY`AxtY#?N(%Rxi{rbyNQ@=oSlBYeed{NbVvLpK%fnZuoWBE^5NouJK9E24 zAl(UP`Bzc(s#vW*i_5)IoKX-%7&&n=XVlt_c{Px)#&y8FJq0u7p~xg=v5z!yG*&#N z#COaeJ-^kBU8|gdpArYJH%N{kV<-Gf6SRVjS?QUi^I-a(XSm_oDc_c~=QNq)!x5n| z_zw{1mrVS&_S>jI(;7u1;IzsC8_X`+`(DEdKo2sMzL9=pdesXckYq22r--yBK`}7{ zU8A$VXElA30tizXO-CfKs^Q|1);RAkI6fLx8}-O1FfVC}^~fr-!{rvZm*IjHC-ZMA~P zYHa{o8*nOYeTpo2E&~aQ7-yxS&A!Q8-Fzl%it=*}Pm~!Ad%yES7SeQ)p=MS4BX`>C z>~OqjHTqRXSodk}?9P|(sA_&0=wjMH|KQJeb7%U>UUNpGhb&>o-qDq@1*H0_)#-8zs65~}6 zZfuY`y_>Jd*gb5?Z^k=29k_cP7A!@`r_`#?r8ZVnj5Se8O(T1-*%A$(J=2%}rfdij z8V1zlfHb4ny!oRXt*myey=II_i4wqgiPsN#FFnpHn5Cyn)mbvOaCisU5#ieVJ+zR^ z<8Rv&A%EwK5U7YjA=iUErpXcx8{tH=mfb1oA#2DVSsCm}1 z`e{OsieFeSZv1%3%S0}8FqEz~g8*_(WGjTm@39ER(BhNV>A3ee05J5&_ zCQ6wUtB>P>46_Zirm1vB97)PHePhzIqUD*>94xsZ zdZ4!?r?*6mV1}8sG7cC&zit{a4NJRoxCP!s%YifXyhq6)7N7xoIC9SP2#B;f*ZHPi zmQKe5YZFFkWpfk6L0Jrr8KcMXJKSC{NW)609oyl08&!iNHeSjR&gDeJK<1aVX@YC{i9S(q+{IfctPg zCcnX%t(%(@J$Clob zvHYET6Bj}7$JY>mEo4vv`Rs0-!KZEM(i)1-n{pd>sH)!9!toBE3vx)}0!DqqjtPh8 zu-O2|JqpT_Z7Ss#bn6*$+8V?UFfy6+1bjB4<0T5a5)goO1%=zzuy`?*?O$&e=9BqjcCPXURA(nFI`T+dYw_|bGBvjYMXiSTWRDrThT=%OIOJRyNUZW z_w&r}gwOCck_^sl{9lj1bZfGo9Z(&(V*!ZOG_eBGAEUWw#*GxV(g)csn1*m}<&G^z z%uY8V6D5vq8`P`##kHtvRjguvJxx6i!N@WBgG<~7s&{3%-4?4PF6HL=PH^n2gXr(k zw*6pq5_1oUE5G~vZuz1`yzaXnI*}kf+FX%W3+tjS9P}}{(W({oN9^5(BvoR+^pizE zM9HALx^oFpKE*{$%h$VF@)c${sEc>_1KI3&9v>`j6P=RZz4eG#mwykY>=Mq4k#552uJ3B+fFzfp`H1 zOF>^v$D}DvKhSnBND5&O)f>y7c=}X|*=8%DKc`)PSPYn)uG(RF(~N_~4a+qMpDm&Izg`eS`=%<)0Q?j9Y)dtvHE+Uha%;G)i%WR?tB-dlnmQcWE7!`}KRD)TXW+ttrjI6ZI)X(|y6(##X4)}0L z;UnR1hFF0whoY!3hPZpEC`N^M4~y2&atU(ZjM@WgES9;Xgu2;1bk$49&Hv~nzZ0A2 zI1ilA@CLZtv-WDcv&*PE&=me(`i5sf=L1CPP|; zjR${U7&S2=dTIS#>Z3VHPLzzm?8^jY`;xArLKZH}y`hHYaGN^m@jmts0#tf!s4Oz0Ubh z^WF(eYHUdICw!MA7yc&&4|;E!NiBW)%C9Sow5{q^nv~aGV9m{rlj^+e)_S>~DF@}o zQDc!+%T#MnJy)@O*U=~?8-2DQ@Xn+>tzQ=17K&D_OU+RS%IG$69#!BMk<{W>$_-)^ z#DQ`w*t)i!$-1Ck#3fNCT1Irr`hx~rhQDErE0c2kHJKGI#_+c~wESCKmymqBk0N?m zYxgA%(Qr=V${(a#*dcx`j$#Cz(HGY8;dhSyvT#~>N?P@#V;JP?d#wFG4g}4CL`Yy~W)Q-HipTs>Xu;dHJ_n2LB}D)pm5rpyi5 zy6<^7<Go=h>I<0EEd5n| zm#GpYpi9ISe|gjR75W!_f}3D0Kl!5jm)nkF0Jx7NJ8G8o&ALk>JX+`=X&4w18T=*Q zxZ5hq=`_Dq%ne>SL(e6xUd<)By(Eyg4dk&w*i$5%GzAk*3ynd)>DbKOrL1(HbnW+r zK`bMWINX~`%JMs6r94T9!{vwb>s2zp-^tV~HEQFgG5wbe^*2dT#7^ViA=Oqa9=>(y z6M%)|u@WTkmWct&cR_^Fh~$FKM1|2WQ0zcs!b@R$E-B1CSxztMbJ_BnL#3+`g4pdt z4$TUUb*&5oCpxPR+Z7*kVsN?> z1h)_e1|aYXnL48ua#)J=yP25n3qsnsO_NW0NYVP3je_h@X+5L%Y`hEH<6V2+!ky-f z;#Xt^3z~S(p+l>W1Re^<-vDmxGUWbkrCWaDu$)%l-OiyAJ_JX6hlPs}uaj@#hALgM zBdoKHXp?ZgVt1|D$h@M`9ht7rwCuq?e;c#61t*#nvzcq6BWChmKY0M?w*8J&ZWd!6 z*aERpkKf_CEX$Pb-ilsL1aKh5ajSeZ{l(Lligz_gTcXe@Y(Wk-+Ta%ss_ZWD>os(f zvCNeOac#u-w0z;t6Qw5ye4f?2x3de3II;)ngv$VSECkes$g0Y}J zs;v18x*AwaD(;1mNqE~&1QzvF9Xie`&)}wKT=GWC>!L$qr%A%&VgZ=5NpE@;v&tQR z+TCTfyyYn>;j+|~5AjSNJK5-%ZxR~8o6RVV_t%H=uaUGi4OVU^Cwd*cx^W;FXyx>M zcn1??juq-X214vC7?gnOfKPm&N32B5>luQ4M-zIW0|qSM8`WRkr=pDS76xkhe0IwN zw0l$F95cZZP?$!z=M48n&Yk=%YN7%P1RaGNd7fWao~oF^Cxd|qB%w!$Zs~(oK#}7= zKf{|X$sn48lh@F+*8~KXjj*oPl$jVG25fm=WlzpZoRV?}1=YIiA8TRr3 literal 0 HcmV?d00001 diff --git a/download/sampleExcel_2.png b/download/sampleExcel_2.png new file mode 100644 index 0000000000000000000000000000000000000000..7ed72beaefc4a62c163c7643539a80332f27e3a2 GIT binary patch literal 20489 zcmcG$WmFzp)-H;>LvRQV!8H&xNN{&|3GQxz;2JzQ3GVI|+}+*XJ@_fe?(W_F^*QH` zd&m6;uT`z8Rck&rSFo(K2qGLV90&*qqL`?VJO~KbPY@7La2PP)6}^EoI}i{e5HTTs zMVFMLX6rD#zPMGK6ieDB=kJd;JJrxr#c}+>C6h{5al0D 
za?991%2&=4=I~Hk<7n;R9meTdCe$RyX5hVHVhKGLcT%Y@RFbk_vi(|r{ZZR4BX0C9 zj5er>WTU#qQp$2=^C^OjlLq;_6Tzp@L60aLF$y)w^G70>E?wcyP%MUZBO&2)aN8}Z*gn{Uw&#bsCiC90WEg4N8H*@=PTODTQ!|3y#TbSSq2>sua2!iLN2+w|m3jBiMBtLs3@D(CK_y$+te+V4)(IHY8-!fT=d?D=*7Vl89 z;75mgU{Y)!hdIZu!4O4&-Pu0T zO6?mOqFpw_OJ7J1wP&fXPh4CatZqaVMR(?#E`RtaTpcU^Jqt=NJ{WRcD4GRz4^5&_ z`wA(%y(^duHK$VF(;(RzVmx!2a8$9tJeJ0lXiV!H&qJVaxu_28#`X!O$}yj})8d`S z&isM-X7#~Aa@N*KY4Y^6b1UmnDHP1(wo zEO2kmd=OQflZBF`I;LV-2GG0PGw4fqW4=bGQ<_iXwtGdWCK$<2u}W!3xnJ@5eYqf( zj&A-nUT_;In^&BZs?$YKK)k0G?7p&`QLR83({nX;xM*&Dj0^b=I7cqJy0j;jz(3c| z!>Xm(b*sDFe`cht6dR8VS2e`l98MgEWOCe2jg}sJW2&Cy)Y8Q~clc?%VMt~~eA_Qd zJ9`FY(u}{LM1G=6)*O%DO;nZd1pncE`N z=JPk611NCS%H@eaYQmb|35se-t0!M4R-{dT4OqZC7sE*-6&6^48K)7rb$v8_z%9E} zc23^bS-^8#8VGc~izFiWwXuD3bJKeyaTk1d!G6=b9DY$Bn!K5BU}&AboWplMiCu~C z^TfTezr6+j_T1$-m7)n!WbL&&{P;cJTma4$@_sCp+IWmVXwYR3dUASdgkPT>d-R8BCKylLz11%_Dg{yhG}aWl>+zcu zMwFc|(tf*r>ekaU&U-IYrS$H2U!$i3UCgzjAt^vUY>4Iq&O*3R?t{RO>3*x(#%tk1 z4qSQ+vtp9+_YX06YM;SJ$(q&U`V)mnqC{zBFvw4F^W-7;fB$%_=Gl(sl2EcA+;g70 zjA>{R5OtgK(2fT#NAXnE#btAZtZIcgGW!E684S_#L%rtv*X2rb+doenPja2GpYa&v zPDLVrb2vJHoduNzF6Vfm8ykC?>Qc_HQ%egw2opzc@(2+O-%j`&J+Nq`rC0EO^VtdR zzy#qZ5vH0n3#0>Qj)n10a++l(5>>Tf`)ipyweVW+tVC6|Sx?L*rA2&QN4>sC-RI}$ z=ZRah;+FHS{t{e7w(TUC)1ilDzvYJp?~ggpJkH z^3==PxK77KuXP`PD83g^SIW#8zmhuk1nb5%*J?xlc0m-x6Ey5CoXh}SrzQr}DzBnF1`B_;~UVpL${ zoRimun8WxczlPPaN=+0*CvTAq@J|WTQf*ywiR~VGIvDM zudEhJHWJxReJeChD>|V*7w+nC2{ZoorOb^#x47T-FF)muwjxW^PhhRwm&J0WHlF{W zSxVG@cK5z#*cA;mP8Eg_HVGk>Zy*)k(5)L=6@;Yj7Y-$FIl_u_*!n0X8 zCp2-OaAs511sAtYSZr0Wavz!sZ?B8ltqDChz*K}>VX9dfWRg0Xlu?@A0n5A$Hj`!o z*g4H@)>Nl*xoMFr7@n0-cD9OWhTVRJWI<96KWb%v;ufqyu~9%dg*fj-m33KGK}{0W zoe{R|`Gw(-EY}qZClAdWjcTp#QGm+^_sw8puRJ1X(zr_}@e=DE@ok?=MKSA4O>72i`aHD#WsmMEf( znUYX^@uB{b}t#8P0y=c!G#t~HR3b|~NTbi~g3Y-AU#RZdh! zGeH+NN-?`?68WL5{m$ZHLe^u*QW@gH)Z?U?c2+lba~YXdqhI??X7ZEfx{j{C73AZy zTfbJ6*cgMX>#WlU#Q6ZT1a)=#182e4Um^I&WNVdWb*>7k^urv&AF}3e zl5zG=!`Y2s#>KT8uU}ua|E=(vTPyS%6c;V@oQNiG%Cd28*VEvZ|}&(7S@>LHfZ^c2S)7nvtZ%Y!ucV{_W~f>fuiQ4GXN+T+Bvhq`JS| z+HT7_ZGY49m|NRjbVNyT@QS4nu(@i2p!*E^@Zc+3ke}?XmxL;v2TNAYRvcnkB{Ve4 z)t*?y*rk5=;^m=1T+2cA?F1)e_ug(dHd^}a{5WOH{4{@cHDGT*Fuluzd?@MTOtkS8 zSm6iyY&xdo`?XbRXzWte4;(X7rlpr!4o=-b((UJ&gw>)_7rF=oJF?|D6aKN-^WgT7 z|5@k5gz_vVq+>k_vcO7z1)j&GNT-!*3DITa^QO(I8ht8hM7~ONM&0xODf+h88T*4T z5s&|UR`ZSK<2{xP*J)a9t<3y{Nt2_=hSZN~Z6?l(BaVaJ_?7zjsz}3o_LSjIrtP-j z_q!E4Sv*(%Y5^M=&5?}dVBF^71cD6WB>`he8@23#Eye5E)Se_u$Ltw1i>2J7N^T{f zNI45){37eACkhakX1a}<)+Q(?3!oN>&6MZFjf7Ofo4ag3(VZP_`;j}7`4R>ipq+AA zal;b_;pO7qfA1uZ{h=;S&R#K{B-uw9PpK|wQ^VWzG$l`6UkTBij^bGo@uhG-3dGYw z1wUxO#u(N4_Fi&v%HQWRD)jb%Ea%~dZbu|z>eYjbM5&#lZ$mSG2KZ4kYKHfb005<3 z*2^G2-U+!?D@l^0!)|1%;s<1Vm;eQ2x>S>Z2*a$J7jCrHi5SL?DkJ$VfuDdB4So#@ zP!UA=K+}d~Qe57*!%1^rIQV4~zku*|dA|Mi!8PZLqy9T+ z6Z<(*<@VV{au)of*~FdhK9QqQ;@uZ=0I^5hWYyw%FqAB^qH8taUC}BM5$=fVOZa*q z?O!|F3&|>0_!=&c2nPqiFKv_>$p}+ozEBXNr4DY0!VDA9CZwX>0w~ zP2BrOiB`SK69xa<*Gdj9j&-(uVzszP>R06HX*DEzyN`D;xO4OR?aSmcD=&*}J0s!e zi`b@?cOvvCuV*Fa!{-$GU+4~Sp`(A#F?fl2KTSNJHV31R2&=XQo1{gQvA$df{@zDrI{&fQ9)_Qd1o)JupZ$?)d3F{X9(RpgO|os|^@J9|tr z%`$v%LA-phkXf=MT1Bc1nFB+1ygV!}k5b=)_d?=Dh!&E6)S*u44YLS=Hk0_RA66p* zPi^I(ZX0)fERwlxA?nN7>;5|gP;x&$8tT6@Hd-ruTcp{%k*r2F$!ZyH5qoexT$D2f z=X>U_I7M5eD2H)y4ma>Mv@1U92X!1vTZ=SC*LlSTiBTb~7@B9eLRfpjlS(1ge06co z(c&v>s{PX9>uR6fG1IO{`Z89RzfCcx{zQ0VnECSD7NKMB#p{6*U|5~ped7AI`ZO*$ zNm|q)Tau&lsaY!31)KJQ87%cxWK--l#kf-dO-M>bq)HC1P{OQ)hgbVGau3rTLsJ+3 z%}fXwoG_j?`%25z_E4s^CI_UK>&|p|dPHu^I-a98Fs$PDQb~T2rFOJ}G5*y*-4T(Z z)8a%kb|gpSy|kB>^CT)wpYbjJ6i9%_wt7gnx!6NaqA7<1ccoXf;~z8pv^%<0V{m9_ zbGyatMg7+>!s|F|xd9<0dvd9JJqH 
z_gb_6CTFHu_;gL)ube14wMzFJAe53;?<%usjWtqB{HBh=IrC>?VCSS4 zl;KzV=P_v|OUkNW&JqSMZR#NrJ~_^DW93Svr|-A<%nZXwcr`oL(8Sdd7vC~zPdCZ+ znIFm$SAtPJl6PUed}77!l5Zdiz~Vo|ypxb1xR(IR%|h6|lzsCW(K9oLSlu6w@_Ic1yJE(va;tcIZHkwHv zLy_R-eYe&VFHl(Yh}2&mXdK6Y?xC-I_@R7nzXIb7W96niGT?Vpnovg8fO-7@)fbch zBCLQHZRh6X{Cqh^=iL`@WLv&YRA<>|PlLZ7V-1!2&oN$=l)tUv|6CIAebCBwB>Z5Qyaf?Q`E(j>4~p;}2OsZ170sl( z&2)4PT|vw5)-~lXbpNc&$5_raUYLZbWg6H!`nqrhzk4XWbuxPVT3LS>wJL;N9qa4m zJAD_W&J{I}tSFJMCm+y{QjFXn7aP6qGQlA$+b~E2aY#f) z^09GBg|{5LiE;QHd_}r0o<03@Y*RK=w+D9iA)$F-Zo;bZ-9bQZk5=m^-f#BFwIZD3 zgGB8I+Y7bJO|F;W@VUO|dXwW#+-hv=xeHMS1BqL~@j0gIO8TD^O9#X@l9Z!*`+QhY z1YrK5RybCT=Fn5t8i${rZAp^UR~o)ITV})|Pp^udq<+tG#+={fL;y+s()TVZG-Sng&BcJydo%}4+oR(>8_1GvIGH8#S7 z?Af&_WfG3;??n&yi~eu}_|kM(=}YjS@AOKDj@c&U0`Y5)R$Q5lurxqUzAX5F;=fmvXO6uGxqmIZRAoFuw1u+@ zj=*Rn=U{5^i5Y#XLhk{(4UBWi zS`P8XPb(p2x?4HNV|Kg~*S&-8-3+qEcfFkI4Ml4^eU9S-j+~-lOWKNCMvT$RaUU{0 z_muOae&xB#-{1i%$bl0gJU%@`CnqOS3kk+{FJjz3r48$8pH8XPiEDu$Ii`T7acH)q zl_Kht;(Qu4$n$ggXgeHg7*YJJtP96D8eRIT#%aHUS@bcth~APm2{>5T9(fUPFwG^) zBxmfzie^Mc8<#5^7|%&`m&O0FW?(UZ2HP97W!GDheY5N4L@C$AtegNpzU5qwM$4(Z z1Lq}^kA1TvZf~{dA;|oE<=!G5!t(fo2 zltU}XL*ZGT&tG8220z%OYvO4yPUO$l;fpj#uafMKSv%gz+8>u5AN(+Tpz*>{whRN4 zx&H_m3ufgj)SN=&g-ZD;Vq|E+o##Sw%rT7zL=Zw$le9*Z(Rg#yy2$d{8Se)4_&nYl z>%=Xp6^Qfti$o}(bzh>*N(#Ub8{h2D30%9lRf{tpyFnwC68>gIhyUVov12#)DkT6y zZUfavx@Eg}z#07v`U`wcY^fFO8SKEdCNLLo0*OU%U!ZK!X8@8o=W8 z!BCycaep?oVS!uGR5+Tk^^fwPCt6w)l5a?&=YbLWwIcQ{0o39Uyg%-fCM|$LJzkz0 zn^}P{=3kY!{{_A%UnnRj$i*#xX4UunFDt0p`q^uS%uNB zEj~0CzG%K(8g4`lPFp_-f4qX3Mz&Q3I13xwe zAH0~RL}l9gSG-osxjiJ(&}iDA}uZ{^0n9W zmD*6DmavXvj_G#ta3H@A}f7)(ODcVu&Z5iC^*G^ZoG_jr)Ci`}48}cdACy_ZNL)4UjC{ z@88DTju2Q@7my$%DvK$1)n3lG_w(fKOvl3kc+a}et8mcN%tW+$S ziS=ES}aj;id94NEzSJUb|rlx4H1%Iu~x3x^LhHH3v zv6pgPAxogvxvHd(l_woRoz}VLt2@>?Pz@n+w-l#vUPCHQHRP+$>~>3h{Nzrp*9!rQ z&RHtRMNeZgWIW0qW|tC>o7-qT-?Y)cC(hk<*M}dW=XAju-%l{_Yw=ZQ&8K;lH9AOf zs7mZURCc?Kr3?Jcua5R5;m7t8+N#pByWV_sXIQjge)nPxtxl^L-V(mZk3h(p%c!Io|>5Y)-SL>BR|A9w6P)2$V<5^v6uoX|SMM z$&m2S0GLs!aq$=BtS}XSMv|SeYkOIKLlc-)>g3kubkUDnJBp}9H8Af`6+k+RKpE`E z#E+iCnfg)Lv~Lv?gf6y^m-eOig+j;guEcM@6QLXWYRfW|5&K2CO`j{qHLG0m6BVr( zP?1Ves$=_EL(NOx!tAKH*f4-x&v97{Ae#_M6MIUI)wnv=iuHCkJ@dXJo?eAVn&#bw z>7f+!%r&Cq;6-WQHBW;LNraJPc1YxJmW+eF#&RcevHZejiwNd3O}OaHe_DTgy&Z?w zgQ?B8AxV&2uc@|DRAjBZ>=nM~(d@#I_r0M=giCs!;zxXZanG<@&zyF0=w;DXOx&A9 z)lcOwYXS9M1;;j6Uzl-vTAb=dh)@?n4eu*EsU&*80 zl}y)?2H=l_&uTddhS{_hWmHUetmzIMW~9QlT=&@0o>DPD0fMt6xuus8Tict1Y4JjF zXU{z$GJp8{NsSeB*W6A^r+zz3GF~e0h*&1{0BR+|2NpzZr^S0AlJZ3iEuF8PLrna) zR|Fh=HjE_*R?n=%sHX@$YlwGFMtec}WfW*l;wyIN-Q%w<{|CRr8+dn+oS$bk=vwAJ$ zqL<0reE^n_MnJD`Hx#9-0u2?<#F^$+sC`zU----8V)K+{F}1gbN!tZk%{Trnk>!_f z+6tGmP4%L0aw&qRf5uZ(hz{O4BBB`3VjiCI_*z zudlLbDfh(X52Kylez;9`H$3USUF?$Tq42!l@I22Zm9bMvPK!HCg`SBCDwBhofd5q9 zlarh-QyZ%B;$si8 z=-q|mo$rI?EL;*ruvDD6R7%Z1MP96_)X6aXo~7iwZWrH3GNc>WVMXR6Udq|6y4{56 zsN@QnUCvuD|CXgmX?uR*)$dGS$T5^wc;9a5Z4jGoDf zav+DPHDTjKvc(-d?)>)1h)>M zYaCC|_!bVD2A*Ze{D2fY%5pJv8UNSh|t#;GfMBqww!Tmq#(c7i z@?TiZJpY%2H?4%5pI(@i0E{c3Wno+S2_Slux6HV{RC7NR%@<#ns22sCJ2{=>%);+Y zip%yMjD+CJH~ik}PI&$07z-C6bDW~w2;%<>tQahAi3s@oAGE3)WPrxBXNmxaf(=#| zU;82$;MeAOQDtz@9COf-cTBte@#f_{i3id_(jYMocJGsYNh>Skvv`8k5>JWYp}n?) 
zP4NXvnm1TXjEv-1!am2om#SS)ph00S-I{h(w;AJ2%s!Ab8p_<{>R8i`OPfYU(=nE3 z`2m&tb|!jzoh{vSl^Fv3XD%JF5v(@O3Xd>17_IF;I@7MhW`3>i`}&>U3W|Ef{UZyo zte1|E9QZ~8mHONwPtFt;a#8|o@+*~DOaBft$Z)xgXB2h*VIp*uAy-s%tbNFVM(BF= z3Ob3ykb7vZeJED(DBFGdbV-TPA+7nzam^XzmPNx90V=u21Jkw(JHHfg8T^at^7VN8ePfmskB2%h(bb3+kxK@ zFt_ATpyK!dXF|p-$rL@xMhFgG2+xXq?P9x-D*~3^FI!X?KDe4_B@iZ_8&smV(KLuE z5rgOSmcJjF3+pWW@nctFmq7d>l0A-H$bn8Mr!x*JnyX*KAIBw=Wb&xh`>cu}^nF(4_TB1jgqT@=H>(J5%Y@YSo|1AIf&8y7*X%IglTVV_cm-{+R)A`xB&z-?dH_&s$)>Hk2IS8Q`z}M>DeSB z`dEx(F!8ica7+N<=$=Ur*c|{27w3H^qYi46oM@w=w5Qg$thNDn4Ja;M4Ji->?@Zz; z!|uCY5J1l>0-(=V9VP<+`Rd3p4Nb+;M}bQ%=etU=KW>TEmJ(ozr-;h0usVNL$}8%1 zG|gY7o5p&vc$Ax&F8Xn{c}c0XO_(ED9-t1JZ+-%rhq3L@RPX~F@;+qbyKBtG z2FC3*lxU;$>ul|0=O)J z?JKd2_jL8lom#_~sl`O({hLX{fyOttLMrqi45&mEUXET5r7iYHRv(!hKO~9*>?Nz% zth&mA$7%YWXR}UxM4eYnEh<7UCGkRLnUKd5hy^IYOsI|B=^Ah= z^#jURkucaqcIDa9X12Ps$9U_lu+f9b9iSrQz(XLz1K%HPWF?^t=hyw;Vu5_2SqyjK zkB$fzr`zYw=Ve2c-BHr2d?QTMN1#U@cPsPx_9_$LzS|^E<-p(Q(OMHlQ{N`&INd(_l>))Xvr%ZfWQO5xPmn=qhI#piI1l(T(9n_w_ryehp(;x#Hk7#w7m_rN`p zmj&&Rr{JXr>lnfOnI4YodWfwG;Hd;O6s0Fu>J@7BIi?aZznYO}9L8!fyTAz9TU%R2 ztaQfNcScqgN!{6pcj&EZ(z%(XdCj0*oSzVt0Mt z%I)VQ;_MdH?>B_A@LI#B*Zraw$uiAT{>;cM39X)B?@w5uEc2EGynhR4znD@pY-07& zgU6NeG^lN*9uf6agR?TyWtQDz@ILVt#Sb0QSe_Pgk}$U8%iKXGgl(BmUe;%<4)~Fd z5Rsf2UECkroFPW;VmL~2hDREiIfqL*yFidRL`Z_8?mA9c^_^Z_7+6J6P4Ea(sju@eGD3GLf+H;4nd2<+pW8IEg*?ZXOGX_(CtA~k ziR*@W7QmgHX*ICUMic0;Xt$pxuHQ}n0v>byMKmx4+vVu*92IOzi9%4=PPPgW_GEHbcX) z{p2zliFiuNyltdOuNf2Y#d-5sOC@z*w1bG?e`yEQk_k&TW%PA6vXoBc!)cI}KI{G= zu^$S4@d#XzS|mTf?Np`YX$U1Cg#I!iELj`Y!U!Bg6A0pn^6Thwi-$b^6$TEPxyG@G zgr#e9tKmM{L?DXI?p<4F_k3HqvEB0`+IQX6o+=5n+`XZqXhwWvoS72bJ}ornnOSTR zN8X3t{6&~9o7sql0tvQGhH_8oX|%4tUwAqUdBZA$fKr8SC^RRrND>G zg~(_bvhBWT(iX5ceQGSe#j-KBRc36h2g`Bgg|Ll27?ng=6)T{XD{1rd#re zP+g@NACF1#eM{brr3s3Rkd|c~;t$+*W64cm~?Z% zE7Y>@=WLL?;(_wVBw}mMiONZk&Hq3HTm=9cShjve1J!lnWX|!wRvMI*4I6#@IJm37 z0w#M>1=mpF@p~W08oZIZvM65rt-f^I_(w9zw`Tyt5mFK7SD+GqaMuDL23-MB-aGt&8+gPHkVn@ zke+uJR2#=p1&RP9Vm{+=Wq*LKh&(-Z9dHg0FAe)|$)79XhnJ zy%1MB9YNb)v~zrMsL;LEvkt07=UX9|v6Jy2?@npiKh-%B$e8{`)@#CV-w=8- zoDx<*(5=WY*1QX#4*{-haC3SOF(%f7TR|qGyTI?oho>jPdQUMv3Gb(E!nsCUUT51% zOAq%KM=)(@=*(*N06;+cJU(TS?Vppny;p~*&3{nE*TynxFMp&ey4sC;^Jt;5w0vq+ z2^=~{&BBK!;+w1gMoi}%jBf3c+Lw`TWQ{Ule$4Mk_B+Za@X&WRK6auoG!15k7K-ft zD8VZmxId+7nrYT$NW=+TT%1Xce@muX_RbCq5|0j~+7cVyCa4SYIBnmeBMpTYh=BNd z{(QPC+<0;H4-ybtms_sBE^OYi;7hnd139X<>fF}ZP{aU|wWCnDHUos@#hc5!51a~N zZ|Vt`k{Qk3ePU=2A3i6 zC+;wGw{=fbK90JrA%i;6pJT#jzo-Ys!PEiPVJgDB2)S`z=0V&E(;|LXsK6;9Vc4>| z*Yn*H(fuB*$eJvay_MtEPkQ@JuucQAfWYzR(X|QEZ^azNF*0k!w7Kp|U&+62pkjo5 zNFfkkeHWrv1!|dAfeoYv7q_+jUx|{xC#=tqA@kzp z`%=kUcm}90B9Ls6$#F=5LfT>())s{C&l~a$$Vr2bA%SU@{=On&@Fj{O1#ZUbgc}UA zF1PJ07y`S%52zN^m4lZuiyR?F7O`sfR|Q<1d*}={AuU?%WRVe3#eLD$i5@_bum3-* z@Fo9Ug+F2p(0w8Ldm!f1OwY3C#$WK84usc*{>HR%QjdW`nk!P0iKdsA5$ogguAULj z0V@kS&x&x`LodI37tilKqGt*H|Yeq_2*_bC0ZbFYeMJa&`zEBuhx{Iq3 zZxE_~5U@F%nSQ)g>V|JB z&3^VA_Xu3Wwo)(Dns^Ecmv|>-+AR(lLdWUO<-t%&Q>Ls7>xX)dGX56(h z#?@{S%OW?p{&N?}I`_4*o__sjWetf)(B480FD$Y!92&(whMKpvn;LI&#mNpTIp2<9 zW&-bvWoaM)o@o6v+7Bd3qfyOaqWG7Xps|o8V+)raIq$@Zc7=XLeqk|$LKDH3y#-DE z{HbKBH7zz~`?5b#G`1;jmKx2Kw?X?00z!?)sW^LvPd&Px<5?Bc2jYgOQ!lpOQ)H9t z%?|bwAc$lduPd4GU}k~?3=i{DC{z&21}ntFzar$)zg^Pt@g_EaETf}e9P*clJn6cG zP>>i22{ZyLjenNjRF~J%t3k07p3|%>8mD-W)Fc4dwES<(6XH`)Qf;X%Wu*KBhTz^< z)BFr=0&M;7{4Rj3D2J?2u##S;g72_sC*fs{sj5Ch5eOviBqZR`2m1%(RqH<{t_6fK zB5W;yq~`U=u#*<%ycnHIf@EkwJ%(%o>ap+~;j7C~u{>NhIi-ddgRY;VUvq(MGQHXH zp#)HvF=l4XiFK+wgnRjNud+S`3J2NZLj5a*k+IPqIn;bPg{-_CawFPkcgA&J>S=Er zR>|n--hp&hv>9`r^=7FY#|}0jE&G96r3~xUnj@2y`cTQOx5zua_2$$o^Tqou#_JKX 
zGl6{wHkUy#?^odZd^8zsz73PAvSiudgyLHC5zz3rIo2V+#s8x;m&^{}Y}2Dt*_$BZ zydm29nz{=kDg#4&m8u*hM1m2YW!GI7I>h?8ee!uN;=u|G_j}~j+{--O`Z&l3T+TAP z{@TujjJ`l;%|F~ck%rB?`m6Bln&PNwC{!SDGch!Q)RE>us`djhfXpXeh0%#znVpiU z_O9eyQlSYsha=IGrnBv%lffp%Di=)S$(7F=LkeSl1mg{Ou({4jGmie1S4D^4gayS8 zL~Fh-yEDKbpjq`DE}&3!;9D<4P104O=Vwg+q?+&e$}%Rs6WO&arJ*|zSYr4FdfS1r zVQOmNJs~dPEmx|??;bV+Sb|aaviWA=*|C&qu~4Y8M!|3e>04Lje~RhHms~U3#>XD@ z6)kG6Tvr>VBYle_+gH#C(??E!BJ#|EACbl@(cW3VJ5*x$ZmKkK*&{ew(PBSHDPJ6H zbYC-F?y8r`=UFj>3P|ffD7Z~uz`Yri(F~!54--y87`nX6$8g)MHwc7!B^Ny(I#VL% zsw`NXH;VDPr~1T)jhP4{zXk@NfmS*uBK z4c22{e(|~70zSZNriJ`b=<%Og`jC6AmyNr}Q7(?w^x9-*nmI1y zlH}vEMm-?TsJblQ?&1pmqii}8`d3wT;R4=rs?vrmJi)4q#f~PijukcsvoI^#0-w zMsoZ#+s)C=bnAyRlhz^RptLq=g{wE4pRs{?8|(V}6nUBGzxdxHsUG{-a|`VF?)Rk! z?wh>|O~^YyplMo4==*R3@<5>Q_hBIalyjJR3FH`>xkpFF67`FEuXeS=x(#z~d;4woyWTsE!#8DHmGsrx17HXUlC1WelT#<}kx)`%WWL+dNt)0F z9!rvJ7jaHwhCbz=M}*Ur*Tn zLGidAO!CqQB)#MlRAD3Nl|qhmVzeisH%aA9g9_W{m{k`;ojq4&Sd(+^2j@A3k$IP1 zARdiuQU*`!+ry>>!lrkYpQ`8AWUuZ_hg)}y#3IdRb?fuuy{px(b7udQ0jg^2G z1MkIC`C9(^ZhJ3GPfcj|`EMLKF>3c1)85`~v*8z2m7;9_uV1m6*pP0Sju6PikGQ-a z4nNa8{87&bLI0(m=i8t=b+k3Cwl{Bm6YBV8 zxxJi+FI)FgY!OfXn_(aOct99u=z9!&FQ$3MWGO5a6@~ z0}aoFVr@In8-y`F%~;>6yejF7=ptK6!6E4=WT;`J$`BaNfVNTsCoAOa3QM3N@T~A$ zsP16iD_4|B(%Sym&-4lK&#~_JCq#@@^g+O}JV*q-^9ba+Rd<$Z2$OVRkiu$hl#}Oi z)~zj7p)%{%jy24~&?GF;^BR=g4NGeo44f1w&JgF#qOe4-ux5|#{V=A^lN7zr*s9FM z&GXi>Ls~v;!zK+2j#2>p^JhK4j9zzl@@trnv(L4kG3%~9(W%n(Dob{DBqosRMJ10Q z6-c!o{b*{n+Gf5rST)*@~KeR|(YmZ);W9*gw*kv~)`r@t&&f-bvmTpy2z}0zcu_5jsuI=#z zEOTt_ELHuJNLMIy@48(0p`1@V%Dgh^!;H>XBJFH5Bv~h-{NIFi1oXlIn$=;T(aLHF zw}IXS4S>?8COcYVC0Bn$^&a$nP6utk3hxXiyOOeJSM`6j*1z8ee9@KsL#Cm|X-pRJ z0-P{Kw})|=sa1Ji%QQulN|3s^QBoJ13g!91d(kyQg+scqeIkX!WiM6XgI`RIjB8jx zlaIwsRnjyipFX{jSe6JVe%lf(@i4Lic0Sf`}nAaxVV8>O6v1u{c}<$9T+8jlmj zjg?l=%9Sr;kDa|(4LN^0(}r05WUU@!n@*<^9yy1sz5)m>bS^%Z)ntQf=-|)> z=&i6mkm9fH7b>Wr_>ilMvZo})Qkt4TS(qe&0!Z?3-8cD$%-)(BQVR7PhT9gsL94q_ zmj%k?!ID3LRzkUuXS4GTXBm z3fWu-0qR&7PxhfO^Qs|Pt5v?xC`LzzyG4)|LHfIZ>bE6vO~kk}ZY}V7DTP&^nXU>< z=f@3~&fesymf!?0sjU{ZvnjM0B_+cF*@VfNQqXv7X*STu1HOAi?o`RJfAofArRVW^ z+eaQQailla?Qy7i-;`R%?2M%7$E^|rZ8^!JCD1f_G8s8oA^NToup3*5e(maVmxo55 zd@K{YGa+uEqFz+{OJ#u_SDr?DpT;9E4P{7`bGLa%+Ba!q7i41T`DEy`u$b%X)*FVjYl=I9w#;FD zrjd|bjyp8hJ4pC`;uC5%0TpOZLvkuYfrn@gLHX?|BJ zY0w4w_&1>Nfdf8B?F%!rCMM0$Uisjego&QG?S`_*V?tn)q{kb*)261P&Y%tcXnG2h zRfGg>1{%Ddb=MYPl=`J_YEaXHEukmG|;TOQ$??F790NuC_scrrvgUWxA?Ellf zFLVUOLP|hq{JAT6>Q0<77$&3x<(MH1n%PKv#3_}kQ~QyG<0C8GPpp0J8VIEw7GZ(f zC^nQsgeR-z5BIVR^P@<~I%_4Qn`?;6Z+i+l77S&}A6U>P-g95@2PKBcQCuQ{>E4Fr z2hjVoft!BiaAzEh)1&;Se^0pofA{YZtC^(1Z87es1c3I!{~_5wB1e(*5_Q}N5R4Q5 zO=f^X`b0MU|F_KWW0NZqd|c|k#r+%uB6oFO-*8xS064}c9)Tsm$)|9>ib!m#|4gCX zc%TFJCYJdhEt6N+ zF!)ZbVI+1gB$Xj2kuNzH5-b=GF6`ph;46XNWY!11^W4F_Ep`zqI-)0EWE8 z714;2ArNU*p~|QPnv_i-1gB`e$HwF!;mk$J31f6+ly#sQ?(u0^%&$Bi0BYcCtIs!u zIh7vohG$q4X_dV7VJzfV!N5WFAA*7M?~BWV4u6f>$6?4a;#hU@Z1Dm|DQ#jvLC*-O zmbmbfsNrD)j2{}1%HkmUb6~*wP_oht7@v4p%e}nU5iX!Q`dJR3g)VD8G(e*3u>?VkQV@f~#}oH9Nlh+MFo_@o$*g## z3nLNX&_lF~;rQyY-54WXn6+(do$;HUa3%NqjmX~LQB3>3kC9*FFQ8rm(ztG~5Cib; zi4$!0)cQy4A>Cj4|FxvS{%T4OaP(GAh|#E=#wC5t?HvH!6-Q8L>!*`QKuFfS|CqJ3 zVWH|o11Q?&`pU|i4IetdPhpII#&2)+6*zZk4>R7x@h5Vb_vYzd1<9&L_v38PvrN#I z)N@AZ)V9Gv4`w(sBG6_b&=;ad$e{Hct@o~Qk$928n}52&WFiKblWT>sCBugn6C)fV zj-`j$K_i2nr=ywPp_$MdZmci@MEYz1Bm@J^&sBky_EPUKQVQ(>E*&oe7z*pEY3+bN zq8#U-hEFJ3fJ*~&pR@ASu#N@LJJjD=gk^i>Si~8zq1k*J?skqzFe9jHxmfqkVNhkf zSA#m-^cRp~v0e(QSLk;G1M450`-a*!M*8nLJga}?@VLH#Ry4er=~~|U^}&soL3}SQ zDei>+bMDgZ4%5veFeW%e0@m}9&A-pp&f5rXd>WnZy==5E=9-?q4ce9G+{^^Jsrvj) 
z1ei*KC-DTa;C--?qFvD7ig;1&>+}Am+VeTv!5YUwlx|uA3HUL73tOVZdR%4_4tJqG zO0@1Yc_&gFn}9H#+GC-b@UwRJv}j9Rf`N}31db#|rWJ%WWQLofrOM6P3-7nH4%tW; z2Uby@<~;t2%SD&xMnpAxx+OpLDi{oE(vMe0H@h3ZO&VhjfBu!MG%X-UYyD2IsmVuH z^or)HzGyAzJTzR%)kWf*8Zean5d8k@N*eq?9IAK!v{}&)dC9!gLIZYKWMDM@ z8|45|+ZM31MYYV>%7W$sp7-%4HgRr?KEuIF51RYa;Jah9xZ(<482>$snMk&mO)-WI z`?W`acCBN7Kd*>k41-T4?%Tc*fA~D0Ie>$kz1vD`ZRj&UGQp;U``$Y_&TLSzfi=h} zDU!Zq{pJ%1riOa)Xn&)F#iI9|$mz#K!S{v`Aa0#&$w%}*B!LQ6)=tc_Yl==h5NHSz zz!KhG#(94)sguS}B>2${FgA>Im!xCgP))H9tX)*5L(eu!%`%#L^0u%O})bQ zp%16`DVetoztU5A4A%LdRNs(~XHSx`4r5}dG2~UgH#8=@6WH_K)diw+T0nH}VEQFG z2cw8MPiCmgoAVh4{0*c!fl|`|8c(#)c())0ut5oqy4GY-scDkwhbm-d$!sb)<=vQU zWE&^;KW;doLF)KtR%2!cA*FfU^|x@cuNiv}_jtK&0)llyac1FvYs&k3Z6$&M5+ySD zH6R_~vUxrX(>I)et`Fmj;#>G<`xE{D-uGiMFY{OXU(gjOwBc#+J#i8?uqDg?qxAe= z8>`$MZWFXp@i`zk9{}F^x8H4hfSDG`Qvcy%Ws2(>t9J8cBY@%>8DoJUR40;ZDGJ!R z8nExOQPk+LG!Ko0UP(^h&E6RW6xo=Zh1>KwHfs~aS^2d}GoHJam{cPHG6g?UB1+*c z&<%R^+TQng#o~t|aIBe}TrZ&Q<(NWQ3s{HqxN}r9Td8WC^R$m#&&mZ>&3bn!oP3-;`L7 z;rRwh1nh5tmKb+La*t+ID49j=t?G2|2E@;#_pb~EYFd|+;Rml=W)EIcmYd(!)PXi# zGrq`Hz?NozTI?$)?B676mcz&(6SDeY|Rm_!F?rppFYfEb& zVnQ7cqy=Vjgb1!N;rS=^a+o@h9U?Z#O_U>5dPAJ{3lL|(kZbhDApbHZ*9mx zhcs~ZhLbqB?8r|7bWhTVz(c`2?#E}!myxByW_eZS|M6wc`hdB-=d)d(gQ4L#8=OFm z4|wslVEBHS{h!05uNx;2w!K76zsW)WqYoD7RLShA1M5YmU>JFu_A0wgN78@* zPNlQ$#N#Yf=l436fYIE_^Y5qsRl~IgHFZQ`t!;y2$AF`vBP5NlSgE3-A`)_2#x}yV zrdnHcw22m#sW^gAO=2F`npoP>a} z&r_=3_%L$*xqUss{AHx1A=tETZq&s;ahwegyP?plXlmp1ykl?YVlajohmA2Cy_dvr z03y_u*c;Q!F;BaW!JKNzql-#rE!APG4o|QI3i}%<-0dPPcj29ulvKSgh zmt;JY81b)C*KbJ#5*Rm#g97dIl}lDUxVvm6+RB^&TwN`vR^(1q$%c7XttILT5f+E=y%W&2!{GIcf7#2{rJe-Rbwu2I41W+@j^q}4Z9h7) z`-dr&2v@^X1!i2!$DDVKq~8n&k}agl6|imI4_f7Hw-JLcW|!=dIjm1XPzy*c5c9sM zL}+3URNTv1a0k`{6RIB26q~yojp69StJtRz!_v40Wl~pMCZpL5h%idc@VTX+> zpHF6kD{w^O#N6%HwR$lteUm0DRpS zz^F+lU@~je0)#o01xbURLN}%QXhEMNFjNtFBsrg5@MvV76zOfeDeFkc>S55D>q|qGE1VI@%VAO(sRd z)><4@pf|kqY0*FYceD!Ma?UQ|{)sp;+eT2&g$emyT$*}cSHNH}5GISS^7MNf$Cu1s zD-8xsCLImKR2H8v4ffmZ2{}t&)2;r_sFk>DpbU^Gyf9eO+oFJT*%D*suwK@ICdpugRSPOI(o>b7(SircQO~*#z=^qcub7O|gW53QYhTJB# zjW7tIB&pQ?@F<_oQmD!yyuMS>F4&cC(wUL)!k-H{gsiLpVWoJ-D3-sKL}!RX6;$8V zeArjO)e_F0;IKsH?~Q}z3e3o{>hpo}(!KNtMiC_%sEtoadp}WFQATHM(b8tu`HPO{ zJ+nT&s;Hy9`C1&;Dre?ypj`e0INaFP4BRcIMgfhVh!nXn@EGE$h-Phyw64BBL=M63 z+pO2&+bo5rFt=X=c!$;r1M!tFF@zP{wU+{n3D0zUW<@XjgMQ*~LfS_g)?H5Bar9qT CVr?@3 literal 0 HcmV?d00001 diff --git a/download/setup.html b/download/setup.html index f9bbb7eb6..53ccf3edf 100644 --- a/download/setup.html +++ b/download/setup.html @@ -80,7 +80,7 @@

    Why use ERDDAP to distribute your data?

    You can customize your ERDDAP's appearance so ERDDAP reflects your organization and fits in with the rest of your web site. -

    ERDDAP has been installed by over 50 organizations worldwide,

    +

    ERDDAP has been installed by over 60 organizations worldwide,

    including: This is just a list of some of the organizations where ERDDAP has been installed @@ -215,20 +233,21 @@

    How To Do the Initial Setup of ERDDAP on Your Ser title="This link to an external web site does not constitute an endorsement."/>.
      -
  • Set up Tomcat (external link).
    For security reasons, it is almost always best to use the latest version of Tomcat. Below, the Tomcat directory will be referred to as tomcat. Warning! If you already have a Tomcat running some other web application (especially THREDDS), we recommend that you install ERDDAP in a second Tomcat, because ERDDAP may need different settings and shouldn't have to contend with other applications for memory.
     
    • Follow the instructions at https://tomcat.apache.org/ (external link) to set up Tomcat on your server. @@ -242,10 +261,11 @@

      How To Do the Initial Setup of ERDDAP on Your Ser these instructions (external link) (especially the article's second recommendation) to disable certain HTTP methods (TRACE, PUT, OPTIONS, DELETE) in Tomcat. (background information (external link)) @@ -253,7 +273,7 @@
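      For reference, here is a minimal sketch of the kind of <security-constraint> block such instructions add to Tomcat's conf/web.xml (an illustration only, not text from those articles; the details vary with Tomcat version):
      <security-constraint>
        <web-resource-collection>
          <web-resource-name>restricted methods</web-resource-name>
          <url-pattern>/*</url-pattern>
          <http-method>TRACE</http-method>
          <http-method>PUT</http-method>
          <http-method>OPTIONS</http-method>
          <http-method>DELETE</http-method>
        </web-resource-collection>
        <auth-constraint/>
      </security-constraint>
      (The empty <auth-constraint/> denies the listed methods to all users.)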

      How To Do the Initial Setup of ERDDAP on Your Ser
    • On Linux and Macs, it is best to set up Tomcat (the program) as belonging to user "tomcat" (a separate user with limited permissions and which has no password. + href="https://unix.stackexchange.com/questions/56765/creating-an-user-without-a-password">has no password. Thus, only the super user can switch to acting as user tomcat. This makes it impossible for hackers to log in to your server as user tomcat. And in any case, you should make it so that the tomcat user has very limited @@ -294,21 +314,36 @@

      How To Do the Initial Setup of ERDDAP on Your Ser
    • Set Tomcat's Environmental Variables -
      Create a file tomcat/bin/setenv.sh (or \'s and .bat in Windows) - (or in Red Hat Enterprise Linux (RHEL), edit ~tomcat/conf/tomcat8.conf) + +

      On Linux and Macs: +
      Create a file tomcat/bin/setenv.sh + (or in Red Hat Enterprise Linux [RHEL], edit ~tomcat/conf/tomcat8.conf) to set Tomcat's environmental variables. - This file will be used by tomcat/bin/startup.sh and shutdown.sh - (or \'s and .bat in Windows). - The file should contain: -
      export JAVA_HOME=/usr/local/jdk1.8.0_77/jre + This file will be used by tomcat/bin/startup.sh and shutdown.sh. + The file should contain something like: +
      export JAVA_HOME=/usr/local/jdk1.8.0_111/jre
      export JAVA_OPTS='-server -Djava.awt.headless=true -Xmx1500M -Xms1500M'
      export TOMCAT_HOME=/usr/local/apache-tomcat-8.0.23
      export CATALINA_HOME=/usr/local/apache-tomcat-8.0.23

      (but substitute the directory names from your computer).
      (If you previously set JRE_HOME, you can remove that.) -
      In Windows, use set instead of export and \'s instead of /'s.
      On Macs, you probably don't need to set JAVA_HOME. -
      The -Xmx and -Xms memory settings are important because ERDDAP works better with more + +

      On Windows: +
      Create a file tomcat\bin\setenv.bat + to set Tomcat's environmental variables. + This file will be used by tomcat\bin\startup.bat and shutdown.bat. + The file should contain something like: +
      SET "JAVA_HOME=\Program Files\Java\jdk1.8.0_111/jre" +
      SET "JAVA_OPTS=-server -Xmx1500M -Xms1500M" +
      SET "TOMCAT_HOME=\Program Files\apache-tomcat-8.0.23" +
      SET "CATALINA_HOME=\Program Files\apache-tomcat-8.0.23"
      +
      (but substitute the directory names from your computer). +
      If this is just for local testing, remove "-server". +
      (If you previously set JRE_HOME, you can remove that.) + +

      The -Xmx and -Xms memory settings are important +
      because ERDDAP works better with more memory. Always set -Xms to the same value as -Xmx.

      • For 32 bit Operating Systems and 32 bit Java: @@ -335,7 +370,7 @@

        How To Do the Initial Setup of ERDDAP on Your Ser @@ -347,24 +382,24 @@

        How To Do the Initial Setup of ERDDAP on Your Ser
      • Fonts for images: We strongly prefer the free Vera Sans fonts to the standard Linux/Java fonts. Installing these fonts isn't required.

        If you don't install these fonts, you need to change the fontFamily setting in setup.xml to <fontFamily>SansSerif</fontFamily> .

        To install the fonts, please download BitstreamVeraSans.zip (344,753 bytes, MD5=E16AF0C9838FD2443434F6E0E9FD0A0D) and unzip the font files to a temporary directory.

        • On Linux (as the root user) and Windows XP (as the administrator), copy the font files into JAVA_HOME/lib/fonts so Java can find the fonts. Remember: if/when you later upgrade to a newer version of Java, you need to reinstall these fonts.
        • On Macs, for each font file, double click on it and then click Install Font.
        • On Windows Vista and 7, in Windows Explorer, select all of the font files. Right click. Click on Install.
      • Test your Tomcat installation.
          @@ -405,7 +440,7 @@

          How To Do the Initial Setup of ERDDAP on Your Ser
        • See the Tomcat log file tomcat/logs/catalina.out. Tomcat problems and some ERDDAP startup problems are almost always indicated there. This is common when you are first setting up ERDDAP. -
        • See the Tomcat (external link) web site or search the web for help, @@ -425,10 +460,10 @@

          How To Do the Initial Setup of ERDDAP on Your Ser Please check back here tomorrow. Sorry for the inconvenience, but the new version is worth the wait. -->
          On Linux, Mac, and Windows, download erddapContent.zip (version 1.76, size=23,878 bytes, MD5=D988149831895B41164F53F4245BF5AD, dated 2017-05-12) and unzip it into tomcat, creating tomcat/content/erddap .

          Other Directory: For Red Hat Enterprise Linux (RHEL) or for other situations where @@ -439,14 +474,6 @@

          How To Do the Initial Setup of ERDDAP on Your Ser in ~tomcat/conf/tomcat8.conf so ERDDAP can find the content directory.

          [Some previous versions are also available: -
          1.58 - (size=25,000 bytes, MD5=620A54B6A52D7C95C0D1635A9FF39DC6, dated 2015-02-25) -
          1.60 - (size=25,035 bytes, MD5=10332FC959B72F17E6A00AD432A38578, dated 2015-03-12) -
          1.62 - (size=25,035 bytes, MD5=4B17A93CE52087C53AF6AEA905391418, dated 2015-06-08)
          1.64 (size=24,996 bytes, MD5=8E375D13D388E0D290110E196117E3DC, dated 2015-08-19) @@ -462,6 +489,9 @@

          How To Do the Initial Setup of ERDDAP on Your Ser
          1.72 (size=26,760 bytes, MD5=8EFBD16CEBC249A2E21CFE167423635B, dated 2016-05-12) +
          1.74 (size=26,708 bytes, MD5=D70DF53AE98785CC35614571E719BC54, dated 2016-10-07) ]

          Then, @@ -505,7 +535,7 @@

          How To Do the Initial Setup of ERDDAP on Your Ser

        After you edit the .xml files, it is a good idea to verify that the result is well-formed XML by pasting the XML text into an XML checker like xmlvalidation (external link). @@ -514,51 +544,39 @@

        How To Do the Initial Setup of ERDDAP on Your Ser To let ERDDAP know where it is, set the system property erddapContentDirectory=/usr/local/erddap/content/erddap/ (or wherever it is). If you aren't allowed to set this property in startup.sh, perhaps you can set it in Tomcat's context.xml.
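        For example, one way to set a system property (a sketch, reusing the setenv.sh approach from the Tomcat setup instructions above; substitute your own directory names and memory settings) is to add a -D parameter to JAVA_OPTS in tomcat/bin/setenv.sh:
        export JAVA_OPTS='-server -Djava.awt.headless=true -Xmx1500M -Xms1500M -DerddapContentDirectory=/usr/local/erddap/content/erddap/'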
      • Install the erddap.war file. -
        On Linux, Mac, and Windows, download erddap.war (from https://github.com/BobSimons/erddap/releases/download/v1.76/erddap.war) into tomcat/webapps .
        (version 1.76, size=534,583,353 bytes, MD5=72F90E2C26E9E3806573CADD239855F0, dated 2017-05-12)
        The .war file is big because it contains high resolution coastline, boundary, and elevation data needed to create maps.

        [Some previous versions are also available. If you download one of these, rename it erddap.war.
        1.58 (size=490,902,857 bytes, MD5=B4D1BBDA2FADD88FDCA29461C2578D3D, dated 2015-02-25)
        1.60 (size=490,919,033 bytes, MD5=99319916DA726030C25E0045A65B0971, dated 2015-03-12)
        1.62 (size=491,038,285 bytes, MD5=1FDF3F349EC7D87A4A88AD14A7B5F41A, dated 2015-06-09)
        1.64 (https://github.com/BobSimons/erddap/releases/download/v1.64/erddap.war) (size=511,898,199 bytes, MD5=0E11741BF06EE5388C022F3632EE1915, dated 2015-08-19)
        1.66 (https://github.com/BobSimons/erddap/releases/download/v1.66/erddap.war) (size=512,007,729 bytes, MD5=71CB248B05F0340BE6CF4CF71F3BD8E6, dated 2016-01-19)
        1.68 (https://github.com/BobSimons/erddap/releases/download/v1.68/erddap.war) (size=511,995,047 bytes, MD5=0E919CBDAD014B724DE89F40C8CA9379, dated 2016-02-08)
        1.70 (https://github.com/BobSimons/erddap/releases/download/v1.70/erddap.war) (size=556,163,762 bytes, MD5=3320ADB87E9F3AB31EBF89260C64AF19, dated 2016-04-15)
        1.72 (https://github.com/BobSimons/erddap/releases/download/v1.72/erddap.war) (size=527,743,058 bytes, MD5=4C84507540C6868413E6CEE470414665, dated 2016-05-12)
        1.74 (size=552,359,929 bytes, MD5=5976EDF01CC9D3A6046D83DB0CF10EA0, dated 2016-10-07) ]

      • On Linux computers, change the Apache timeout settings @@ -613,8 +631,8 @@

        How To Do the Initial Setup of ERDDAP on Your Ser
         
      • If Tomcat is already running, shut down Tomcat - with tomcat/bin/shutdown.sh in Linux or Mac OS, - or tomcat\bin\shutdown.bat in Windows. + with (in Linux or Mac OS) tomcat/bin/shutdown.sh +
        or (in Windows) tomcat\bin\shutdown.bat

        On Linux, use ps -ef | grep tomcat before and after shutdown.sh to make sure the tomcat process has stopped. @@ -624,8 +642,8 @@

        How To Do the Initial Setup of ERDDAP on Your Ser Or if it looks like it won't stop on its own, use:
        kill -9 processID -
      • Start Tomcat with tomcat/bin/startup.sh in Linux or Mac OS, - or tomcat\bin\startup.bat in Windows. +
      • Start Tomcat with (in Linux or Mac OS) tomcat/bin/startup.sh +
        or (in Windows) tomcat\bin\startup.bat
         
      @@ -658,6 +676,23 @@

      How To Do the Initial Setup of ERDDAP on Your Ser
      There is no fix for this, but it usually only occurs the first time after you install a new version of ERDDAP. +

      Trouble: Tomcat Cache +
      If Tomcat throws warnings during startup about + "org.apache.catalina.webresources.Cache.getResource Unable to add the resource + at someFile to the cache because there was insufficient free space + available after evicting expired cache entries - consider increasing the + maximum size of the cache", + the solution is: +
      In your [tomcat]/conf/context.xml, add this block right before </Context> : +
      <Resources cachingAllowed="true" cacheMaxSize="100000" /> +
      (or some other number; the units for cacheMaxSize are KB)
      For more information see + https://tomcat.apache.org/tomcat-8.0-doc/config/resources.html (external link). +
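      For example, the edited [tomcat]/conf/context.xml might look like this (a minimal sketch; keep whatever other elements your context.xml already contains):
      <?xml version="1.0" encoding="UTF-8"?>
      <Context>
          <WatchedResource>WEB-INF/web.xml</WatchedResource>
          <Resources cachingAllowed="true" cacheMaxSize="100000" />
      </Context>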

    • In the future, to shut down (and restart) ERDDAP, see
      How to Shut Down and Restart Tomcat and ERDDAP.
        @@ -712,9 +747,9 @@

      How To Do an Update of an Existing ERDDAP on Your Serve
    • Download erddap.war (from https://github.com/BobSimons/erddap/releases/download/v1.76/erddap.war) into a temporary directory.
      (version 1.76, size=534,583,353 bytes, MD5=72F90E2C26E9E3806573CADD239855F0, dated 2017-05-12)
       
    • messages.xml @@ -744,31 +779,59 @@

      How To Do an Update of an Existing ERDDAP on Your Serve

  • Install the new ERDDAP in Tomcat: -
      -
    1. (Don't use Tomcat Manager. Sooner or later there will be PermGen memory issues. - It is better to actually shutdown and startup Tomcat.) -
    2. From a command line, use tomcat/bin/shutdown.sh - (or \'s and .bat in Windows). -
      On Linux, you may want to use ps -ef | grep tomcat to see if/when - the process has been stopped. (It may take a minute or two.) -
    3. In tomcat/webapps, use rm -r erddap -
    4. In tomcat/webapps, use rm erddap.war -
    5. Copy the new erddap.war file from the temporary directory to tomcat/webapps . -
    6. Use tomcat/bin/startup.sh (or \'s and .bat in Windows) - to restart Tomcat and ERDDAP. -
    7. View ERDDAP in your browser to check that the restart succeeded. - (Sometimes, you need to try to load ERDDAP in your browser a few times before it succeeds.) +
      * Don't use Tomcat Manager. Sooner or later there will be PermGen memory issues. It is better to actually shut down and start up Tomcat.
      * Replace references to tomcat below with the actual Tomcat directory on your computer.
        + +
        +
      • For Linux and Macs: +
          +
        1. Shutdown Tomcat: + From a command line, use: tomcat/bin/shutdown.sh +
          And use ps -ef | grep tomcat to see if/when + the process has been stopped. (It may take a minute or two.) +
        2. Remove the uncompressed ERDDAP installation: + In tomcat/webapps, use +
          rm -rf erddap +
        3. Delete the old erddap.war file: + In tomcat/webapps, use rm erddap.war +
        4. Copy the new erddap.war file from the temporary directory to + tomcat/webapps +
        5. Restart Tomcat and ERDDAP: + use tomcat/bin/startup.sh +
        6. View ERDDAP in your browser to check that the restart succeeded. +
          (Often, you have to try a few times and wait a minute before you see ERDDAP.)
            -
        +
    -
  • Troubles updating ERDDAP? -
    Email me at bob dot simons at noaa dot gov . I will help you. -
    Or, you can join the ERDDAP Google Group / Mailing List - and post your question there. +
  • For Windows: +
      +
    1. Shutdown Tomcat: + From a command line, use: tomcat\bin\shutdown.bat +
    2. Remove the uncompressed ERDDAP installation: In tomcat\webapps, use
      rmdir /S /Q erddap
    3. Delete the old erddap.war file: + In tomcat\webapps, use del erddap.war +
    4. Copy the new erddap.war file from the temporary directory to + tomcat\webapps +
    5. Restart Tomcat and ERDDAP: use tomcat\bin\startup.bat +
    6. View ERDDAP in your browser to check that the restart succeeded. +
      (Often, you have to try a few times and wait a minute before you see ERDDAP.) +
    + +Troubles updating ERDDAP? +
    Email me at bob dot simons at noaa dot gov . I will help you. +
    Or, you can join the ERDDAP Google Group / Mailing List + and post your question there. + +

    Things You Need To Know

      @@ -802,7 +865,23 @@

      Things You Need To Know

      or limited by some other resource. In these cases, see the advice below to deal with ERDDAP responding slowly. -
        + +

      Requests for a long time range (>30 time points) from a gridded dataset are prone to timeout failures, which often appear as Proxy Errors, because it takes significant time for ERDDAP to open all of the data files one-by-one. If ERDDAP is otherwise busy during the request, the problem is more likely to occur. If the dataset's files are compressed, the problem is more likely to occur, although it's hard for a user to determine if a dataset's files are compressed.
      The solution is to make several requests, each with a smaller time range. How small a time range? I suggest starting really small (~30 time points), then (approximately) doubling the time range until a request fails, then going back one doubling. Then make all the requests (each for a different chunk of time) needed to get all of the data.
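      For example, here is a rough sketch of chunked requests using curl (the server URL, datasetID, variable name, and coordinate ranges are hypothetical; substitute your own; curl's -g option stops curl from treating ERDDAP's [ ] characters as globbing syntax):
      curl -g -o chunk1.nc "http://www.yoursite.org/erddap/griddap/myDatasetID.nc?sst[(2017-01-01):(2017-01-31)][(0.0)][(30):(40)][(230):(240)]"
      curl -g -o chunk2.nc "http://www.yoursite.org/erddap/griddap/myDatasetID.nc?sst[(2017-02-01):(2017-02-28)][(0.0)][(30):(40)][(230):(240)]"
      (and so on, one chunk of time per request)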
      An ERDDAP administrator can lessen this problem by increasing the + Apache timeout settings.

    @@ -1163,7 +1242,7 @@

    Things You Need To Know

    to try to figure out the cause. If you can't, please email the details to bob dot simons at noaa dot gov . As a temporary solution, you might try using Monit (external link) to monitor your ERDDAP and restart it if needed. @@ -1302,20 +1381,22 @@

    Things You Need To Know

    old (as specified in datasets.xml).
      -
  • A Flag File Tells ERDDAP to Try to Reload a Dataset As Soon As Possible
    • ERDDAP won't notice any changes to a dataset's setup in datasets.xml until ERDDAP reloads the dataset. -
    • If a dataset is active in ERDDAP and you want to force ERDDAP to reload it as soon as possible +
    • To tell ERDDAP to reload a dataset as soon as possible (before the dataset's <reloadEveryNMinutes> would cause it to be reloaded), put a file in bigParentDirectory/flag (bigParentDirectory is specified in setup.xml) that has the same name as the dataset's datasetID. +
      This tells ERDDAP to try to load new and changed datasets. (See the example after this list.)
      The old version of the dataset will remain available to users until the new version is available and swapped atomically into place.
      For EDDGridFromFiles and EDDTableFromFiles, the reload will look for new or changed files, read those, and incorporate them into the dataset. So the time to reload is dependent on the number of new or changed files.
      If the dataset has active="false", ERDDAP will remove the dataset.
    • There is a variant of the /flag directory: the /hardFlag directory. (Added in ERDDAP v1.74.) If you put a file in bigParentDirectory/hardFlag /a>) that has the @@ -1358,6 +1439,13 @@

      Things You Need To Know

      active="false", a flag will cause the dataset to be made inactive (if it is active), and in any case, not reloaded. +
    • Any time ERDDAP runs LoadDatasets to do a major reload (the timed reload + controlled by <loadDatasetsMinMinutes>) or a minor reload + (as a result of an external or internal flag), + ERDDAP reads all <user>, <requestBlacklist>, <slowDownTroubleMillis>, + and <subscriptionEmailBlacklist> tags and switches to the new settings. + So you can use a flag as a way to get ERDDAP to notice changes + to those tags ASAP.
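    For example, on Linux and Macs you can set a flag by hand (a sketch; /erddapData is a hypothetical bigParentDirectory and myDatasetID a hypothetical datasetID):
    touch /erddapData/flag/myDatasetID
    The file's contents are irrelevant; ERDDAP only looks at the file's name.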

    ERDDAP has a web service so that flags can be set via URLs. @@ -1674,7 +1762,7 @@

    Things You Need To Know

    src="../images/external.png" align="bottom" alt=" (external link)" title="This link to an external web site does not constitute an endorsement."/> in the root directory of your web site's document hierarchy so that it can be viewed by - anyone as, e.g., http://www.example.com/robots.txt . + anyone as, e.g., http://www.your.domain/robots.txt .
    If you are creating a new robots.txt file, this is a good start:
     User-Agent: *
    @@ -1710,7 +1798,7 @@ 

    Things You Need To Know


     
  • sitemap.xml - As the https://www.sitemaps.org (external link) web site says:
    Sitemaps are an easy way for webmasters to inform search engines about pages on their @@ -1741,9 +1829,8 @@

    Things You Need To Know

    engines about the sitemap.xml file by visiting these URLs (but change YourInstitution to your institution's acronym or abbreviation and www.yoursite.org to your ERDDAP's URL):
      -
    • http://submissions.ask.com/ping?sitemap=http://www.yoursite.org/erddap/sitemap.xml -
    • http://www.bing.com/webmaster/ping.aspx?siteMap=http://www.yoursite.org/erddap/sitemap.xml -
    • http://www.google.com/ping?sitemap=http://www.yoursite.org/erddap/sitemap.xml
    • http://search.yahooapis.com/SiteExplorerService/V1/updateNotification?appid=YourInstitution_ERDDAP&url=http://www.yoursite.org/erddap/sitemap.xml +
    • https://www.bing.com/webmaster/ping.aspx?siteMap=http://www.yoursite.org/erddap/sitemap.xml +
    • https://www.google.com/ping?sitemap=http://www.yoursite.org/erddap/sitemap.xml
    (I think) you just need to ping each search engine once, for all time. The search engines will then detect changes to sitemap.xml periodically. @@ -1790,7 +1877,7 @@
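    If you prefer the command line, visiting such a URL can be done with curl, e.g. (with your ERDDAP's URL substituted):
    curl "https://www.google.com/ping?sitemap=http://www.yoursite.org/erddap/sitemap.xml"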

    Things You Need To Know

  • A given ERDDAP can be both a source of data for some datasets and a re-distribution site for other datasets.
  • The resulting network is roughly similar to data distribution networks set up with programs like Unidata's IDD/IDM (external link), but less rigidly structured.
      @@ -1874,7 +1961,7 @@

    Things You Need To Know

    continues to function after the user has logged out. -

    To set up the security/authentication/authorization system:

  • For testing purposes on your personal computer, - follow these instructions to configure tomcat to support SSL (external link) @@ -1933,9 +2020,14 @@

    Things You Need To Know

  • In tomcat/conf/server.xml, uncomment the port=8443 <Connector> tag:
     <Connector port="8443" protocol="org.apache.coyote.http11.Http11NioProtocol"
    -    maxThreads="150" SSLEnabled="false" scheme="https" secure=“true"
    -    proxyPort="443" clientAuth="false" sslProtocol="TLS" />
    +        maxThreads="150" SSLEnabled="true">
    +    <SSLHostConfig>
    +        <Certificate certificateKeystoreFile="conf/localhost-rsa.jks" type="RSA" />
    +    </SSLHostConfig>
    +</Connector>
     
    +and change the location of the certificateKeystoreFile. +
  • In datasets.xml, create a <user> tag for each user with username, password (if authorization=custom), @@ -1961,13 +2053,16 @@
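    For example, a hypothetical <user> tag might look like this (the attribute values are placeholders; see the datasets.xml documentation for the exact details):
    <user username="someone@example.com" password="1A2B3C4D5E6F7A8B" roles="role1,role2" />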

    Things You Need To Know

    Authentication (logging in) -
    If you don't want to allow users to log in, don't specify a value for <authentication> in setup.xml.
    If you do want to allow users to log in, you must specify a value for <authentication>. Currently, ERDDAP supports custom, email, and google authentication. We strongly recommend the google option because it frees you from storing and handling users' passwords (needed for custom) and is more secure than the email option. Remember that users often use the same password at different sites.

    Things You Need To Know

    so you don't have to gather, store, or work with them. So you are freed from that responsibility. -

    All <authentication> options use a cookie (external link)

    Things You Need To Know

    program (not a browser), cookies are hard to work with. Sorry. -

    The details of the <authentication> options are:

    - to compile gov.noaa.pfel.coastwatch.TestAll (it has links to a few classes that wouldn't be compiled otherwise) - and java 1.8.0_77 to run the tests. + and java 1.8.0_111 to run the tests. For security reasons, it is almost always best to use the latest versions of Java and Tomcat.
    • When we run javac or java, the current directory is tomcat/webapps/erddap/WEB-INF . @@ -3091,7 +3237,7 @@

      Programmer's Guide

      And it will be easy for us to incorporate your code in ERDDAP. Note that if you do submit code, the license will need to be compatible with the ERDDAP license (e.g., Apache (external link), BSD
Credits
  • src="../images/external.png" align="bottom" alt=" (external link)" title="This link to an external web site does not constitute an endorsement."/> ERD. -
    Bob Simons is the author of ERDDAP (the designer and programmer who wrote -the ERDDAP-specific code). -
    Roy Mendelssohn instigated the project and provides essential ongoing support (including hardware, network, and other software support) that has made this project possible and facilitated this project, including by freeing up Bob's time so he could spend more time on the ERDDAP code.
    The ERDDAP-specific code is licensed as copyrighted open source, with + +

    Bob Simons is the author of ERDDAP (the designer and programmer who wrote +the ERDDAP-specific code). The starting point was Roy Mendelssohn's suggestion +that Bob turn his ConvertTable (a small utility which converted tabular data +from one format to another and which was largely code from Bob's +pre-NOAA work that Bob re-licensed to be open source) into a web service. + +

    It was and is Roy Mendelssohn's ideas about distributed data systems, +his initial suggestion to Bob, and his ongoing support +(including hardware, network, and other software support, +and by freeing up Bob's time so he could spend more time on the ERDDAP code) +that has made this project possible and enabled its growth. + +

    The ERDDAP-specific code is licensed as copyrighted open source, with NOAA (external link) @@ -3197,51 +3350,51 @@

    Credits

    src="../images/external.png" align="bottom" alt=" (external link)" title="This link to an external web site does not constitute an endorsement."/>) which makes these classes available with an MIT/X-like license (see classes/com/cohort/util/LICENSE.txt). -
  • Data from OPeNDAP (external link) servers are read with Java DAP 1.1.7 (external link) (license: LGPL).
  • NetCDF files (.nc) and GMT-style NetCDF files (.grd) are read and written with code in the NetCDF Java Library (external link) (license: MIT/X-like (external link)) from Unidata (external link).
  • The NetCDF Java Library reads GRIB files via the Unidata GRIB decoder (grib-6.0.jar) (external link) (license: MIT/X-like (external link)).
  • The NetCDF Java Library reads BUFR files via the Unidata BUFR decoder (external link) (license: MIT/X-like (external link)).
  • The NetCDF Java Library and Cassandra need slf4j from the Simple Logging Facade for Java project. Currently, ERDDAP uses the slf4j-simple-xxx.jar renamed as slf4j.jar to meet this need. (license: MIT/X (external link)).
  • ERDDAP and the NetCDF Java Library use Joda (external link) for some calendar calculations. (license: Apache 2.0 (external link)).
  • The NetCDF Java Library uses code from some .jar files from Apache projects (external link)Credits
  • src="../images/external.png" align="bottom" alt=" (external link)" title="This link to an external web site does not constitute an endorsement."/> version 3 (a Java-based Scientific Graphics Toolkit written by Donald Denbo at NOAA PMEL (external link)) (license: copyrighted open sourceCredits >Oracle Binary Code License Agreement for Java EE Technologies (external link). -->
  • ERDDAP includes the PostGres JDBC42 (external link) driver (license: BSD (external link)). The driver is Copyright (c) 1997-2010, PostgreSQL Global Development Group. All rights reserved. @@ -3395,7 +3548,7 @@

    Credits

    src="../images/external.png" align="bottom" alt=" (external link)" title="This link to an external web site does not constitute an endorsement."/> cassandra-driver-core.jar (external link) (license:
    Cassandra's cassandra-driver-core.jar requires (and so ERDDAP includes):
      -
    • netty-all.jar (external link) (license: Apache 2.0 (external link)).

      Credits

      title="This link to an external web site does not constitute an endorsement."/>).
    • metrics-core.jar (external link) (license: MIT (external link)).
    • lz4.jar (external link) (license:

      Credits

      src="../images/external.png" align="bottom" alt=" (external link)" title="This link to an external web site does not constitute an endorsement."/>).
      aws-java-sdk.jar also requires the jackson-annotations.jar, jackson-core.jar, and jackson-databind.jar (external link) from

      Credits

      title="This link to an external web site does not constitute an endorsement."/>).
    • ERDDAP uses code from the CoastWatch Browser project from the NOAA CoastWatch West Coast Regional Node (license: copyrighted open source). That project was initiated and managed by Dave Foley, the Coordinator of the NOAA CoastWatch West Coast Regional Node. The CoastWatch Browser code was written by Bob Simons. @@ -3602,7 +3755,7 @@

      Contact


       
      -

      ERDDAP, Version 1.74 +

      ERDDAP, Version 1.76
      Disclaimers | Privacy Policy

        diff --git a/download/setupDatasetsXml.html b/download/setupDatasetsXml.html index afbeb404c..a43259584 100644 --- a/download/setupDatasetsXml.html +++ b/download/setupDatasetsXml.html @@ -23,7 +23,7 @@ NOAA NMFS + rel="bookmark" href="https://www.nmfs.noaa.gov">NMFS SWFSC Introduction
      in your setup.xml file.

      The impetus for this was NOAA's 2014 Public Access to Research Results (PARR) directive (external link), @@ -488,15 +488,15 @@

      Introduction

      that it can find by crawling recursively through a THREDDS (sub) catalog. There are many forms of THREDDS catalog URLs. This option REQUIRES a THREDDS .xml URL with /catalog/ in it, for example, -
      http://oceanwatch.pfeg.noaa.gov/thredds/catalog/catalog.xml or -
      http://oceanwatch.pfeg.noaa.gov/thredds/catalog/Satellite/aggregsatMH/chla/catalog.xml +
      https://oceanwatch.pfeg.noaa.gov/thredds/catalog/catalog.xml or +
      https://oceanwatch.pfeg.noaa.gov/thredds/catalog/Satellite/aggregsatMH/chla/catalog.xml
      (note that the comparable .html catalog is at -
      http://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMH/chla/catalog.html ). +
      https://oceanwatch.pfeg.noaa.gov/thredds/Satellite/aggregsatMH/chla/catalog.html ).
      If you have problems with EDDGridFromThreddsCatalog:
      • Make sure the URL you are using is valid, includes /catalog/, and ends with /catalog.xml . -
      • If possible, use a public IP address (for example, http://oceanwatch.pfeg.noaa.gov) +
      • If possible, use a public IP address (for example, https://oceanwatch.pfeg.noaa.gov) in the URL, not a local numeric IP address (for example, http://12.34.56.78). If the THREDDS is only accessible via the local numeric IP address, you can use <convertToPublicSourceUrl> @@ -514,8 +514,8 @@

        Introduction

        from all of the EDDGrid datasets in an ERDDAP that have any longitude values
        greater than 180.
        • If possible, use a public IP address (for example, https://oceanwatch.pfeg.noaa.gov)
          in the URL, not a local numeric IP address (for example, https://12.34.56.78).
          If the ERDDAP is only accessible via the local numeric IP address, you can use
          <convertToPublicSourceUrl> so ERDDAP users see the public address, even though
          ERDDAP gets data from the @@ -650,6 +650,20 @@


          (bob.simons at noaa.gov is happy to help.)
        • ncdump
          This special EDDType prints an ncdump (external link)-like printout of the
          header of an .nc file. You can also print the data values for specified
          variables (or enter "nothing" to not print any data values).
          This is useful because, without ncdump, it is hard to know what is in a
          file and thus which EDDType you should specify for GenerateDatasetsXml.
        @@ -743,7 +757,7 @@

        Notes

        save the data to NetCDF v3 .nc files and have ERDDAP serve the data from the
        new data source. See EDDTableFromDatabase and EDDTableFromCassandra.
      • Not Supported Data Sources -
        ERDDAP can support a large number of types of data source, but the world is
        filled with 1000's (millions?) of different @@ -795,7 +809,7 @@


      • Encoding Special Characters
        Since datasets.xml is an XML file, you MUST &-encode (external link)
        "&", "<", and ">" @@ -807,11 +821,11 @@
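        For example (an illustrative sketch with a hypothetical URL), a sourceUrl
        whose query is ?foo=1&bar=2 must appear in datasets.xml with the "&" &-encoded:
        <sourceUrl>https://www.example.com/service?foo=1&amp;bar=2</sourceUrl>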


      • XML doesn't tolerate syntax errors.
        After you edit the datasets.xml file, it is a good idea to verify that the
        result is well-formed XML (external link) by pasting the XML text into an
        XML checker like xmlvalidation (external link).
        @@ -904,11 +918,9 @@


        to convert the units to meters.
      • If your data doesn't fit these requirements, use a different destinationName
        (for example, aboveGround, distanceToBottom).
      • If you know the vertical CRS, please specify it in the metadata, e.g.,
        "EPSG:5829" (instantaneous height above sea level),
        "EPSG:5831" (instantaneous depth below sea level), or
        "EPSG:5703" (NAVD88 height).
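      For example (a sketch; ERDDAP doesn't prescribe a specific attribute name for
      this, so using a comment attribute is just one reasonable choice, an assumption):
        <att name="comment">Vertical CRS: EPSG:5703 (NAVD88 height)</att>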
    • For the "time" variable:
        @@ -1020,7 +1032,7 @@


        The NcML files MUST have the extension .ncml. See the Unidata NcML documentation (external link). @@ -1127,27 +1139,56 @@


      • No <include> Option
        All of the setup information for all of the datasets must be in one file:
        datasets.xml .

        The biggest advantage of this approach for ERDDAP is:
        ERDDAP frequently needs to work through this entire file, for example,
        to reload a set of flagged datasets. Since all of the information is in one
        file, ERDDAP can do this very quickly.
        When parsing the datasets.xml file, ERDDAP has a system to jump very quickly
        to a corresponding </dataset> tag, allowing it to rapidly parse even very
        large datasets.xml files.

        The biggest advantage of this approach for administrators is:
        if you want to make the same or similar changes to multiple datasets,
        you can do it quickly and easily, without opening and editing numerous files.

        Think of datasets.xml as an interface between your system and ERDDAP.
        ERDDAP doesn't care how you make datasets.xml, so you may create it any way
        you like:
        • by hand in a text editor,
        • by a script which concatenates numerous files, each with the datasets.xml
          chunk for one dataset,
        • or by a script which generates datasets.xml based on metadata for all
          datasets stored in a relational database.

        Some people have asked for datasets.xml to support references to external
        files which have chunks of XML which define one or more datasets, for example,
          <include dataset1.xml/>
        ERDDAP doesn't support that, mostly because each reference to an external
        file would be a separate file that ERDDAP has to open.
        If there were a large number of references, that would greatly slow down
        ERDDAP's processing of datasets.xml.

        Fortunately, there is a good compromise (see the sub-file sketch after this list):
        1. Make several sub-files, for example, start.xml, datasets1.xml,
           datasets2.xml, ... end.xml
           Use whatever names you want for the files.
        2. Write a Linux script or DOS batch (.bat) file to concatenate the files into one file.
           Linux example:  cat start.xml datasets1.xml datasets2.xml end.xml > datasets.xml
           DOS example: type start.xml datasets1.xml datasets2.xml end.xml > datasets.xml
        3. Then, whenever you make a change to one of the sub-files, rerun the script
           to regenerate the complete datasets.xml file.
           You know when one of the sub-files has changed, so it makes sense to use
           that information to trigger the recreation of the complete datasets.xml file.
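        For example (a minimal sketch of the sub-files; the XML declaration and its
        ISO-8859-1 encoding are assumptions -- match whatever your complete
        datasets.xml already uses):
        start.xml:      <?xml version="1.0" encoding="ISO-8859-1" ?>
                        <erddapDatasets>
        datasets1.xml:  one or more <dataset ...> ... </dataset> chunks
        datasets2.xml:  more <dataset ...> ... </dataset> chunks
        end.xml:        </erddapDatasets>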
      • Limits to the Size of a Dataset
        You'll see many references to "2 billion" below. More accurately, that is a reference to 2,147,483,647 (2^31-1), @@ -1228,7 +1269,7 @@


        Prior to ERDDAP version 1.62 (released in June 2015), ERDDAP used/recommended
        the original version 1.0 of the NetCDF Attribute Convention for Dataset
        Discovery, which was referred to as "Unidata Dataset Discovery v1.0" in the
        global Conventions and Metadata_Conventions attributes. @@ -1279,7 +1320,7 @@


        Please add these attributes to the dataset's global <addAttributes>.
        <att name="creator_name">NOAA NMFS SWFSC ERD</att>
        <att name="creator_email">erd.data@noaa.gov</att>
        <att name="creator_url">https://www.pfeg.noaa.gov</att>

          That's it. I hope that wasn't too hard. @@ -1406,7 +1447,7 @@


      • EDDTableFromHyraxFiles aggregates data from files with several variables with
        shared dimensions served by a Hyrax OPeNDAP server (external link).
      • EDDTableFromMultidimNcFiles @@ -1422,7 +1463,14 @@


        CF Discrete Sampling Geometries (DSG) (external link) conventions.
        But for files using one of the multidimensional CF DSG variants, use
        EDDTableFromMultidimNcFiles instead.
      • EDDTableFromNccsvFiles aggregates data from NCCSV ASCII .csv files.
      • EDDTableFromNOS handles tabular data from NOS XML servers.
      • EDDTableFromOBIS @@ -1433,7 +1481,7 @@


        aggregates data from files with several variables with shared dimensions served by a THREDDS OPeNDAP server (external link). @@ -1455,7 +1503,7 @@

        Detailed Descriptions of Dataset Types

        EDDGridFromDap handles grid variables from DAP (external link) servers.
          @@ -1786,16 +1834,17 @@


          EDDTableFromErddap handles tabular data from a remote ERDDAP server.
        • EDDGridFromErddap and EDDTableFromErddap behave differently from all other
          types of datasets in ERDDAP.
          • Like other types of datasets, these datasets get information about the
            dataset from the source and keep it in memory.
          • Like other types of datasets, when ERDDAP searches for datasets,
            displays the Data Access Form (datasetID.html),
            or displays the Make A Graph form (datasetID.graph),
            ERDDAP uses the information about the dataset which is in memory.
          • Unlike other types of datasets, when ERDDAP receives a request for data or
            images from these datasets, ERDDAP redirects (external link) @@ -1804,34 +1853,45 @@


              This is very efficient (CPU, memory, and bandwidth), because otherwise:
              1. The composite ERDDAP has to send the request to the other ERDDAP
                 (which takes time).
              2. The other ERDDAP has to get the data, reformat it, and transmit the
                 data to the composite ERDDAP.
              3. The composite ERDDAP has to receive the data (using bandwidth),
                 reformat it (using CPU and memory), and transmit the data to the user
                 (using bandwidth).
              By redirecting the request and allowing the other ERDDAP to send the
              response directly to the user, the composite ERDDAP spends essentially
              no CPU time, memory, or bandwidth on the request.
            • The redirect is transparent to the user regardless of the client software
              (a browser or any other software or command line tool).
          • You can tell ERDDAP not to redirect any user requests by setting
            <redirect>false</redirect>, but this negates most of the advantages of the
            ...FromErddap dataset type.
        • Normally, when an EDDGridFromErddap or EDDTableFromErddap is (re)loaded on
          your ERDDAP, it tries to add a subscription to the remote dataset via the
          remote ERDDAP's email/URL subscription system.
          That way, whenever the remote dataset changes, the remote ERDDAP contacts the
          setDatasetFlag URL on your ERDDAP so that the local dataset is reloaded ASAP
          and so that the local dataset always mimics the remote dataset.
          So, the first time this happens, you should get an email requesting that you
          validate the subscription. However, if the local ERDDAP can't send an email
          or if the remote ERDDAP's email/URL subscription system isn't active, you
          should email the remote ERDDAP administrator and request that s/he manually
          add <onChange>...</onChange> tags to all of the relevant datasets to call
          your dataset's setDatasetFlag URLs.
          See your ERDDAP daily report for a list of setDatasetFlag URLs, but just
          send the ones for EDDGridFromErddap and EDDTableFromErddap datasets to the
          remote ERDDAP administrator.
        • EDDGridFromErddap and EDDTableFromErddap are the basis for
          grids/clusters/federations of ERDDAPs, which efficiently distribute the CPU
          usage (mostly for making maps), memory usage, dataset storage, and bandwidth
          usage of a large data center.
        • EDDGridFromErddap and EDDTableFromErddap can't be used with remote datasets
          that @@ -1840,14 +1900,15 @@


          For security reasons, EDDGridFromErddap and EDDTableFromErddap don't support
          the <accessibleTo> tag. See ERDDAP's security system for restricting access
          to some datasets to some users.
        • You can use the GenerateDatasetsXml program to make the datasets.xml chunk
          for this type of dataset. But you can do these types of datasets easily by hand.
        • The skeleton XML for an EDDGridFromErddap dataset is very simple, because
          the intent is just to mimic the remote dataset which is already suitable for
          use in ERDDAP:
           <dataset type="EDDGridFromErddap" datasetID="..." active="..." >
          @@ -1862,6 +1923,8 @@


           <fgdcFile>...</fgdcFile> <!-- 0 or 1 -->
           <iso19115File>...</iso19115File> <!-- 0 or 1 -->
           <onChange>...</onChange> <!-- 0 or more -->
           <redirect>true(default)|false</redirect> <!-- 0 or 1; false tells ERDDAP
             never to redirect a user's request -->
          </dataset>
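          For example (an illustrative sketch -- both datasetIDs and the remote
          ERDDAP's URL are hypothetical; point sourceUrl at the remote dataset's
          griddap URL):
           <dataset type="EDDGridFromErddap" datasetID="localDatasetID" active="true">
               <sourceUrl>https://someRemoteErddap.example.org/erddap/griddap/remoteDatasetID</sourceUrl>
           </dataset>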
        • The skeleton XML for an EDDTableFromErddap dataset @@ -1878,6 +1941,7 @@


           <fgdcFile>...</fgdcFile> <!-- 0 or 1 -->
           <iso19115File>...</iso19115File> <!-- 0 or 1 -->
           <onChange>...</onChange> <!-- 0 or more -->
           <redirect>true(default)|false</redirect> <!-- 0 or 1; false tells ERDDAP
             never to redirect a user's request -->
          </dataset>
          @@ -1885,14 +1949,15 @@


          EDDGridFromEtopo just serves the ETOPO1 Global 1-Minute Gridded Elevation
          Data Set (external link) (Ice Surface, grid registered, binary, 2-byte int:
          etopo1_ice_g_i2.zip) which is distributed with ERDDAP.
          • Only two datasetID's are supported for EDDGridFromEtopo, so that you can
            access the data with longitude values -180 to 180, or longitude values 0 to 360.
          • There are never any sub tags, since the data is already described within ERDDAP.
          • So the two options for EDDGridFromEtopo datasets are (literally):
            @@ -1908,7 +1973,8 @@


        • EDDGridFromMergeIRFiles handles data from gridded MergeIR .gz files,
          .ncml files, and NetCDF (v3 or v4) .nc (external link) files.
          This may work with other file types (for example, BUFR), we just haven't
          tested it -- @@ -2029,21 +2095,28 @@


            There is an additional pseudo dataType, timeFormat=stringTimeFormat, which
            tells ERDDAP that the value is a String timeStamp in the format specified
            by the stringTimeFormat, which uses the java.time.DateTimeFormatter
            (external link) specification.
            In most cases, the stringTimeFormat you need will be a variation of one of
            these formats (see the example after this list):
            • yyyy-MM-dd'T'HH:mm:ss.SSSZ - which is the ISO 8601:2004(E) date time
              format. You may need a shortened version of this, e.g.,
              yyyy-MM-dd'T'HH:mm:ss or yyyy-MM-dd.
            • yyyyMMddHHmmss.SSS - which is the compact version of the ISO 8601 date
              time format. You may need a shortened version of this, e.g.,
              yyyyMMddHHmmss or yyyyMMdd.
            • M/d/yyyy H:mm:ss.SSS - which is the U.S. slash date format.
              You may need a shortened version of this, e.g., M/d/yyyy .
            • yyyyDDDHHmmssSSS - which is the year plus the zero-padded day of the year
              (e.g., 001 = Jan 1, 365 = Dec 31 in a non-leap year; this is sometimes
              erroneously called the Julian date).
              You may need a shortened version of this, e.g., yyyyDDD .
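            For example (a sketch based on the description above; the sourceName is
            hypothetical):
            <dataVariable>
                <sourceName>fileTime</sourceName>
                <destinationName>time</destinationName>
                <dataType>timeFormat=yyyyMMddHHmmss</dataType>
            </dataVariable>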
          • extractRegex - This is the @@ -2052,7 +2125,7 @@


            regular expression (external link) (tutorial (external link))
            @@ -2292,7 +2365,9 @@


            <pathRegex> tag to specify a regular expression which limits which paths
            (which subdirectories) will be included in the dataset. @@ -2441,13 +2516,14 @@


            <pathRegex>...</pathRegex> <!-- 0 or 1. Only directory names which match
              the pathRegex (default=".*") will be accepted. -->
            <fileNameRegex>...</fileNameRegex> <!-- 0 or 1. A regular expression
              (external link) (tutorial (external link)) describing valid data file
              names, for example, ".*\.nc" for all .nc files. -->
            @@ -2533,7 +2609,7 @@


            files, .ncml files, and NetCDF (v3 or v4) .nc (external link) files.
            This may work with other file types (for example, BUFR), we just haven't
            tested it -- please send us @@ -2569,9 +2645,9 @@


          • It unpacks variables that are packed with scale_factor and/or add_offset.
          • It promotes integer variables that have _Unsigned="true" attributes to the
            next larger integer data type so that the values appear as the unsigned
            values. For example, an _Unsigned="true" byte (8 bit) variable becomes a
            signed short (16 bit) variable.
          • It converts _FillValue and missing_value values to be NaN's (or MAX_VALUE
            for integer data types). @@ -2860,11 +2936,13 @@


            regular expression (external link) (regex) (tutorial (external link))
            is .*\.nc
            recursive can be "true" or "false".
            Only directory names which match the <pathRegex> (default=".*") will be
            accepted.
            An example of a thredds catalogUrl is
            http://thredds1.pfeg.noaa.gov/thredds/catalog/Satellite/aggregsatMH/chla/catalog.xml

            ... the statement is logged to log.txt as
              statement as text: theStatementAsText

            Note that the version of the statement you see will be a text representation of the statement and will only have "?" where constraint values will be placed.
            Not so simple - Unfortunately, CQL has many restrictions on which columns
            can be queried with which types of constraints; for example, partition key
            columns can be constrained with = and IN,
            so ERDDAP sends some constraints to Cassandra and applies all constraints
            after the data is received from Cassandra.
            To help ERDDAP deal efficiently with Cassandra, you need to specify
            <partitionKeySourceNames>, <clusterColumnSourceNames>, and
            <indexColumnSourceNames> in datasets.xml for this dataset.
            These are the most important ways to help ERDDAP work efficiently with
            Cassandra. @@ -3322,6 +3404,36 @@


          • <partitionKeyCSV> - If this is specified, ERDDAP will use it instead of
            asking Cassandra for the partitionKey information each time the dataset is
            reloaded. This provides the list of distinct partition key values, in the
            order they'll be used.
            Times must be specified as seconds since 1970-01-01T00:00:00Z.
            But there are also two special alternate ways to specify times (each
            encoded as a string):
            1) time(anISO8601Time) (MAY be encoded as a string)
            2) "times(anISO8601StartTime, strideSeconds, stopTime)" (MUST be encoded as a string)
            stopTime can be an ISO8601Time or a "now-nUnits" time (e.g., "now-3minutes").
            stopTime doesn't have to be an exact match of startTime + x strideSeconds.
            A row with a times() value gets expanded into multiple rows before every
            query, so the list of partitionKeys can be always perfectly up-to-date.
            For example,
            <partitionKeyCSV>
            deviceid,date
            1001,"times(2014-11-01T00:00:00Z, 86400, 2014-11-02T00:00:00Z)"
            1007,"time(2014-11-07T00:00:00Z)"
            1008,time(2014-11-08T00:00:00Z)
            1009,1.4154912E9
            </partitionKeyCSV>
            expands into this table of partition key combinations:
            deviceid,date
            1001,1.4148E9
            1001,1.4148864E9
            1007,1.4153184E9
            1008,1.4154048E9
            1009,1.4154912E9
          • <clusterColumnSourceNames> - Cassandra accepts SQL-like constraints on cluster columns, which are the columns that form the second part of the primary key @@ -3451,7 +3563,8 @@


            typeLists - ERDDAP's <dataType> tag for Cassandra dataVariables can include
            the regular ERDDAP dataTypes (see above) plus several special dataTypes
            that can be used for Cassandra list columns: booleanList, byteList,
            shortList, intList, longList, floatList, doubleList, @@ -3487,7 +3600,8 @@
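            For example (a sketch; the sourceName is hypothetical), a Cassandra
            list<double> column could be declared as:
            <dataVariable>
                <sourceName>depthReadings</sourceName>
                <dataType>doubleList</dataType>
            </dataVariable>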


              <dataType>double</dataType>
            and in <addAttributes> set
              <att name="units">seconds since 1970-01-01T00:00:00Z</att> . -
          • Suggestion: If the data is a time range, it is useful to have the timestamp values refer to +
          • Suggestion: If the data is a time range, it is useful to have the timestamp + values refer to the center of the implied time range (for example, noon). For example, if a user has data for 2010-03-26T13:00Z from another dataset and they want the closest data from this Cassandra dataset that has data for each day, @@ -3495,9 +3609,10 @@


            ERDDAP has a utility to Convert a Numeric Time to/from a String Time.
          • See How ERDDAP Deals with Time.
             
          @@ -3570,7 +3685,8 @@


          When in ERDDAP, the password and other connection properties are stored in @@ -3594,7 +3710,8 @@


          . They just specify what the user wants.
          They don't include a specification or hints for how the query is to be
          handled or optimized.
          So there is no way for ERDDAP to generate the query in such a way that it
          helps Cassandra optimize the query (or in any way specifies how the query is
          to be handled). In general, it is up to the Cassandra administrator to set
          things up @@ -3618,7 +3735,8 @@


           
        • Make Indexes (external link) for Commonly Constrained Variables -
          @@ -3662,8 +3780,8 @@


          The ResultSet must have had dataType=sometypeList columns (with an average
          of 10 items per list), because ERDDAP expanded the 1200 rows from Cassandra
          into 12000 rows in ERDDAP.
        • ERDDAP always applies all of the user's constraints to the data from @@ -3672,7 +3790,8 @@


          The most important use of these diagnostic messages is to make sure that
          ERDDAP is doing what you think it is doing. If it isn't (for example, is it
          not reducing the number of distinct combinations as expected?), then you can
          use the information to try to figure out what's going wrong.
            @@ -3711,7 +3830,8 @@


           
      • The skeleton XML for an EDDTableFromCassandra dataset is:
         <dataset type="EDDTableFromCassandra" datasetID="..." active="..." >
           <ipAddress>...</ipAddress>
        @@ -3750,7 +3870,7 @@ 


        EDDTableFromDapSequence handles variables within 1- and 2-level sequences from
        DAP (external link) servers such as @@ -4021,7 +4141,7 @@


        .
        The <driverName> to use in datasets.xml (see below) is probably oracle.jdbc.driver.OracleDriver .
      • For Postgresql, we got the JDBC 4 driver from
        https://jdbc.postgresql.org (external link)
        The <driverName> to use in datasets.xml (see below) is probably org.postgresql.Driver . @@ -4101,7 +4221,7 @@


        Database <dataType> Tags - Because there is some ambiguity about which
        database data types map to which ERDDAP data types, you need to specify a
        <dataType> tag for each <dataVariable> @@ -4300,7 +4420,7 @@
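        For example (a sketch; the column name is hypothetical), a database
        "double precision" column could be declared as:
        <dataVariable>
            <sourceName>water_temp</sourceName>
            <dataType>double</dataType>
        </dataVariable>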


        .
        PostgreSQL will respond much faster if you VACUUM (external link) the table.
        Oracle doesn't have or need an analogous command. @@ -4527,7 +4647,8 @@


        true. The default is false.
      • <pathRegex> - If recursive=true, only directory names which match the
        pathRegex (default=".*") will be accepted. If recursive=false, this is
        ignored. This is rarely used, but can be very useful in unusual circumstances.
        @@ -4562,7 +4683,7 @@


        regular expression (external link) (tutorial (external link)).
        The entire regex must match the entire file name @@ -4604,11 +4725,12 @@


        java.time.DateTimeFormatter (external link) specification which ERDDAP uses
        to parse the strings into time data values (2015-01-03T00:00:00Z).

        In the case of the day variable, if a file has the name jplMURSST20150103000000.png, the extractRegex will match the file name, @@ -4647,7 +4769,7 @@


         <onChange>...</onChange> <!-- 0 or more -->
         <fileDir>...</fileDir>
         <recursive>...</recursive> <!-- true or false (the default) -->
         <pathRegex>...</pathRegex> <!-- 0 or 1. Only directory names which match
           the pathRegex (default=".*") will be accepted. -->
         <fileNameRegex>...</fileNameRegex>
         <addAttributes>...</addAttributes> <!-- 0 or 1 -->
         @@ -4674,7 +4796,7 @@


        EDDTableFromHyraxFiles aggregates data with several variables, each with
        shared dimensions (for example, time, altitude (or depth), latitude,
        longitude), and served by a Hyrax OPeNDAP server (external link).
      • EDDTableFromMultidimNcFiles @@ -4697,11 +4819,20 @@


        conventions.
        But for files using one of the multidimensional CF DSG variants, use
        EDDTableFromMultidimNcFiles instead.
      • EDDTableFromNccsvFiles aggregates data from NCCSV ASCII .csv files.
      • EDDTableFromThreddsFiles aggregates data from files with several variables
        with shared dimensions served by a THREDDS OPeNDAP server (external link).
        @@ -4993,7 +5124,7 @@


        regular expression (external link) (tutorial (external link)) used to
        identify text to be removed @@ -5133,13 +5264,13 @@


        <pathRegex>...</pathRegex> <!-- 0 or 1. Only directory names which match the
          pathRegex (default=".*") will be accepted. -->
        <fileNameRegex>...</fileNameRegex> <!-- 0 or 1. A regular expression
          (external link) (tutorial (external link)) describing valid data file names,
          for example, ".*\.nc" for all .nc files. -->
        @@ -5197,9 +5328,9 @@


        <fileTableInMemory>...</fileTableInMemory> <!-- 0 or 1 (true or false (the default)) -->
        <addAttributes>...</addAttributes> <!-- 0 or 1 -->
        <dataVariable>...</dataVariable> <!-- 1 or more -->
        <!-- For EDDTableFromHyraxFiles, EDDTableFromMultidimNcFiles, EDDTableFromNcFiles,
          EDDTableFromNccsvFiles, and EDDTableFromThreddsFiles, the source's axis variables
          (for example, time) needn't be first or in any specific order. -->
        </dataset>
      •   @@ -5792,7 +5923,7 @@


        EDDTableFromHyraxFiles aggregates data files with several variables, each with
        one or more shared dimensions (for example, time, altitude (or depth),
        latitude, longitude), and served by a Hyrax OPeNDAP server (external link).
          @@ -5834,12 +5965,9 @@


          EDDTableFromFiles, for information on how this class works and how to use this class.
          • If the files are multidimensional CF DSG variants, use this dataset type
            instead of EDDTableFromNcCFFiles.
          • For new tabular datasets from .nc files, use this option before trying the
            older EDDTableFromNcFiles. @@ -6007,6 +6135,10 @@


            EDDTableFromFiles, for information on how this class works and how to use
            this class.

            For files using one of the multidimensional CF DSG variants, use
            EDDTableFromMultidimNcFiles instead.

            The CF DSG conventions define dozens of file formats and include numerous
            minor variations. This class deals with all of the variations we are aware
            of, but we may have missed one (or more). So if this class can't read data
            from your CF DSG files, @@ -6022,12 +6154,35 @@


            EDDTableFromNccsvFiles aggregates data from NCCSV ASCII .csv files.
            See this class' superclass, EDDTableFromFiles, for information on how this
            class works and how to use this class.

            • We strongly recommend using the GenerateDatasetsXml program to make a
              rough draft of the datasets.xml chunk for this dataset.
              You can then edit that to fine-tune it.

              The first thing GenerateDatasetsXml does for this type of dataset, after
              you answer the questions, is print the ncdump-like structure of the
              sample file. So if you enter a few goofy answers for the first loop
              through GenerateDatasetsXml, at least you'll be able to see if ERDDAP can
              read the file and see what dimensions and variables are in the file.
              Then you can give better answers for the second loop through
              GenerateDatasetsXml.

            EDDTableFromNOS handles data from a NOAA NOS (external link) source, which
            uses SOAP+XML for requests and responses. It is very specific to NOAA
            NOS's XML. See the sample EDDTableFromNOS dataset in datasets2.xml.
              @@ -6155,7 +6310,7 @@


            system. Most ERDDAP servers express units with the UDUNITS (external link)
            system. If you need to convert between the two systems, you can use
            @@ -6189,7 +6344,7 @@


            UDUnits (external link)-compatible string (with the format @@ -6197,21 +6352,21 @@


            java.time.DateTimeFormatter (external link) pattern (which is mostly
            compatible with java.text.SimpleDateFormat) describing how to interpret
            string times (for example, the ISO8601TZ_FORMAT
            "yyyy-MM-dd'T'HH:mm:ssZ"). -->
            <observationOfferingIdRegex>...</observationOfferingIdRegex>
            <!-- Only observationOfferings with IDs (usually the station names) which
              match this regular expression (external link) (tutorial (external link))
              will be included in the dataset (".+" will catch all station names). -->
            @@ -6234,7 +6389,7 @@


            THREDDS OPeNDAP server (external link). @@ -6581,10 +6736,10 @@

            Details

            There can be 0 or more of these tags. For more information see
            <sourceUrl>. For example,
            <convertToPublicSourceUrl from="https://192.168.31.18/" to="https://oceanwatch.pfeg.noaa.gov/" />
            will convert a matching local sourceUrl (such as
            https://192.168.31.18/thredds/dodsC/satellite/BA/ssta/5day)
            into a public sourceUrl
            (https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/BA/ssta/5day).

            But, for security reasons and reasons related to the subscription system, DON'T USE THIS TAG! @@ -6597,8 +6752,9 @@



            ping some.domain.name
          • <requestBlacklist> is an OPTIONAL tag within an <erddapDatasets> tag which
            contains a comma-separated list of numeric IP addresses which will be
            blacklisted.
            • This can be used to fend off a Denial of Service attack, a
              web robot (external link), or any other type of troublesome user.
            • Troublesome User - If ERDDAP slows to a crawl or freezes/stops, the cause
              is often a troublesome user who is running more than one script at once
              and/or making a large number of very large, extremely inefficient, or
              invalid requests. Look in log.txt to see if this is the case and to find
              the numeric IP address of the troublesome user.
              If this is the problem, you should probably blacklist that user.

              When ERDDAP gets a request from a blacklisted IP address, it will return
              HTTP Error 403: Forbidden.
              The accompanying text error message encourages the user to email you, the
              ERDDAP administrator, to work out the problems.
              If they take the time to read the error message (many apparently don't)
              and contact you, you can then work with them to get them to run just one
              script at a time, make more efficient requests, fix the problems in their
              script (for example, requesting data from a remote dataset that can't
              respond before timing out), or whatever else was the source of trouble.

              Users are often simply unaware that their requests are troublesome.
              They are often unaware of bugs, gross inefficiencies, or other problems
              with their scripts. They often think that because your ERDDAP offers data
              for free, they can ask for as much data as they want, e.g., by running
              multiple scripts simultaneously.
              You can explain to them that each ERDDAP, no matter how large and
              powerful, has finite resources (CPU time, hard drive I/O, network
              bandwidth, etc.) and it isn't fair if one user requests data in a way
              that crowds out other users or overburdens ERDDAP.
              While there are ways to set up grids/clusters/federations of ERDDAPs to
              make an ERDDAP system with more resources, most ERDDAP administrators
              don't have the money or the manpower to set up such systems, and such a
              system will still be finite.
              Most users will understand and will make the necessary changes so that
              you will remove their IP address from the blacklist.

            • To blacklist a user, add their numeric IP address to the comma-separated
              list of IP addresses in <requestBlacklist> in your datasets.xml file.
              To find the troublesome user's IP address, look in the ERDDAP
              bigParentDirectory/logs/log.txt file (bigParentDirectory is specified in
              @@ -6650,6 +6835,14 @@
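              For example (with hypothetical IP addresses):
              <requestBlacklist>98.76.54.32, 12.34.56.78</requestBlacklist>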



              <subscriptionEmailBlacklist>bob@badguy.com, john@badguy.com</subscriptionEmailBlacklist>
              If an email address on the list has subscriptions, the subscriptions will
              be cancelled. If an email address on the list tries to subscribe, the
              request will be refused.

              You don't need to restart ERDDAP for the changes to
              <subscriptionEmailBlacklist> to take effect.
              The changes will be detected the next time ERDDAP checks if any datasets
              need to be reloaded. Or, you can speed up the process by visiting a
              setDatasetFlag URL for any dataset.
               

            • <user> @@ -6722,9 +6915,52 @@



              where the stored password was generated with
              md5 -djsmith:ERDDAP:myPassword
          • You don't need to restart ERDDAP for the changes to a <user> tag to take
            effect. The changes will be detected the next time ERDDAP checks if any
            datasets need to be reloaded. Or, you can speed up the process by visiting
            a setDatasetFlag URL for any dataset.
             
        • <pathRegex> lets you specify a regular expression which limits which paths
          (which subdirectories) will be included in the dataset. The default is .*,
          which matches all paths.
          This is a rarely used, rarely needed, OPTIONAL tag for EDDGridFromFiles
          datasets, EDDTableFromFiles datasets, and a few other dataset types.
          However, when you need it, you really need it.

          The tricky part is that the pathRegex must accept all the paths it
          encounters on its way to the directories with data.
          Regex's with nested capture groups are a good way to deal with this.
          A capture group is enclosed in parentheses and has different options
          separated by |'s, e.g.,
          (option1|option2|option3)
          There can be any number of options.
          An option can be nothing (which is very useful).
          Capture groups can be nested (which is very useful).

          An Example:
          Suppose we have the following directory structure:
          /foo/bar/D0001/a/*.nc
          /foo/bar/D0001/b/*.nc
          /foo/bar/D0002/a/*.nc
          /foo/bar/D0002/b/*.nc
          ...
          and the specified fileDirectory is /foo/bar/, and we just want the .nc files
          in the /a/ directories.
          The solution is to set pathRegex to /foo/bar/(|D[0-9]+/(|a/))
          (shown as a datasets.xml tag below). That says:
          The path must start with /foo/bar/
            That may be followed by nothing or D[0-9]+/
              That may be followed by nothing or a/

          Yes, pathRegex's can be incredibly difficult to formulate.
          If you get stuck, ask a computer programmer (the closest thing in the real
          world to a wizard spouting incantations?) or send an email to
          bob.simons at noaa.gov.
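          In datasets.xml, the worked example above would appear as:
          <pathRegex>/foo/bar/(|D[0-9]+/(|a/))</pathRegex>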

        • <dataset> is an OPTIONAL tag within an <erddapDatasets> tag that (if you include all of the information between @@ -6798,10 +7034,10 @@


          • <accessibleTo> is an OPTIONAL tag within a <dataset> tag that specifies a
            comma-separated list of roles which are allowed to have access to this
            dataset. For example,
            <accessibleTo>RASmith, NEJones</accessibleTo>
            • This is part of ERDDAP's @@ -6997,24 +7233,41 @@


              specified URL. The response will be ignored. For example, the URL might
              tell some other web service to do something.
              • If the URL has a query part (after the "?"), it MUST be already
                percent encoded (external link).
                You need to encode special characters in the constraints (other than
                the initial '&' and the main '=', if any) into the form %HH, where HH
                is the 2 digit hexadecimal value of the character.
                Usually, you just need to convert a few of the punctuation characters:
                % into %25, & into %26, " into %22, < into %3C, = into %3D, > into %3E,
                + into %2B, | into %7C, space into %20,
                and convert all characters above #127 into their UTF-8 form and then
                percent encode each byte of the UTF-8 form into the %HH format
                (ask a programmer for help).
                For example, &stationID>="41004"
                becomes      &stationID%3E=%2241004%22
                Note that percent encoding is generally required when you access ERDDAP
                via software other than a browser. Browsers usually handle percent
                encoding for you.
                In some situations, you need to percent encode all characters other
                than A-Za-z0-9_-!.~'()*, but still don't encode the initial '&' or the
                main '='.
                Programming languages have tools to do this (for example, see Java's
                java.net.URLEncoder (external link) and JavaScript's
                encodeURIComponent() (external link)) and there are
                web sites that percent encode/decode for you (external link).
              • Since datasets.xml is an XML file, you MUST also &-encode ALL '&', '<',
                and '>' in the URL as '&amp;', '&lt;', and '&gt;' after percent encoding.
              • Example: For a URL that you might type into a browser as:
                http://www.company.com/webService?department=R%26D&param2=value2
                You should specify an <onChange> tag via (on one line)
                <onChange>http://www.company.com/webService?department=R%26D&amp;param2=value2</onChange>
            • mailto: - If the action starts with "mailto:", ERDDAP will send an email to the subsequent @@ -7133,7 +7386,7 @@



              right after the <reloadEveryNMinutes> tag for the dataset in datasets.xml.
              The number of milliseconds that you specify can be as small as 1 (to
              ensure that the dataset is always up-to-date).
              A value of 0 (the default) or a negative number turns off the system.
            • Due to their incremental nature, updates should finish very quickly, so
              users should never have to wait a long time.
            • If a second data request arrives before the previous update has finished,
              @@ -7355,7 +7608,7 @@
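            For example (a sketch; the tag described here is <updateEveryNMillis>,
            and the values are illustrative):
            <reloadEveryNMinutes>10080</reloadEveryNMinutes>
            <updateEveryNMillis>1000</updateEveryNMillis>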


              tag that specifies the url source of the data.
              • An example is:
                <sourceUrl>https://oceanwatch.pfeg.noaa.gov/thredds/dodsC/satellite/VH/chla/1day</sourceUrl>
              • In ERDDAP, all datasets will have a "sourceUrl" in the combined global attributes which are shown to the users.
              • For most dataset types, this tag is REQUIRED. @@ -7368,22 +7621,12 @@


                this attribute often just has a placeholder value, for example,
                <att name="name">(local files)</att> .
              • For most datasets, this is the base of the url that is used to request
                data. For example, for DAP servers, this is the url to which .dods,
                .das, .dds, or .html could be added.
              • Since datasets.xml is an XML file, you MUST also encode '&', '<', and
                '>' in the URL as '&amp;', '&lt;', and '&gt;'.
              • For most dataset types, ERDDAP adds the original sourceUrl (the
                "localSourceUrl" in the source code) to the global attributes (where it
                becomes the "publicSourceUrl" in the source code).
                When the data source is local files, ERDDAP adds
                sourceUrl="(local files)" to the global attributes as a security
                precaution. @@ -7394,6 +7637,32 @@


                <convertToPublicSourceUrl> tags to specify how to convert the local sourceUrls to public sourceUrls. +
              • A sourceUrl may begin with + http://, ftp://, https:// and perhaps other prefixes. + https connections read and check the source's digital certificate to ensure + that the source is who they say they are. + In rare cases, this check may fail with the error + "javax.net.ssl.SSLProtocolException: handshake alert: unrecognized_name". + This is probably due to the domain name on the certificate not matching the + domain name that you are using. + You can and should read the details + of the sourceUrl's certificate in your web browser, + notably, the list of "DNS Name"s in the "Subject Alternative Name" section. + +

                In some cases, the sourceUrl you are using may be an alias of the + domain name on the certificate. For example, +
                https://podaac-opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/ + will throw this error, but +
                https://opendap.jpl.nasa.gov/opendap/allData/ccmp/L3.5a/monthly/flk/ + , which uses the domain name on the certificate, won't. + The solution in these cases is therefore to find and use the domain name + on the certificate. + If you can't find it on the certificate, contact the data provider. + +

                In other cases, the domain name on the certificate may be for a + group of names. If this occurs or the problem is otherwise unsolvable, + please email bob.simons at noaa.gov to report the problem. +
                 

              @@ -7407,10 +7676,11 @@


              to make the "combinedAttributes", which are what ERDDAP users see. Thus, you can use addAttributes to redefine the values of sourceAttributes, add new attributes, or remove attributes. -
            • The <addAttributes> tag encloses 0 or more <att> subtags, - which are used +
            • The <addAttributes> tag encloses 0 or more + <att> subtags, which are used to specify individual attributes. -
            • Each attribute consists of a name and a value (which has a specific data type, for example, double). +
            • Each attribute consists of a name and a value (which has a specific data type, + for example, double).
            • There can be only one attribute with a given name. If there are more, the last one has priority.
            • The value can be a single value or a space-separated list of values. @@ -7434,17 +7704,30 @@



              <att name="creator_name">NASA/GSFC OBPG</att>
              • Valid types for single values are - byte (8-bit integer), unsignedShort (16-bit integer), short (16-bit signed integer), - int (32-bit signed integer), long (64-bit signed integer), float (32-bit floating point), - double (64-bit floating point), + byte (8-bit integer), short (16-bit signed integer), + int (32-bit signed integer), long (64-bit signed integer), + float (32-bit floating point), double (64-bit floating point), + char, and string. For example,
                <att name="scale_factor" type="float">0.1</att> + +

                See these notes about the char data type. +
                See these notes about the long data type. +

              • Valid types for space-separated lists of values (or single values) are - byteList, - unsignedShortList, shortList, intList, longList, floatList, doubleList. + byteList, shortList, + unsignedShortList, charList, intList, longList, floatList, doubleList. For example,
                <att name="actual_range" type="doubleList">10.34 23.91</att> -
                There is no stringList. Store the String values as a multi-line String. For example, +
                An unsignedShortList lets you specify a list of unsigned shorts, but + they will be converted into a list of the corresponding Unicode characters (e.g., + "65 67 69" will be converted into "A C E". +
                If you specify a charList, encode any special characters (e.g., space, + double quotes, backslash, <#32, or >#127) as you would + encode them in the data section of an NCCSV file + (e.g., " ", "\"" or """", "\\", "\n", "\u20ac"). +
                There is no stringList. Store the String values as a multi-line String. + For example,
                <att name="history">2011-08-05T08:55:02Z ATAM - made CF-1.6 compliant.
                2012-04-08T08:34:58Z ATAM - Changed 'height' from double to float.</att>
                @@ -7494,9 +7777,11 @@


                and FGDC metadata are from the ACDD metadata standard (external link)
                and are so noted below.
              • Many global attributes are special in that ERDDAP looks for them and
                uses them in various ways.
                For example, a link to the infoUrl is included on web pages with lists
                of datasets, and other places, so that users can find out more about
                the dataset.
              • When a user selects a subset of data, globalAttributes related to the variable's @@ -7549,12 +7834,14 @@


              • cdm_data_type (from the ACDD (external link) metadata standard) is a
                global attribute that indicates the Common Data Model (external link)
                data type for the dataset. For example, @@ -7581,8 +7868,8 @@


                change in the near future.
              • EDDTable uses cdm_data_type in a rigorous way. If a dataset doesn't
                comply with the cdm_data_type's requirements, the dataset will fail to
                load and will generate an error message. (That's a good thing, in the
                sense that the error message will tell you what is wrong so that you
                can fix it.)

                For all of these datasets, in the Conventions and @@ -7598,9 +7885,12 @@


              • As with all cdm_data_types other than Other, Point datasets MUST have
                longitude, latitude, and time variables.
              • Profile - for data from multiple depths at one or more
                longitude,latitude locations.
                • The dataset MUST include the globalAttribute cdm_profile_variables,
                  where the value is a comma-separated list of the variables which have
                  the information about each profile. Thus, for a given profile, the
                  values of these variables will be constant. @@ -7608,9 +7898,12 @@


                  to identify the variable that uniquely identifies the profiles.
                  If no other variable is suitable, consider using the time variable.
              • TimeSeries - for data from a set of stations with fixed
                longitude,latitude(,altitude).
                • The dataset MUST include the globalAttribute cdm_timeseries_variables,
                  where the value is a comma-separated list of the variables which have
                  the information about each station. Thus, for a given station, the
                  values of these variables will be constant. @@ -7620,9 +7913,11 @@


                If the longitude and latitude don't vary, include them in the cdm_timeseries_variables. If they do vary, don't include them in the cdm_timeseries_variables.
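                For example (a sketch; station_id is a hypothetical variable name, and
                cf_role=timeseries_id marks the variable that uniquely identifies each
                station), the global addAttributes could include
                <att name="cdm_data_type">TimeSeries</att>
                <att name="cdm_timeseries_variables">station_id, longitude, latitude</att>
                and the station_id variable's addAttributes could include
                <att name="cf_role">timeseries_id</att>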
            • TimeSeriesProfile - for profiles from a set of stations.
              • The dataset MUST include the globalAttribute cdm_timeseries_variables, where the value is a comma-separated list of the variables which have the information about each station. Thus, for a given station, the values of these variables will be constant. …
              • Trajectory - for data from a set of longitude,latitude(,altitude) paths called trajectories.
                • The dataset MUST include the globalAttribute cdm_trajectory_variables, where the value is a comma-separated list of the variables which have the information about each trajectory. Thus, for a given trajectory, the values of these variables will be constant.
                • One of the variables MUST have the attribute cf_role=trajectory_id to identify the variable that uniquely identifies the trajectories.
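                  A sketch for a hypothetical trajectory dataset (names illustrative):
                    <att name="cdm_data_type">Trajectory</att>
                    <att name="cdm_trajectory_variables">trajectory_id</att>
                  with <att name="cf_role">trajectory_id</att> added to the trajectory_id variable's <addAttributes>.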
              • TrajectoryProfile - for profiles taken along trajectories.
                • The dataset MUST include the globalAttribute cdm_trajectory_variables, where the value is a comma-separated list of the variables which have the information …
                  … .ncCF files (.nc files which comply with the Contiguous Ragged Array Representations associated with the dataset's cdm_data_type, as defined in the newly ratified Discrete Sampling Geometries (external link) chapter of the CF 1.6 (external link) metadata conventions, …
                • contributor_name (from the ACDD (external link) metadata standard) …
                • contributor_role (from the ACDD (external link) metadata standard) …
                • Conventions (from the CF (external link) metadata standard) …

                  <att name="Conventions">COARDS, CF-1.6, ACDD-1.3</att>
                  The common metadata conventions used in ERDDAP are:
                  • COARDS Conventions (external link) is the precursor to CF.
                  • Climate and Forecast (CF) Conventions (external link) is the source of many of the recommended and required attributes in ERDDAP. …

                  • The NetCDF Attribute Convention for Dataset Discovery (ACDD) is the source of many of the recommended and required attributes in ERDDAP. The original 1.0 version of ACDD (a brilliant piece of work by Ethan Davis) was identified as Unidata Dataset Discovery v1.0 (external link). The current (starting in 2015) 1.3 version of ACDD is identified as ACDD-1.3 (external link). If your datasets have been using Unidata Dataset Discovery v1.0, …


                  • creator_name (from the ACDD (external link) metadata standard) … most responsible for the creation (or most recent reprocessing) of this data. For example,
                    <att name="creator_name">NOAA NMFS SWFSC ERD</att>
                    If the data was extensively reprocessed (for example, satellite data from level 2 to level 3 or 4), then usually the reprocessor is listed as the creator and the original creator is listed via contributor_name. Compared to project, this is more flexible, …

                  • creator_email (from the ACDD (external link) metadata standard) …
                  • creator_url (from the ACDD (external link) metadata standard) …
                  • date_created (from the ACDD (external link) metadata standard) … (for example, processed into this form), in ISO 8601 format. For example,
                    <att name="date_created">2010-01-30</att>
                    If data is periodically added to the dataset, this is the first date that the original data was made available.
                  • date_modified (from the ACDD (external link) metadata standard) …
                  • date_issued (from the ACDD (external link) metadata standard) is the RECOMMENDED way to identify the date on which the data was first made available to others, in ISO 8601 format, for example, 2012-03-15. For example,
                    <att name="date_issued">2010-07-30</att>
                    For example, the dataset may have a date_created of 2010-01-30, but was only made publicly available 2010-07-30. date_issued is less commonly used than date_created and date_modified. If date_issued is omitted, it is assumed to be the same as the date_created.
                  • drawLandMask - This is a RECOMMENDED global attribute used by ERDDAP (and no metadata standards) which specifies the default value for the "Draw Land Mask" option on the dataset's Make A Graph form (datasetID.graph) and for the &.land parameter in a URL requesting a graph/map of the data. For example,
                    <att name="drawLandMask">over</att>
                    (However, if drawLandMask is specified in a variable's attributes, that value has precedence.)
                    • For EDDGrid datasets, this specifies whether the land mask on a map is drawn over or under the grid data. over is recommended for oceanographic data (so that grid data over land is obscured by the landmask). under is recommended for all other data.
                    • For EDDTable datasets: over makes the land mask on a map visible (land appears as a uniform gray area). over is commonly used for purely oceanographic datasets. under makes the land mask invisible (topography information is displayed for ocean and land areas). under is commonly used for all other data.
                    • If any other value (or no value) is specified, the drawLandMask value from setup.xml is used. If none is specified there, over is the default.
                  • featureType (from the CF (external link) metadata standard) …
                    However, if you are using EDDTableFromNcCFFiles to create a dataset from files that follow the CF Discrete Sampling Geometries (DSG) standard (external link), the files themselves must have featureType correctly defined, …
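                    For example (the value shown is hypothetical; it must match the dataset's actual DSG type):
                      <att name="featureType">timeSeries</att>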

                  • history (from the CF (external link) and ACDD (external link) metadata standards) is a RECOMMENDED multi-line string global attribute with a line for every processing step that the data has undergone. For example,
                    <att name="history">2011-08-05T08:55:02Z CMOR: Rewrote data to comply with CF standards.
                    2012-04-08T08:34:58Z CMOR: Converted 'height' type from 'd' to 'f'.</att>
                    • Ideally, each line has an ISO 8601:2004(E) formatted date+timeZ (for example, 2011-08-05T08:55:02Z) followed by a description of the processing step.
                    • ERDDAP creates this if it doesn't already exist.
                    • If it already exists, ERDDAP will append new information to the existing information.
                    • history is important because it allows clients to backtrack to the original source of the data.
                  • infoUrl is a REQUIRED global attribute with the URL of a web page with more information about this dataset (usually at the source institution's web site). For example,
                    <att name="infoUrl">http://www.globec.org/</att>
                    • Either the dataset's global sourceAttributes or its global <addAttributes> MUST include this attribute.
                    • infoUrl is important because it allows clients to find out more about the data from the original source.
                    • ERDDAP displays a link to the infoUrl on the dataset's Data Access Form (datasetID.html), Make A Graph web page (datasetID.graph), and other web pages.
                    • If the URL has a query part (after the "?"), it MUST be already percent encoded (external link). You need to encode special characters in the constraints (other than the initial '&' and the main '=', if any) into the form %HH, where HH is the 2 digit hexadecimal value of the character. Usually, you just need to convert a few of the punctuation characters: % into %25, & into %26, " into %22, < into %3C, = into %3D, > into %3E, + into %2B, | into %7C, space into %20, and convert all characters above #127 into their UTF-8 form and then percent encode each byte of the UTF-8 form into the %HH format (ask a programmer for help).
                      For example, &stationID>="41004"
                      becomes &stationID%3E=%2241004%22
                      Note that percent encoding is generally required when you access ERDDAP via software other than a browser. Browsers usually handle percent encoding for you.
                      In some situations, you need to percent encode all characters other than A-Za-z0-9_-!.~'()*, but still don't encode the initial '&' or the main '='.
                      Programming languages have tools to do this (for example, see Java's java.net.URLEncoder (external link) and JavaScript's encodeURIComponent() (external link)) and there are web sites that percent encode/decode for you (external link).
                    • Since datasets.xml is an XML file, you MUST also &-encode ALL '&', '<', and '>' in the URL as '&amp;', '&lt;', and '&gt;' after percent encoding. (See the sketch after this list.)
                    • infoUrl is unique to ERDDAP. It is not from any metadata standard.
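                    As a sketch (the URL is hypothetical): a query URL like
                      http://www.example.org/data?stationID>="41004"&active=1
                    would be written in datasets.xml with the constraint values percent encoded first and the '&' then &-encoded for XML:
                      <att name="infoUrl">http://www.example.org/data?stationID%3E=%2241004%22&amp;active=1</att>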
                  • institution (from the CF (external link) and ACDD (external link) metadata standards) … (usually <20 characters). For example,
                    <att name="institution">NASA GSFC</att>
                    • Either the dataset's global sourceAttributes or its global <addAttributes> MUST include this attribute.
                    • ERDDAP displays the institution whenever it displays a list of datasets. If an institution's name here is longer than 20 characters, only the first 20 characters will be visible in the list of datasets (but the whole institution can be seen by putting the mouse cursor over the adjacent "?" icon).
                    • If you add institution to the list of <categoryAttributes> in ERDDAP's setup.xml file, users can easily find datasets from the same institution via ERDDAP's "Search for Datasets by Category" on the home page.
                  • keywords (from the ACDD (external link) metadata standard) is a RECOMMENDED comma-separated list of words and short phrases (for example, GCMD Science Keywords (external link)) that describe the dataset in a general way, and not assuming any other knowledge of the dataset (for example, for oceanographic data, include ocean). For example,
                    <att name="keywords">Oceans > Ocean Circulation > Ocean Currents,
                    ano, circulation, coastwatch, currents, derived, eastward, eastward_sea_water_velocity, experimental, hf radio, meridional, noaa, northward, northward_sea_water_velocity, nuevo, ocean, oceans, radio, radio-derived, scan, sea, seawater, velocity, water, zonal</att>
                  • keywords_vocabulary (from the ACDD (external link) metadata standard) is a RECOMMENDED attribute: if you are following a guideline for the words/phrases in your keywords attribute (for example, GCMD Science Keywords), put the name of that guideline here. For example,
                    <att name="keywords_vocabulary">GCMD Science Keywords</att>
                  • license (from the ACDD (external link) metadata standard) … restrictions. For example,
                    <att name="license">[standard]</att>
                    • If "[standard]" occurs in the attribute value, it will be replaced by the standard ERDDAP license from the <standardLicense> tag in messages.xml.
                  • Metadata_Conventions is from the outdated ACDD 1.0 (external link) (which was identified in Metadata_Conventions as "Unidata Dataset Discovery v1.0") metadata standard. The attribute value was a comma-separated list of metadata conventions used by this dataset.
                    If a dataset uses ACDD 1.0, this attribute is STRONGLY RECOMMENDED, for example,
                    <att name="Metadata_Conventions">COARDS, CF-1.6, Unidata Dataset Discovery v1.0</att>
                    But ERDDAP now recommends ACDD-1.3. If you have switched your datasets to use ACDD-1.3, …
                  • processing_level (from the ACDD (external link) metadata standard) … (for example, NASA satellite data processing levels (external link), for example, Level 3) or quality control level (for example, Science Quality) of the data. For example,
                    <att name="processing_level">3</att>
                  • project (from the ACDD (external link) metadata standard) is an OPTIONAL attribute to identify the project that the dataset is part of. For example,
                    <att name="project">GTSPP</att>
                    If the dataset isn't part of a project, don't use this attribute. Compared to creator_name, this is …

                  • publisher_name (from the ACDD (external link) metadata standard) …
                  • publisher_email (from the ACDD (external link) metadata standard) …
                  • publisher_url (from the ACDD (external link) metadata standard) is the RECOMMENDED way to identify a URL for the organization that published the dataset, or a URL with the publisher's information about this dataset (but that is more the purpose of infoUrl). For example, …

                  • sourceUrl is a global attribute with the URL of the source of the data. For example,
                    <att name="sourceUrl">https://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS</att>
                    • ERDDAP usually creates this global attribute automatically. Two exceptions are EDDTableFromHyraxFiles and EDDTableFromThreddsFiles. …

                      <att name="sourceUrl">(local files)</att>
                    • If the source is a local database and the data was created by your organization, use
                      <att name="sourceUrl">(local database)</att>
                    • sourceUrl is important because it allows clients to backtrack to the original source of the data.
                    • sourceUrl is unique to ERDDAP. It is not from any metadata standard.
                  • standard_name_vocabulary (from the ACDD (external link) metadata standard) … For example,
                    <att name="standard_name_vocabulary">CF Standard Name Table v29</att>
                    for version 29 of the CF standard name table (external link). …

                    … <dataVariable> destinationNames to identify variables which have a limited number of values (stated another way: variables for which each of the values has many duplicates). For example,
                    <att name="subsetVariables">station_id, longitude, latitude</att>
                    If this attribute is present, the dataset will have a datasetID.subset web page (and a link to it on every dataset list) which lets users quickly and easily select various subsets of the data.
                    • Each time a dataset is loaded, ERDDAP loads and caches all of the distinct() subsetVariable data. Then, all user requests for distinct() subsetVariable data will be very fast.
                    • The order of the destinationNames you specify determines the sort order on the datasetID.subset web page, so you will usually specify the most important variables first, then the least important. For example, for datasets with time series data for several stations, you might use, for example,
                      <att name="subsetVariables">station_id, longitude, latitude</att>
                      so that the values are sorted by station_id.
                    • Obviously, it is your choice which variables to include in the subsetVariables list, but the suggested usage is:
                      In general, include variables for which you want ERDDAP to display a drop-down list of options on the dataset's Data Access Form (.html) and Make-A-Graph (.graph) web pages.
                      In general, do include variables with information about the dataset's features (the stations, profiles, and/or trajectories, notably from cdm_timeseries_variables, cdm_profile_variables, and cdm_trajectory_variables). There are only a few different values for these variables, so they work well with drop-down lists.
                      Don't ever include any data variables associated with individual observations (e.g., time, temperature, salinity, current speed) in the subsetVariables list. There are too many different values for these variables, so a drop-down list would be slow to load and hard to work with (or not work).
                    • If the number of distinct combinations of these variables is greater than about 1,000,000, you should consider restricting the subsetVariables that you specify to reduce the number of distinct combinations to below 1,000,000; otherwise, the datasetID.subset web pages may be generated slowly. In extreme cases, the dataset may not load in ERDDAP because generating the list of distinct combinations uses too much memory. If so, you MUST remove some variables from the subsetVariables list.
                    • If the number of distinct values of any one subset variable is greater than about 20,000, you should consider not including that variable in the list of subsetVariables; otherwise, it takes a long time to transmit the datasetID.subset, datasetID.graph, and datasetID.html web pages. A compromise is: remove variables from the list when users are not likely to select values from a drop down list.
                    • You should test each dataset to see if the subsetVariables setting is okay. If the source data server is slow and it takes too long (or fails) to download the data, either reduce the number of variables specified or remove the subsetVariables global attribute.
                    • SubsetVariables is very useful. So if your dataset is suitable, please create a subsetVariables attribute.
                    • EDDTableFromSOS automatically adds
                      <att name="subsetVariables">station_id, longitude, latitude</att>
                      when the dataset is created.
                    • Possible warning: if a user using the datasetID.subset web page selects a value which has a carriageReturn or newline character, datasetID.subset will fail. ERDDAP can't work around this issue because of some HTML details. In any case, it is almost always a good idea to remove the carriageReturn and newline characters from the data. To help you fix the problem, if the EDDTable.subsetVariablesDataTable method in ERDDAP detects data values that will cause trouble, it will email a warning with a list of offending values to the emailEverythingTo email addresses specified in setup.xml. That way, you know what needs to be fixed.
                    • Pre-generated subset tables. Normally, when ERDDAP loads a dataset, it requests the distinct() subset variables data table from the data source, just via a normal data request. In some cases, this data is not available from the data source, or retrieving it from the data source may be hard on the data source server. If so, you can supply a table with the information in a .json or .csv file with the name tomcat/content/erddap/subset/datasetID.json (or .csv). …
                    • It MUST have the exact same column names (for example, same case) as <subsetVariables>, but the columns MAY be in any order.
                    • It MAY have extra columns (they'll be removed and newly redundant rows will be removed).
                    • Time and timestamp columns should have ISO 8601:2004(E) formatted date+timeZ strings (for example, 1985-01-31T15:31:00Z).
                    • Missing values should be missing values (not fake numbers like -99).
                    • .json files may be a little harder to create but deal with Unicode characters well. .json files are easy to create if you create them with ERDDAP.
                    • .csv files are easy to work with, but suitable for ISO 8859-1 characters only. .csv files MUST have column names on the first row and data on subsequent rows. …
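                      A minimal sketch of such a pre-generated .csv file (the values are hypothetical), saved as tomcat/content/erddap/subset/datasetID.csv:
                        station_id,longitude,latitude
                        41004,-79.1,32.5
                        41008,-80.9,31.4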

                    • summary (from the CF (external link) and ACDD (external link) metadata standards) is a REQUIRED global attribute with a long description of the dataset (usually <500 characters). For example,
                      <att name="summary">VIIRSN Level-3 Standard Mapped Image, Global, 4km, Chlorophyll a, Daily. The Visible and Infrared Imager/Radiometer Suite (VIIRS) is a multi-disciplinary instrument that flies on the National Polar-orbiting Operational Environmental Satellite System (NPOESS) series of spacecraft, including the NPOESS Preparatory Project (NPP).</att>
                      • Either the dataset's global sourceAttributes or its global <addAttributes> MUST include this attribute.
                      • summary is very important because it allows clients to read a description of the dataset that has more information than the title and thus quickly understand what the dataset is.
                      • Advice: please write the summary so it would work to describe the dataset to some random person you meet on the street or to a colleague. Remember to include the Five W's and one H (external link): Who created the dataset? What information was collected? When was the data collected? Where was it collected? Why was it collected? How was it collected?
                      • ERDDAP displays the summary on the dataset's Data Access Form (datasetID.html), Make A Graph web page (datasetID.graph), and other web pages. ERDDAP uses the summary when creating FGDC and ISO 19115 documents.
                    • title (from the CF (external link) and ACDD (external link) metadata standards) …
                      (usually <=95 characters). For example,
                      <att name="title">VIIRSN Level-3 Mapped, Global, 4km, Chlorophyll a, Daily</att>
                      • Either the dataset's global sourceAttributes or its global <addAttributes> MUST include this attribute.
                      • title is important because every list of datasets presented by ERDDAP (other than search results) lists the datasets in alphabetical order, by title. So if you want to specify the order of datasets, or have some datasets grouped together, you have to create titles with that in mind. Many lists of datasets (for example, in response to a category search) show a subset of the full list and in a different order. So the title for each dataset should stand on its own.
                      • If the title contains the word "DEPRECATED" (all capital letters), then the dataset will get a lower ranking in searches. …

                  • <axisVariable> is used to describe a dimension (also called "axis").
                    For EDDGrid datasets, one or more axisVariable tags is REQUIRED, and all dataVariables always share/use all axis variables. (Why? What if they don't?)
                    There MUST be an axis variable for each dimension of the data variables.
                    Axis variables MUST be specified in the order that the data variables use them.
                    (EDDTable datasets can NOT use <axisVariable> tags.) …
                    <axisVariable> supports the following subtags:
                    • <sourceName> - the data source's name for the variable. This is the name that ERDDAP will use when requesting data from the data source. This is the name that ERDDAP will look for when data is returned from the data source. This is case sensitive. …
                    • This is OPTIONAL. If absent, the sourceName is used.
                    • This is useful because it allows you to change a cryptic or odd sourceName.
                    • destinationName is case sensitive. -
                    • destinationNames MUST start with a letter (A-Z, a-z) and MUST be followed by 0 or more characters (A-Z, a-z, 0-9, and _). ('-' was allowed before ERDDAP version 1.10.) This restriction allows axis variable names to be used as variable names in a programming language (such as Matlab).
                    • In EDDGrid datasets, the longitude, latitude, altitude, depth, and time axis variables are special.
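                    Putting these subtags together, a minimal sketch of an <axisVariable> declaration (the sourceName is hypothetical):
                      <axisVariable>
                          <sourceName>lat</sourceName>
                          <destinationName>latitude</destinationName>
                      </axisVariable>
                    An optional <addAttributes> (described below) may also appear inside the tag.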
                  • <addAttributes> …
                    <dataVariable> supports the following subtags:
                    • <sourceName> - the data source's name for the variable. This is the name that ERDDAP will use when requesting data from the data source. This is the name that ERDDAP will look for when data is returned from the data source. This is case sensitive. …
                      … that isn't in the source dataset, use:
                      <sourceName>=fixedValue</sourceName>
                      The initial equals sign tells ERDDAP that a fixedValue will follow.
                      The other tags for the <dataVariable> work as if this were a regular variable.
                      For example, to create a variable called altitude with a fixed value of 0.0 (float), use:
                      <sourceName>=0</sourceName>
                      <destinationName>altitude</destinationName>
                      <dataType>float</dataType>
                    • <destinationName> - the name for the variable that will be shown to and used by ERDDAP users.
                      • This is OPTIONAL. If absent, the sourceName is used.
                      • This is useful because it allows you to change a cryptic or odd sourceName.
                      • destinationName is case sensitive.
                      • destinationNames MUST start with a letter (A-Z, a-z) and MUST be followed by 0 or more characters (A-Z, a-z, 0-9, and _). ('-' was allowed before ERDDAP version 1.10.) This restriction allows data variable names to be used as variable names in a programming language (like Matlab).
                      • In EDDTable datasets, longitude, latitude, altitude (or depth), and time data variables are special.
                    • <dataType> - specifies the data type coming from the source. (In some cases, for example, when reading data from ASCII files, it specifies how the data coming from the source should be stored.)
                      • This is REQUIRED by some dataset types and IGNORED by others. Dataset types that require this for their dataVariables are: EDDGridFromXxxFiles, EDDTableFromXxxFiles, EDDTableFromMWFS, EDDTableFromNOS, EDDTableFromSOS. Other dataset types ignore this tag because they get the information from the source. -
                      • Valid values are:
                        byte (8-bit signed integer),
                        short (16-bit signed integer),
                        int (32-bit signed integer),
                        long (64-bit signed integer),
                        float (32-bit floating point),
                        double (64-bit floating point),
                        boolean (which becomes a byte with values of 0 or 1),
                        char, and
                        String (internally, a sequence of 2-byte characters that use the Unicode UCS-2 character set (external link)).
                        These are case-sensitive names.
                      • "boolean" is a special case.
                        • Internally, ERDDAP doesn't support a boolean type because booleans can't store missing values.
                        • Also, DAP doesn't support booleans, so there is no standard way to query boolean variables.
                        • Specifying "boolean" for the dataType in datasets.xml will cause boolean values to be stored and represented as bytes: 0=false, 1=true.
                        • Clients can specify constraints by using the numeric values (for example, "isAlive=1"). But ERDDAP administrators need to use the "boolean" dataType in datasets.xml to tell ERDDAP how to interact with the data source.
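                        A sketch (the sourceName is hypothetical): a source column of true/false values could be declared as
                          <dataVariable>
                              <sourceName>isAlive</sourceName>
                              <dataType>boolean</dataType>
                          </dataVariable>
                        after which clients would use numeric constraints such as isAlive=1.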
                           
                      • "char" is a special case. + Internally, ERDDAP fully supports 2-byte UCS-16 (Unicode) char (character) + attribute and data values. + However, their use is discouraged because + many file types either don't support chars or only support + 1-byte chars. + So when ERDDAP writes char data to various file types, + it does the best it can to support 2-byte chars. +
                        • Because OPeNDAP has no separate char data type and only supports 1-byte characters in Strings, char data variables will appear as 1-character-long ISO-8859-1 String variables in OPeNDAP .das, .dds, and .dods responses.
                        • ERDDAP writes char data to JSON files as 1-character-long JSON-encoded Strings, since JSON has no separate char data type.
                        • ERDDAP writes to .csv files with the ISO-8859-1 charset (a 1-byte charset), so characters #0 through #255 will be correctly represented, but characters #256 and above will appear as '?' (a common technique).
                        • You can use char variables to make graphs. ERDDAP will convert the characters to their Unicode code point number, which can be used as numeric data.

                        Because of these problems, the use of the "char" datatype in ERDDAP is discouraged. If your dataset has a char variable, you can sometimes tell ERDDAP to treat it as a String variable.

                      • Char and String Character Sets and Encoding
                        Internally, ERDDAP uses 2-byte characters for char and String variables, which can contain any UCS-2 character (external link). Thus, ERDDAP can read from and write to file types that support all UCS-2 characters, including files that support UTF-8 encoding (external link) of strings, such as all of the JSON variants.
                        However, for char (individual character) variables, many file types (notably NetCDF-3 .nc, .ncHeader, .ncCF and .ncCFMA) only support single-byte characters, and thus can only support 256 different characters from a specific, single-byte character set / code page (external link). When ERDDAP writes data to these files, it uses the ISO-8859-1 character set (external link) and converts any character that isn't in the ISO-8859-1 character set into '?'.

                        Currently, CF still does not have a default charset for char variables or a default encoding for "String" variables. Nor is there an official way to specify a charset or an encoding. Nor is there even an easy way to tell char variables from "String" variables (which are actually stored in NetCDF-3 files as char arrays with an extra dimension) when reading NetCDF-3 files. So, currently, when ERDDAP writes char or String data to these files, it uses the ISO-8859-1 character set (external link) and converts any character that isn't in the ISO-8859-1 character set into '?'.

                      • "long" is a special case. + Internally, ERDDAP fully supports 4-byte unsigned long integer + attribute and data values. + However, their use is discouraged because + many file types don't support longs. + So when ERDDAP writes long data to these file types, + it does the best it can. +
                        • For file types that don't support a long data type, ERDDAP presents long variables and values as doubles. Doubles can exactly represent long values in the range of approximately +/-9.007e+15 (that is, +/-2^53). Larger values are represented as approximations. Examples of this are OPeNDAP .das, .dds, and .dods responses and .nc, .ncHeader, .ncml, .ncCF, and .ncCFMA NetCDF-3 files.
                        • For many file types, like CSV, that represent numbers as strings of digits, ERDDAP writes long values as long values in the hope that the reader will properly read the data. However, spreadsheet programs often convert these numbers into double values.

                        Because of these problems, the use of the "long" datatype in ERDDAP is discouraged. If your dataset has a long variable that only has data within the range -2^53 to 2^53, we strongly recommend converting that variable to be a double variable by adding:
                        <att name="scale_factor" type="double">1.0</att>
                        to the variable's <addAttributes>. That way, users will see that variable as a double variable in all situations in ERDDAP and in all response files.
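                        A sketch of the whole declaration (the sourceName is hypothetical):
                          <dataVariable>
                              <sourceName>observationNumber</sourceName>
                              <dataType>long</dataType>
                              <addAttributes>
                                  <att name="scale_factor" type="double">1.0</att>
                              </addAttributes>
                          </dataVariable>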

                      • If you want to change a data variable from the dataType in the source files (for example, short) into some other dataType in the dataset (for example, int), don't use <dataType> to specify what you want. (It works for some types of datasets, but not others.) …
                        … scale_factor attribute with the new dataType (for example, int) and a value of 1, for example,
                        <att name="scale_factor" type="int">1</att>
                  • <addAttributes> - defines a set of attributes (name = value) which are added to the source's attributes for a variable, to make the combined attributes for a variable. This is OPTIONAL.
                    If the variable's sourceAttributes or <addAttributes> include scale_factor and/or add_offset attributes, their values will be used to unpack the data from the source before distribution to the client. …

                • Variable Attributes / Variable <addAttributes> - <addAttributes> is an OPTIONAL tag within an <axisVariable> or <dataVariable> tag which is used to change the variable's attributes.

                    … to change the variable's attributes. ERDDAP combines a variable's attributes from the dataset's source (sourceAttributes) and the variable's addAttributes which you define in datasets.xml (which have priority) to make the variable's "combinedAttributes", which are what ERDDAP users see. Thus, you can use addAttributes to redefine the values of sourceAttributes, add new attributes, or remove attributes. …
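                    For example, a sketch of a variable's <addAttributes> (the attribute values are hypothetical) that overrides a source units value and adds a long_name:
                      <addAttributes>
                          <att name="units">degree_C</att>
                          <att name="long_name">Sea Water Temperature</att>
                      </addAttributes>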

                    • actual_range (CDC COARDS (external link)) is a RECOMMENDED variable attribute. For example,
                      <att name="actual_range" type="floatList">0.17 23.58</att>
                      • If present, it MUST be an array of two values of the same data type as the variable, specifying the actual (not the theoretical or the allowed) minimum and maximum values of the data for that variable.
                      • If the data is packed with …

                        … type="doubleList">-180 180</att>
                      • For numeric time and timestamp variables, the values specified should be the relevant source (not destination) numeric values. For example, if the source time values are stored as "days since 1985-01-01", then the actual_range should be specified in "days since 1985-01-01". And if you want to refer to NOW as the second value for near-real-time data that …

                        … 1985-01-17 until NOW, use
                        <att name="actual_range" type="doubleList">16 NaN</att>
                      • If actual_range is known (either by ERDDAP calculating it or by you adding it via <addAttributes>), ERDDAP will display it to the user on the Data Access Form (datasetID.html) and Make A Graph web pages (datasetID.graph) for that dataset …

                        … min() and max() functions in requests, which is often very useful.
                      • For all EDDTable... datasets, if actual_range is known (either by you specifying it or by ERDDAP calculating it), ERDDAP will be able to quickly reject any requests for data outside that range. For example, if the dataset's lowest time value corresponds to 1985-01-17, then a request for all data from 1985-01-01 through 1985-01-16 will be immediately rejected with the error message "Your query produced no matching results." This makes actual_range a very important piece of metadata, as it can save ERDDAP a lot of effort and save the user a lot of time.
                      • When a user selects a subset of data and requests a file type …
                        … which are an alternative way to specify the actual_range.
                    • charset
                      • This attribute may only be used with char variables (see also _Encoding, which is only used with String variables).
                      • This attribute is strongly recommended.
                      • This attribute is not from any standard, although charset is part of the HTML content setting.
                      • Internally in ERDDAP, char variables are 2-byte characters that use the Unicode UCS-2 character set (external link).
                      • Many file types only support 1-byte characters and thus need this attribute to identify an associated charset (AKA code page) (external link), which defines how to map the 256 possible values to a set of 256 characters drawn from the UCS-2 character set.
                      • Values for charset are case-insensitive.
                      • In theory, ERDDAP could support charset identifiers from this IANA list (external link), but in practice, ERDDAP currently just supports ISO-8859-1 (note that it has dashes, not underscores), which has the advantage that it is identical to the first 256 characters of Unicode.
                      • The default value is ISO-8859-1.
                      • UTF-8 is not a valid option for charset, since UTF-8 requires between 1 and 4 bytes per character and char variables are composed of individual 1- or 2-byte characters.
                      • This is an ongoing troublesome issue because many source files use charsets that are different from ISO-8859-1 but don't identify the charset. For example, many source data files have some metadata copied and pasted from Microsoft Word on Windows and thus have fancy hyphens and apostrophes from a Windows-specific charset instead of ASCII hyphens and apostrophes. These characters then show up as odd characters or '?' in ERDDAP.
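                      For example (currently the only supported value):
                        <att name="charset">ISO-8859-1</att>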
                    • Color Bar Attributes - There are several OPTIONAL variable attributes which specify the suggested default attributes for a color bar (used to convert data values into colors on images) for this variable.
                      • If present, this information is used as default information by griddap and tabledap whenever you request an image that uses a color bar.
                      • For example, when latitude-longitude gridded data is plotted as a coverage on a map, the color bar specifies how the data values are converted to colors.
                      • Having these values allows ERDDAP to create images which use a consistent color bar across different requests, even when the time or other dimension values vary.
                      • These attribute names were created for use in ERDDAP. They are not from a metadata standard.
                      • WMS - The main requirements for a variable to be accessible via ERDDAP's WMS server are:
                        • The dataset must be an EDDGrid... dataset. …


                      • The attributes related to the color bar are:
                        • colorBarMinimum specifies the minimum value on the colorBar. For example,
                          <att name="colorBarMinimum" type="double">-5</att>


                          • Data values lower than colorBarMinimum are represented by the same color as colorBarMinimum values.
                          • The attribute should be of type="double", regardless of the data variable's type.
                          • The value is usually a nice round number. -
                          • Best practices: We recommend a value slightly higher than the minimum data value.
                          • There is no default value.
                        • colorBarMaximum specifies the maximum value on the colorBar. For example,
                          <att name="colorBarMaximum" type="double">5</att>


                        • Data values higher than colorBarMaximum are represented by the same color as colorBarMaximum values.
                        • The attribute should be of type="double", regardless of the data variable's type.
                        • The value is usually a nice round number. -
                        • Best practices: We recommend a value slightly lower than the maximum data value. +
                        • Best practices: We recommend a value slightly lower than the + maximum data value.
                        • There is no default value.
    • colorBarPalette specifies the palette for the colorBar. For example,
      <att name="colorBarPalette">WhiteRedBlack</att>
      • All ERDDAP installations support these standard palettes: BlackBlueWhite, BlackRedWhite, BlackWhite, BlueWhiteRed, LightRainbow, Ocean, OceanDepth, Rainbow, RedWhiteBlue, ReverseRainbow, Topography, TopographyDepth [added in v1.74], WhiteBlack, WhiteBlueBlack, and WhiteRedBlack.
      • If you have installed additional palettes, you can refer to one of them.
                        • If this attribute isn't present, the default is BlueWhiteRed if -1*colorBarMinimum = colorBarMaximum; otherwise the default is Rainbow.
    • colorBarScale specifies the scale for the colorBar. For example,
      <att name="colorBarScale">Log</att>
      • Valid values are Linear and Log.


  • data_min and data_max - These are RECOMMENDED variable attributes defined in the World Ocean Circulation (external link) metadata description. For example,
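    (a hypothetical snippet; the type and the values are illustrative only and depend on the actual data:)
      <att name="data_min" type="double">0</att>
      <att name="data_max" type="double">32</att>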


    • If present, ERDDAP will extract the information and display it to the user on the Data Access Form (datasetID.html) and Make A Graph web pages (datasetID.graph) for that dataset.
    • This is an alternative to actual_range. All of the documentation for actual_range applies to data_min and data_max.



                        <att name="drawLandMask">under</att>
    • For variables in EDDGrid datasets, this specifies whether the land mask on a map is drawn over or under the grid data. over is recommended for oceanographic data (so that grid data over land is obscured by the landmask). under is recommended for all other data.
    • For variables in EDDTable datasets: over makes the land mask on a map visible (land appears as a uniform gray area). over is commonly used for purely oceanographic datasets. under makes the land mask invisible (topography information is displayed for ocean and land areas). under is commonly used for all other data.
    • If any other value (or no value) is specified, the drawLandMask value from the dataset's global attributes is used.
  • _Encoding
      • This attribute may only be used with String variables.
      • This attribute is strongly recommended.
      • This attribute is from the NetCDF User's Guide (NUG) (external link).
      • Internally in ERDDAP, Strings are a sequence of 2-byte characters that use the Unicode UCS-2 character set (external link).
      • Many file types only support 1-byte characters in Strings and thus need this attribute to identify an associated charset (AKA code page) (external link), which defines how to map the 256 possible values to a set of 256 characters drawn from the UCS-2 character set, and/or the encoding system, e.g., UTF-8 (external link) (which requires between 1 and 4 bytes per character).
      • Values for _Encoding are case-insensitive.
      • In theory, ERDDAP could support _Encoding identifiers from this IANA list (external link), but in practice, ERDDAP currently just supports
          • ISO-8859-1 (note that it has dashes, not underscores), which has the advantage that it is identical to the first 256 characters of Unicode, and
          • UTF-8.
      • When reading source files, the default value is ISO-8859-1, except for netcdf-4 files, where the default is UTF-8.
      • This is an ongoing troublesome issue because many source files use charsets or encodings that are different from ISO-8859-1 but don't identify the charset or encoding. For example, many source data files have some metadata copied and pasted from Microsoft Word on Windows, and thus have fancy hyphens and apostrophes from a Windows-specific charset instead of ASCII hyphens and apostrophes. These characters then show up as odd characters or '?' in ERDDAP.
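      For example (a hypothetical snippet using one of the two values ERDDAP currently supports):
        <att name="_Encoding">ISO-8859-1</att>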
• ioos_category - This is a REQUIRED variable attribute if <variablesMustHaveIoosCategory> is set to true (the default) in setup.xml; otherwise, it is OPTIONAL.
                        For example, <att name="ioos_category">Salinity</att>
  The categories are from


                      • (As of writing this) we aren't aware of formal definitions of these names.
• The core names are from Zdenka Willis' .ppt "Integrated Ocean Observing System (IOOS) NOAA's Approach to Building an Initial Operating Capability" and from the US IOOS Blueprint (external link) (page 1-5).
                      • It is likely that this list will be revised in the future. If you have requests, please email bob.simons at noaa.gov.
• ERDDAP supports a larger list of categories than IOOS does because Bob Simons added additional names (mostly based on the names of scientific fields, for example, Biology, Ecology, Meteorology, Statistics, Taxonomy) for other types of data.
• The current valid values in ERDDAP are Bathymetry, Biology, Bottom Character, Colored Dissolved Organic Matter, Contaminants, Currents, Dissolved Nutrients, Dissolved O2, Ecology, Fish Abundance,


  setup.xml file, users can easily find datasets with similar data via ERDDAP's "Search for Datasets by Category" on the home page.
  Try using ioos_category to search for datasets of interest.

You may be tempted to set <variablesMustHaveIoosCategory>


                      sea_surface_temperature vs. sea_water_temperature).
(Using ioos_category for this purpose is controlled by <categoryAttributes> in your setup.xml file.)
Try using ioos_category to search for datasets of interest.
• These categories are from NOAA's Integrated Ocean Observing System (IOOS) (external link). These categories are fundamental to IOOS's description of IOOS's mission. If you are in NOAA, supporting ioos_category is a good One-NOAA thing to do. (Watch this


• long_name - (COARDS (external link), CF (external link), and ACDD (external link) metadata standards)


  • Best practices: Capitalize the words in the long_name as if it were a title (capitalize the first word and all non-article words). Don't include the units in the long_name. The long name shouldn't be very long (usually <20 characters), but should be more descriptive than the destinationName, which is often very concise.
  • If "long_name" isn't defined in the variable's sourceAttributes or <addAttributes>, ERDDAP will generate it by cleaning up the standard_name (if present) or the destinationName.
• missing_value and _FillValue - (COARDS (external link) and CF (external link)) are variable attributes which describe a number



                  For String variables, the default for both is "" (the empty string).
                  For numeric variables, the default for both is NaN.
    • ERDDAP supports both missing_value and _FillValue, since some data sources assign slightly different meanings to them.
    • If present, they should be of the same data type as the variable.
    • If the data is packed with scale_factor and/or add_offset,
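    For example (a hypothetical snippet for a short variable that uses -9999 to mark missing values; note that the attribute's type matches the variable's type, per the list above):
      <att name="missing_value" type="short">-9999</att>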


• scale_factor (default = 1) and add_offset (default = 0) - (COARDS (external link) and CF (external link)) are OPTIONAL variable attributes which describe data which is packed in a


  • If present, their data type is different from the source data type and describes the data type of the destination values.
    For example, a data source might have stored float data values with one decimal digit packed as short ints (int16), using scale_factor = 0.1 and add_offset = 0. For example,
                      <att name="scale_factor" type="float">0.1</att>
                      <att name="add_offset" type="float">0</att> -
                      In this example, ERDDAP would unpack the data and present it to the user as float data values. +
                      In this example, ERDDAP would unpack the data and present it to the + user as float data values.
                    • If present, ERDDAP will extract the values from these attributes, remove the attributes, and automatically unpack the data for the user:
      destinationValue = sourceValue * scale_factor + add_offset
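    For instance, with the scale_factor and add_offset example above, a packed source value of 123 (a short) becomes 123 * 0.1 + 0 = 12.3 (a float) in the destination data.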


  • standard_name (from the ACDD (external link) metadata standard) is a RECOMMENDED variable attribute in ERDDAP. (CF (external link) maintains a list of CF standard names (external link).) For example,
                      <att name="standard_name">eastward_sea_water_velocity</att>
    • If you add standard_name to variables' attributes and add standard_name to the list of <categoryAttributes> in ERDDAP's setup.xml file, users can easily find datasets with similar data via ERDDAP's "Search for Datasets by Category" on the home page.
    • If you specify a CF standard_name for a variable,


                        MUST have units of K, degrees_C, degrees_F, or some UDUnits variant of those names, since they are all inter-convertible.
    • Best practices: Part of the power of controlled vocabularies (external link) comes from using only the terms in the list. So we recommend sticking to the terms defined in the controlled vocabulary, and we recommend against making up a term if there isn't an appropriate one in the list. If you need additional terms, see if the standards committee will add them to the controlled vocabulary.
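    For instance, pairing a CF standard_name with compatible units, as required above (a hypothetical snippet):
      <att name="standard_name">sea_water_temperature</att>
      <att name="units">degree_C</att>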
                    • time_precision
  • units - (COARDS (external link), CF (external link), and ACDD (external link) metadata standards) defines the units of the data values. For example,
                      <att name="units">degree_C</att>
    • "units" is REQUIRED as either a sourceAttribute or an addAttribute for "time" variables and is STRONGLY RECOMMENDED for other variables whenever appropriate (which is almost always).
    • In general, we recommend UDUnits (external link)-compatible units which is required by the COARDS (external link) and CF (external link) standards.


      - the Unified Code for Units of Measure. OGC (external link) services such as SOS (external link), WCS (external link), and WMS (external link)


      You should tell ERDDAP which standard you are using with <units_standard>, in your setup.xml file.
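      For example (a hypothetical setup.xml snippet; UDUNITS is one of the two standards discussed above):
        <units_standard>UDUNITS</units_standard>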
                      • For time and timestamp variables, either the variable's sourceAttributes or <addAttributes> (which takes precedence) MUST have units which is either
      • For time axis variables or time data variables with numeric data: a UDUnits (external link)-compatible string (with the format units since baseTime) describing how to interpret source time values (for example, seconds since 1970-01-01T00:00:00Z).

                          units can be any one of:
          ms, msec, msecs, millis, millisec, millisecs, millisecond, milliseconds,
          s, sec, secs, second, seconds, m, min, mins, minute, minutes, h, hr, hrs, hour, hours,
          d, day, days, week, weeks, mon, mons, month, months, yr, yrs, year, or years.
          Technically, ERDDAP does NOT follow the UDUNITS standard when converting "years since" and "months since" time values to "seconds since". The UDUNITS standard defines a year as a fixed, single value: 3.15569259747e7 seconds. And UDUNITS defines a month as year/12. Unfortunately, most/all datasets that we have seen that use "years since" or "months since" clearly intend the values to be calendar years or calendar months. For example, 3 "months since 1970-01-01" is usually intended to mean 1970-04-01. So, ERDDAP interprets "years since" and "months since" as calendar years and months, and does not strictly follow the UDUNITS standard.

          Ideally, the baseTime is an ISO 8601:2004(E) formatted date time string
          (yyyy-MM-dd'T'HH:mm:ssZ, for example, 1970-01-01T00:00:00Z). ERDDAP tries to work with a wide range of variations of that ideal format; for example, "1970-1-1 0:0:0" is supported. If the time zone information is missing, it is assumed to be Zulu time zone (AKA GMT). Even if another time zone is specified, ERDDAP never uses Daylight Savings Time.

          You can test ERDDAP's ability to deal with a specific units since baseTime with ERDDAP's
          Time Converter. Hopefully, you can plug in a number (the first time value from the data source?) and a units string, click on Convert, and ERDDAP will be able to convert it into an ISO 8601:2004(E) formatted date time string. It will return an error message if the units string isn't recognizable.

        • For time data variables with String data: specify a java.time.DateTimeFormatter (external link) pattern (which is mostly compatible with java.text.SimpleDateFormat) which describes how to interpret the string times (for example, the ISO8601TZ_FORMAT yyyy-MM-dd'T'HH:mm:ssZ).
          A Z (not the literal 'Z') at the end of the format string tells Java/ERDDAP to look for the character 'Z' (indicating the Zulu time zone with offset=0) or look for a time zone offset in the form +hh:mm, +hh, -hh:mm, or -hh.



                          2012-11-20T10:12 (missing seconds are assumed to be 0)
                          2012-11-20T17 (missing minutes are assumed to be 0)
          2012-11-20 (missing hours are assumed to be 0)
          2012-11 (missing dates are assumed to be 1)
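          For example, a String time variable whose values match the patterns above could declare (a hypothetical snippet):
            <att name="units">yyyy-MM-dd'T'HH:mm:ssZ</att>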
        The main time data variable (for tabular datasets) and the main time axis variable (for gridded datasets) are recognized by the destinationName time and their units metadata (which must be suitable).


                        See more information about time variables.
        ERDDAP has a utility to Convert a Numeric Time to/from a String Time.
        See How ERDDAP Deals with Time.


      • valid_range, or valid_min and valid_max - These are OPTIONAL variable attributes defined in the CF (external link) metadata conventions. For example,
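        (a hypothetical snippet; the two-value list form and the 0 to 32 range are illustrative assumptions:)
          <att name="valid_range" type="floatList">0 32</att>
        or, equivalently:
          <att name="valid_min" type="float">0</att>
          <att name="valid_max" type="float">32</att>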


        Or, you can join the ERDDAP Google Group / Mailing List by visiting https://groups.google.com/forum/#!forum/erddap (external link) and clicking on "Apply for membership".



                         
        ERDDAP, Version 1.76
        Disclaimers | Privacy Policy

                         

                         

diff --git a/images/favicon.ico b/images/favicon.ico
index 7003a089ebea2d86692c1b8b21bdff3012870be0..54cc8c0f4172be45cc9726ceec4105e504d7f0d4 100644
GIT binary patch