From 8d914413cfc3489cca2b64312d56dba9a0dd2650 Mon Sep 17 00:00:00 2001
From: John Interrante
Date: Thu, 8 Dec 2022 17:23:17 -0500
Subject: [PATCH] (squash) Make another 2 suggested changes

Make save reset variables to original variables defined in schema, and
put coverage off/on around unimplemented methods.

DataProcessor.scala: Ensure variableMap is serialized as well as
validationMode, but reset back to original variables when saving.

Runtime2DataProcessor.scala: Put coverage off/on around unimplemented
methods.
---
 .../daffodil/processors/DataProcessor.scala   | 21 ++++++++++---------
 .../runtime2/Runtime2DataProcessor.scala      |  9 ++++----
 2 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala
index bda11978bf..9c556fbdd1 100644
--- a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala
+++ b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala
@@ -88,19 +88,19 @@
  * It enables us to implement restrictions on what you can/cannot do with a reloaded
  * processor versus an original one.
  *
- * When we reload a processor, it will have default values for everything settable
+ * When we reload a processor, we want it to have default values for everything settable
  * like validation mode, debug mode, and debugger.
  *
- * Note that this class does preserve validationMode. That is because validationMode
- * may be needed by serializations other than our own save/reload (e.g., Apache Spark
+ * Note that this class does preserve variableMap and validationMode. That is because
+ * serializations other than our own save/reload may need such settings (e.g., Apache Spark
  * which serializes to move objects for remote execution).
  *
- * Hence, we're depending on the save method to explicitly turn off validationMode.
+ * Hence, we're depending on the save method to explicitly reset them to default values.
  */
 private class SerializableDataProcessor(
   ssrd: SchemaSetRuntimeData,
   tunables: DaffodilTunables,
-  variableMap: VariableMap,
+  variableMap: VariableMap, // must be explicitly reset by save method
   validationMode: ValidationMode.Type, // must be explicitly turned off by save method
 ) extends DataProcessor(ssrd, tunables, variableMap, validationMode) {
 
@@ -270,13 +270,14 @@ class DataProcessor(
 
     //
    // Make a copy of this object so that we can make its saved state
-    // different than its original state. Note that the serialization
-    // system *does* preserve validationMode since it may be required by
-    // other software like Apache Spark. But for our save/reload purposes,
-    // we don't want validationMode preserved.
+    // different than its original state. Note other software like
+    // Apache Spark may require variableMap and validationMode to be
+    // preserved. But for our save/reload purposes, we want to reset
+    // them back to their original values.
     //
     val dpToSave = this.copy(
-      validationMode = ValidationMode.Off, // explicitly turn off, so restored processor won't be validating.
+      variableMap = ssrd.originalVariables, // reset to original variables defined in schema
+      validationMode = ValidationMode.Off, // explicitly turn off, so restored processor won't be validating
     )
 
     try {
diff --git a/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala b/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala
index ef362c9fd0..41deaf5079 100644
--- a/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala
+++ b/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala
@@ -39,12 +39,12 @@ import org.apache.daffodil.util.Maybe
 import org.apache.daffodil.util.Maybe.Nope
 
 /**
- * Effectively a scala proxy object that does its work via the underlying C-code.
- * Will need to consider how to use features of underlying C-code to get infoset,
- * walk infoset, generate XML for use by TDML tests.
+ * Effectively a scala proxy object that does its work via the underlying C-code
+ * to get infoset, walk infoset, and generate XML for use by TDML tests.
  */
 class Runtime2DataProcessor(executableFile: os.Path) extends DFDL.DataProcessorBase {
 
+  //$COVERAGE-OFF$
   override def withValidationMode(mode: ValidationMode.Type): DFDL.DataProcessor = ???
   override def withTunable(name: String, value: String): DFDL.DataProcessor = ???
   override def withTunables(tunables: Map[String, String]): DFDL.DataProcessor = ???
@@ -53,12 +53,11 @@ class Runtime2DataProcessor(executableFile: os.Path) extends DFDL.DataProcessorB
   override def withExternalVariables(extVars: Seq[Binding]): DFDL.DataProcessor = ???
   override def withDebugger(dbg:AnyRef): DFDL.DataProcessor = ???
   override def withDebugging(flag: Boolean): DFDL.DataProcessor = ???
-  override def save(output: DFDL.Output): Unit = ???
-
   override def tunables: DaffodilTunables = ???
   override def variableMap: VariableMap = ???
   override def validationMode: ValidationMode.Type = ???
+  //$COVERAGE-ON$
 
   /**
    * Returns an object which contains the result, and/or
    * diagnostic information.
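
Note (not part of the patch): the save-time behavior above amounts to copying the
processor with its serializable settings reset before writing it out, so that any
other serializer (e.g. Spark) still sees a fully configured object but our own saved
file always reloads with schema-default variables and validation off. A minimal
sketch of that pattern in Scala, using hypothetical stand-in types rather than
Daffodil's real SchemaSetRuntimeData/VariableMap/ValidationMode classes:

import java.io.{ByteArrayOutputStream, ObjectOutputStream}

// Hypothetical stand-ins for VariableMap and ValidationMode.Type.
case class Variables(bindings: Map[String, String]) extends Serializable
sealed trait Validation extends Serializable
case object ValidationOff extends Validation
case object ValidationFull extends Validation

// The processor keeps variableMap and validationMode in its serialized state,
// but its own save method resets both before writing the bytes out.
case class Processor(
  originalVariables: Variables, // the variables as defined in the schema
  variableMap: Variables,       // possibly overridden at runtime
  validationMode: Validation
) extends Serializable {

  def save(out: java.io.OutputStream): Unit = {
    val toSave = this.copy(
      variableMap = originalVariables, // drop runtime overrides
      validationMode = ValidationOff   // reloaded processors start with validation off
    )
    val oos = new ObjectOutputStream(out)
    try oos.writeObject(toSave)
    finally oos.close()
  }
}

// Usage: the saved bytes carry the reset settings, not the runtime overrides.
object SaveDemo extends App {
  val p = Processor(
    originalVariables = Variables(Map("delimiter" -> ",")),
    variableMap = Variables(Map("delimiter" -> "|")),
    validationMode = ValidationFull
  )
  p.save(new ByteArrayOutputStream())
}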
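
Likewise, the //$COVERAGE-OFF$ and //$COVERAGE-ON$ comments added above are
sbt-scoverage directives: statements between the markers are excluded from coverage
instrumentation, so the unimplemented ??? stubs do not drag down the coverage report.
A small illustration with a hypothetical trait (not from this patch):

trait Transport {
  def send(msg: String): Unit
  def receive(): String
}

// Only real code paths are instrumented for coverage; the ??? stubs
// between the markers are ignored by scoverage.
class StubTransport extends Transport {
  //$COVERAGE-OFF$
  override def send(msg: String): Unit = ???
  override def receive(): String = ???
  //$COVERAGE-ON$
}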