diff --git a/daffodil-core/src/main/scala/org/apache/daffodil/dsom/SchemaSet.scala b/daffodil-core/src/main/scala/org/apache/daffodil/dsom/SchemaSet.scala
index c6a9d48538..cc63ca3ad9 100644
--- a/daffodil-core/src/main/scala/org/apache/daffodil/dsom/SchemaSet.scala
+++ b/daffodil-core/src/main/scala/org/apache/daffodil/dsom/SchemaSet.scala
@@ -342,13 +342,9 @@
   }
 
   /**
-   * The root element can be specified by a deprecated API call on the compiler
-   * object or the ProcessorFactory class, but the call on the ProcessorFactory class
-   * just overrides anything coming from the compiler object.
+   * You can define the root by passing the root specification to the Compiler.compileX method.
    *
-   * The right way is to pass the root specification to the Compiler.compileX method.
-   *
-   * Or, you can leave it unspecified, and this method will determine from the
+   * Or, you can leave the root unspecified, and this method will determine it from the
    * first element declaration of the first schema file.
    */
   lazy val root: Root = {
@@ -360,12 +356,12 @@
       // if the root element and rootNamespace aren't provided at all, then
       // the first element of the first schema document is the root
       val sDocs = this.allSchemaDocuments
-      assuming(sDocs.length > 0)
-      val firstSchemaDocument = sDocs(0)
+      assuming(sDocs.nonEmpty)
+      val firstSchemaDocument = sDocs.head
       val gdecl = firstSchemaDocument.globalElementDecls
       val firstElement = {
-        schemaDefinitionUnless(gdecl.length >= 1, "No global elements in: " + firstSchemaDocument.uriString)
-        gdecl(0)
+        schemaDefinitionUnless(gdecl.nonEmpty, "No global elements in: " + firstSchemaDocument.uriString)
+        gdecl.head
       }
       firstElement
     }
diff --git a/daffodil-core/src/main/scala/org/apache/daffodil/runtime1/SchemaSetRuntime1Mixin.scala b/daffodil-core/src/main/scala/org/apache/daffodil/runtime1/SchemaSetRuntime1Mixin.scala
index 8fe83cf329..0399e4fd8a 100644
--- a/daffodil-core/src/main/scala/org/apache/daffodil/runtime1/SchemaSetRuntime1Mixin.scala
+++ b/daffodil-core/src/main/scala/org/apache/daffodil/runtime1/SchemaSetRuntime1Mixin.scala
@@ -18,7 +18,6 @@
 package org.apache.daffodil.runtime1
 
 import org.apache.daffodil.api.DFDL
-import org.apache.daffodil.api.ValidationMode
 import org.apache.daffodil.dsom.SchemaSet
 import org.apache.daffodil.exceptions.Assert
 import org.apache.daffodil.grammar.VariableMapFactory
@@ -75,7 +74,6 @@ trait SchemaSetRuntime1Mixin {
     root.schemaDefinitionUnless(
       !rootERD.dpathElementCompileInfo.isOutputValueCalc,
       "The root element cannot have the dfdl:outputValueCalc property.")
-    val validationMode = ValidationMode.Off
     val p = if (!root.isError) parser else null
     val u = if (!root.isError) unparser else null
     val ssrd = new SchemaSetRuntimeData(
@@ -87,7 +85,7 @@ trait SchemaSetRuntime1Mixin {
       typeCalcMap)
     if (root.numComponents > root.numUniqueComponents)
       Logger.log.debug(s"Compiler: component counts: unique ${root.numUniqueComponents}, actual ${root.numComponents}.")
-    val dataProc = new DataProcessor(ssrd, tunable)
+    val dataProc = new DataProcessor(ssrd, tunable, variableMap.copy())
     if (dataProc.isError) {
     } else {
       Logger.log.debug(s"Parser = ${ssrd.parser.toString}.")
diff --git a/daffodil-core/src/test/scala/org/apache/daffodil/general/TestTunables.scala b/daffodil-core/src/test/scala/org/apache/daffodil/general/TestTunables.scala
index 37c8317e88..fb38194a71 100644
--- a/daffodil-core/src/test/scala/org/apache/daffodil/general/TestTunables.scala
+++ b/daffodil-core/src/test/scala/org/apache/daffodil/general/TestTunables.scala
@@ -63,14 +63,14 @@ class TestTunables {
     val dp1 = pf1.onPath("/")
     var dp2 = pf2.onPath("/")
 
-    val t1 = dp1.getTunables()
-    val t2 = dp2.getTunables()
+    val t1 = dp1.tunables
+    val t2 = dp2.tunables
 
     /* Set tunable at run-time via data processor */
     dp2 = dp2.withTunable("maxSkipLengthInBytes", "50")
 
-    val t3 = dp2.getTunables() // modified tunables at 'run-time'
-    val t4 = dp1.getTunables() // obtain first data processor to see if anything changed
+    val t3 = dp2.tunables // modified tunables at 'run-time'
+    val t4 = dp1.tunables // obtain first data processor to see if anything changed
 
     assertEquals(1026, t1.maxSkipLengthInBytes) // initial compiler-set value
     assertEquals(2048, t2.maxSkipLengthInBytes) // overwrite of compiler-set value
diff --git a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor.scala b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor.scala
index 1b068d5ed3..2a58f2c2b4 100644
--- a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor.scala
+++ b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor.scala
@@ -27,7 +27,6 @@ import org.apache.daffodil.processors.DataProcessor
 import org.apache.daffodil.compiler.Compiler
 import org.apache.daffodil.processors.SequenceRuntimeData
 import org.apache.daffodil.processors.ChoiceRuntimeData
-
 import InfosetEventKind._
 import org.apache.daffodil.processors.ElementRuntimeData
 
@@ -91,7 +90,7 @@ class TestInfosetInputter {
     }
     val rootERD = u.ssrd.elementRuntimeData
     val infosetInputter = new ScalaXMLInfosetInputter(infosetXML)
-    infosetInputter.initialize(rootERD, u.getTunables())
+    infosetInputter.initialize(rootERD, u.tunables)
     (infosetInputter, rootERD)
   }
 
diff --git a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor1.scala b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor1.scala
index e27eb8ea71..6059b3a50f 100644
--- a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor1.scala
+++ b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursor1.scala
@@ -49,7 +49,7 @@ class TestInfosetInputter1 {
     }
     val rootERD = u.ssrd.elementRuntimeData
     val ic = new XMLTextInfosetInputter(is)
-    ic.initialize(rootERD, u.getTunables())
+    ic.initialize(rootERD, u.tunables)
     ic
   }
 
diff --git a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader.scala b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader.scala
index 8c4aa0b6bd..834730cd00 100644
--- a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader.scala
+++ b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader.scala
@@ -55,7 +55,7 @@ class TestInfosetInputterFromReader {
     }
     val rootERD = u.ssrd.elementRuntimeData
     val inputter = new ScalaXMLInfosetInputter(infosetXML)
-    inputter.initialize(rootERD, u.getTunables())
+    inputter.initialize(rootERD, u.tunables)
     val is = Adapter(inputter)
     (is, rootERD, inputter, u.tunables)
   }
diff --git a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader2.scala b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader2.scala
index b77d998f25..5f4df00de8 100644
--- a/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader2.scala
+++ b/daffodil-core/src/test/scala/org/apache/daffodil/infoset/TestInfosetCursorFromReader2.scala
@@ -63,7 +63,7 @@ class TestInfosetInputterFromReader2 {
 
     val is = new StreamInputStream(strings)
     val inputter = new XMLTextInfosetInputter(is)
-    inputter.initialize(rootERD, u.getTunables())
+    inputter.initialize(rootERD, u.tunables)
     val ic = Adapter(inputter)
     (ic, rootERD, inputter)
   }
diff --git a/daffodil-core/src/test/scala/org/apache/daffodil/util/TestUtils.scala b/daffodil-core/src/test/scala/org/apache/daffodil/util/TestUtils.scala
index aff60d3e1f..f08b277d3f 100644
--- a/daffodil-core/src/test/scala/org/apache/daffodil/util/TestUtils.scala
+++ b/daffodil-core/src/test/scala/org/apache/daffodil/util/TestUtils.scala
@@ -338,17 +338,16 @@ class Fakes private () {
   lazy val fakeGroupRefFactory = GroupRefFactory(fs1.xml, fs1, 1, false)
 
   class FakeDataProcessor extends DFDL.DataProcessor {
 
-    def getValidationMode(): ValidationMode.Type = { ValidationMode.Full }
     override def save(output: DFDL.Output): Unit = {}
-    def getVariables(): VariableMap = VariableMapFactory.create(Nil)
     override def parse(input: InputSourceDataInputStream, output: InfosetOutputter): DFDL.ParseResult = null
     override def unparse(inputter: InfosetInputter, output: DFDL.Output): DFDL.UnparseResult = null
     override def getDiagnostics: Seq[Diagnostic] = Seq.empty
     override def isError: Boolean = false
-    override def getTunables(): DaffodilTunables = { tunables }
-    override def validationMode: ValidationMode.Type = ValidationMode.Full
+    override def tunables: DaffodilTunables = DaffodilTunables()
     override def variableMap: VariableMap = VariableMapFactory.create(Nil)
+    override def validationMode: ValidationMode.Type = ValidationMode.Full
+
     override def withExternalVariables(extVars: Seq[Binding]): DFDL.DataProcessor = this
     override def withExternalVariables(extVars: java.io.File): DFDL.DataProcessor = this
     override def withExternalVariables(extVars: Map[String,String]): DFDL.DataProcessor = this
diff --git a/daffodil-runtime1/src/main/scala/org/apache/daffodil/api/DFDLParserUnparser.scala b/daffodil-runtime1/src/main/scala/org/apache/daffodil/api/DFDLParserUnparser.scala
index e76720db80..4a9dbbd06a 100644
--- a/daffodil-runtime1/src/main/scala/org/apache/daffodil/api/DFDLParserUnparser.scala
+++ b/daffodil-runtime1/src/main/scala/org/apache/daffodil/api/DFDLParserUnparser.scala
@@ -190,11 +190,11 @@ object DFDL {
     def withDebugger(dbg:AnyRef): DataProcessor
     def withDebugging(flag: Boolean): DataProcessor
 
-    def validationMode: ValidationMode.Type
-
-    def getTunables(): DaffodilTunables
     def save(output: DFDL.Output): Unit
+
+    def tunables: DaffodilTunables
     def variableMap: VariableMap
+    def validationMode: ValidationMode.Type
   }
 
   trait DataProcessor extends DataProcessorBase with WithDiagnostics {
diff --git a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DaffodilUnparseContentHandler.scala b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DaffodilUnparseContentHandler.scala
index b0257d3a88..0d07d6f0e1 100644
--- a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DaffodilUnparseContentHandler.scala
+++ b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DaffodilUnparseContentHandler.scala
@@ -78,7 +78,7 @@ class DaffodilUnparseContentHandler(
   private var prefixMapping: NamespaceBinding = _
   private lazy val prefixMappingTrackingStack = new MStackOf[NamespaceBinding]
 
-  private lazy val tunablesBatchSize = dp.getTunables().saxUnparseEventBatchSize
+  private lazy val tunablesBatchSize = dp.tunables.saxUnparseEventBatchSize
 
   /**
    * we always have an extra buffer in the array that we use for the inputter.hasNext call. For each
diff --git a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala
index 482e1e47cb..9c556fbdd1 100644
--- a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala
+++ b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/DataProcessor.scala
@@ -26,7 +26,6 @@ import java.nio.channels.Channels
 import java.nio.charset.StandardCharsets
 import java.nio.file.Files
 import java.util.zip.GZIPOutputStream
-import scala.collection.immutable.Queue
 
 import org.apache.daffodil.Implicits._
 object INoWarn4 { ImplicitsSuppressUnusedImportWarning() }
@@ -84,27 +83,26 @@ class InvalidUsageException(msg: String, cause: Throwable = null) extends Except
 object DataProcessor {
 
   /**
-   * This is the DataProcessor constructed from a saved processor.
+   * This is the SerializableDataProcessor constructed from a saved processor.
    *
    * It enables us to implement restrictions on what you can/cannot do with a reloaded
    * processor versus an original one.
    *
-   * When we create one of these, it will have default values for everything
-   * settable like debuggers, debug mode.
+   * When we reload a processor, we want it to have default values for everything settable
+   * like validation mode, debug mode, and debugger.
    *
-   * Note that this does preserve the externalVars and validationMode. That is because
-   * those may be needed by serializations other than our own save/reload (e.g., Apache Spark which
-   * serializes to move things for remote execution).
+   * Note that this class does preserve variableMap and validationMode. That is because
+   * serializations other than our own save/reload may need such settings (e.g., Apache Spark
+   * which serializes to move objects for remote execution).
    *
-   * Hence, we're depending on the save method to explicitly reset validationMode and
-   * externalVars to initial values.
+   * Hence, we're depending on the save method to explicitly reset them to default values.
   */
   private class SerializableDataProcessor(
-    val data: SchemaSetRuntimeData,
-    tunable: DaffodilTunables,
-    externalVars: Queue[Binding], // must be explicitly set to empty by save method
-    validationModeArg: ValidationMode.Type) // must be explicitly set from Full to Limited by save method.
-    extends DataProcessor(data, tunable, externalVars, validationModeArg) {
+    ssrd: SchemaSetRuntimeData,
+    tunables: DaffodilTunables,
+    variableMap: VariableMap, // must be explicitly reset by save method
+    validationMode: ValidationMode.Type, // must be explicitly turned off by save method
+  ) extends DataProcessor(ssrd, tunables, variableMap, validationMode) {
 
     override def withValidationMode(mode: ValidationMode.Type): DataProcessor = {
       if (mode == ValidationMode.Full) {
@@ -119,7 +117,7 @@
  * The very last aspects of compilation, and the start of the
  * back-end runtime.
  */
-class DataProcessor private (
+class DataProcessor(
   val ssrd: SchemaSetRuntimeData,
   val tunables: DaffodilTunables, // Compiler-set tunables
   val variableMap: VariableMap,
@@ -129,11 +127,10 @@
   // The values these will have (since this is a base class) are the correct default values that we want
   // back when the object is re-initialized.
   //
-  protected val areDebugging : Boolean,
-  protected val optDebugger : Option[Debugger],
-  val validationMode: ValidationMode.Type,
-  private val externalVars: Queue[Binding])
-  extends DFDL.DataProcessor
+  val validationMode: ValidationMode.Type = ValidationMode.Off,
+  protected val areDebugging : Boolean = false,
+  protected val optDebugger : Option[Debugger] = None,
+) extends DFDL.DataProcessor
   with Serializable
   with MultipleEventHandler {
 
@@ -141,50 +138,33 @@ class DataProcessor private (
 
   /**
   * In order to make this serializable, without serializing the unwanted current state of
-  * debugger, external var settings, etc. we replace, at serialization time, this object
-  * with a [[SerializableDataProcessor]] which is a private derived class that
-  * sets all these troublesome var slots back to the default values.
+  * validation mode, debugging mode, debugger, etc. we replace, at serialization time, this
+  * object with a [[SerializableDataProcessor]] which is a private derived class that
+  * sets all these troublesome slots back to the default values.
   *
   * But note: there is serialization for us to save/reload, and there is serialization
   * in other contexts like Apache Spark, which may serialize objects without notifying us.
   *
-  * So we preserve everything that something like Spark might need preserved (validation modes, external vars)
+  * So we preserve everything that something like Spark might need preserved (validation mode)
   * and reinitialize things that are *always* reinitialized e.g., debugger, areDebugging.
   *
-  * That means when we save for reloading, we must explicitly clobber validationMode and externalVars to
-  * initialized values.
+  * That means when we save for reloading, we must explicitly clobber validationMode in save().
   *
-  * @throws java.io.ObjectStreamException
+  * @throws java.io.ObjectStreamException Must be part of writeReplace's API
   * @return the serializable object
   */
  @throws(classOf[java.io.ObjectStreamException])
  private def writeReplace() : Object =
-    new SerializableDataProcessor(ssrd, tunables, externalVars, validationMode)
-
-  /**
-   * The compilerExternalVars argument supports the deprecated feature to assign external var bindings
-   * on the compiler object.
-   *
-   * These are just incorporated into the initial variable map of the data processor.
-   */
-
-  def this(
-    ssrd: SchemaSetRuntimeData,
-    tunables:DaffodilTunables,
-    compilerExternalVars: Queue[Binding] = Queue.empty,
-    validationMode: ValidationMode.Type = ValidationMode.Off) =
-    this(ssrd, tunables, ExternalVariablesLoader.loadVariables(compilerExternalVars, ssrd, ssrd.originalVariables),
-      false, None, validationMode, compilerExternalVars)
+    new SerializableDataProcessor(ssrd, tunables, variableMap.copy(), validationMode)
 
  def copy(
    ssrd: SchemaSetRuntimeData = ssrd,
    tunables: DaffodilTunables = tunables,
-    areDebugging : Boolean = areDebugging,
-    optDebugger : Option[Debugger] = optDebugger,
+    variableMap: VariableMap = variableMap.copy(),
    validationMode: ValidationMode.Type = validationMode,
-    variableMap : VariableMap = variableMap.copy,
-    externalVars: Queue[Binding] = externalVars) =
-    new DataProcessor(ssrd, tunables, variableMap, areDebugging, optDebugger, validationMode, externalVars)
+    areDebugging: Boolean = areDebugging,
+    optDebugger: Option[Debugger] = optDebugger,
+  ) = new DataProcessor(ssrd, tunables, variableMap, validationMode, areDebugging, optDebugger)
 
  // This thread local state is used by the PState when it needs buffers for
  // regex matching. This cannot be in PState because a PState does not last
@@ -223,15 +203,12 @@ class DataProcessor private (
     }
   }
 
-  // TODO Deprecate and replace usages with just tunables.
-  def getTunables: DaffodilTunables = tunables
-
   def debugger = {
     Assert.invariant(areDebugging)
     optDebugger.get
   }
 
-  def withDebugger(dbg:AnyRef) = {
+  def withDebugger(dbg:AnyRef): DataProcessor = {
     val optDbg = if (dbg eq null) None else Some(dbg.asInstanceOf[Debugger])
     copy(optDebugger = optDbg)
   }
@@ -241,52 +218,33 @@ class DataProcessor private (
     copy(areDebugging = flag, tunables = newTunables)
   }
 
-  private def loadExternalVariables(extVars: Map[String, String]): Queue[Binding] = {
+  def withExternalVariables(extVars: Map[String, String]): DataProcessor = {
     val bindings = ExternalVariablesLoader.mapToBindings(extVars)
-    val newVars = externalVars ++ bindings
-    ExternalVariablesLoader.loadVariables(bindings, ssrd, variableMap)
-    newVars
+    val newVariableMap = ExternalVariablesLoader.loadVariables(bindings, ssrd, variableMap.copy())
+    copy(variableMap = newVariableMap)
   }
 
-  private def loadExternalVariables(extVars: File): Queue[Binding] = {
+  def withExternalVariables(extVars: File): DataProcessor = {
     val bindings = ExternalVariablesLoader.fileToBindings(extVars)
-    val newVars = externalVars ++ bindings
-    ExternalVariablesLoader.loadVariables(bindings, ssrd, variableMap)
-    newVars
+    val newVariableMap = ExternalVariablesLoader.loadVariables(bindings, ssrd, variableMap.copy())
+    copy(variableMap = newVariableMap)
   }
 
-  private def loadExternalVariables(bindings: Seq[Binding]): Queue[Binding] = {
-    val newVars = externalVars ++ bindings
-    ExternalVariablesLoader.loadVariables(bindings, ssrd, variableMap)
-    newVars
+  def withExternalVariables(bindings: Seq[Binding]): DataProcessor = {
+    val newVariableMap = ExternalVariablesLoader.loadVariables(bindings, ssrd, variableMap.copy())
+    copy(variableMap = newVariableMap)
   }
 
-  def withExternalVariables(extVars: Map[String, String]): DataProcessor = {
-    val newBindings = loadExternalVariables(extVars)
-    copy(externalVars = newBindings)
+  def withTunable(tunable: String, value: String): DataProcessor = {
+    val newTunables = tunables.withTunable(tunable, value)
+    copy(tunables = newTunables)
   }
 
-  def withExternalVariables(extVars: File): DataProcessor = {
-    val newBindings = loadExternalVariables(extVars)
-    copy(externalVars = newBindings)
+  def withTunables(tunablesArg: Map[String, String]): DataProcessor = {
+    val newTunables = tunables.withTunables(tunablesArg)
+    copy(tunables = newTunables)
   }
 
-  /**
-   * Note that tunables is not used. So this method is equivalent to
-   * the other similar method that doesn't take that parameter.
-   *
-   * @param extVars File containing configuration with external variable bindings in it.
-   * @param tunable This is ignored.
-   */
-  def withExternalVariables(extVars: Seq[Binding]): DataProcessor = {
-    val newBindings = loadExternalVariables(extVars)
-    copy(externalVars = newBindings)
-  }
-
-  def withTunable(tunable: String, value: String): DataProcessor = copy(tunables = tunables.withTunable(tunable, value))
-
-  def withTunables(tunablesArg: Map[String, String]): DataProcessor = copy(tunables = tunables.withTunables(tunablesArg))
-
   override def isError = false
 
   override def getDiagnostics = ssrd.diagnostics
@@ -311,19 +269,15 @@ class DataProcessor private (
     val oos = new ObjectOutputStream(new GZIPOutputStream(os))
 
     //
-    // Make a copy of this object, so that our state mods below don't side-effect the user's object.
-    // Saving shouldn't have side-effects on the state of the object.
-    //
-    //
-    // Note that the serialization system *does* preserve these two settings. This is for general serialization
-    // that may be required by other software (e.g., Apache Spark)
+    // Make a copy of this object so that we can make its saved state
+    // different than its original state. Note other software like
+    // Apache Spark may require variableMap and validationMode to be
+    // preserved. But for our save/reload purposes, we want to reset
+    // them back to their original values.
     //
-    // But for our save/reload purposes, we don't want them preserved.
-    //
-
     val dpToSave = this.copy(
-      externalVars = Queue.empty[Binding], // explicitly set these to empty so restored processor won't have them.
-      validationMode = ValidationMode.Off, // explicitly turn off, so restored processor won't be validating.
+      variableMap = ssrd.originalVariables, // reset to original variables defined in schema
+      validationMode = ValidationMode.Off, // explicitly turn off, so restored processor won't be validating
     )
 
     try {
@@ -518,7 +472,7 @@
   }
 
   def unparse(inputter: InfosetInputter, outStream: java.io.OutputStream) = {
-    inputter.initialize(ssrd.elementRuntimeData, getTunables())
+    inputter.initialize(ssrd.elementRuntimeData, tunables)
 
     val unparserState = UState.createInitialUState(
       outStream,
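For reviewers unfamiliar with the hook that DataProcessor's writeReplace relies on: Java serialization looks up a writeReplace method reflectively and serializes whatever object it returns in place of the original. Below is a minimal standalone sketch of that mechanism, assuming invented class names (this is illustrative, not Daffodil code):

import java.io.{ ByteArrayOutputStream, ObjectOutputStream, ObjectStreamException }

// A "heavy" object whose settable state should not survive our own save/reload.
class HeavyProcessor(val data: String, val debugging: Boolean) extends Serializable {
  // Java serialization finds this method reflectively and writes the object it
  // returns instead of `this`, so the stripped-down stand-in is what gets saved.
  @throws(classOf[ObjectStreamException])
  private def writeReplace(): Object = new LightProcessor(data)
}

// The stand-in that is actually serialized; it carries only what must be preserved.
class LightProcessor(val data: String) extends Serializable

object WriteReplaceDemo extends App {
  val oos = new ObjectOutputStream(new ByteArrayOutputStream())
  oos.writeObject(new HeavyProcessor("payload", debugging = true)) // a LightProcessor is written
  oos.close()
}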
diff --git a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/PState.scala b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/PState.scala
index 05e7468e45..8244576a84 100644
--- a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/PState.scala
+++ b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/PState.scala
@@ -658,7 +658,7 @@
     dataProc: DFDL.DataProcessor,
     areDebugging: Boolean): PState = {
 
-    val tunables = dataProc.getTunables()
+    val tunables = dataProc.tunables
     val doc = Infoset.newDocument(root).asInstanceOf[DIElement]
     createInitialPState(
       doc.asInstanceOf[InfosetDocument],
@@ -684,11 +684,11 @@ object PState {
     * This is a full deep copy as variableMap is mutable. Reusing
     * dataProc.VariableMap without a copy would not be thread safe.
     */
-    val variables = dataProc.variableMap.copy
+    val variables = dataProc.variableMap.copy()
     val diagnostics = Nil
     val mutablePState = MPState()
 
-    val tunables = dataProc.getTunables()
+    val tunables = dataProc.tunables
     val infosetWalker = InfosetWalker(
       doc.asInstanceOf[DIElement],
       output,
diff --git a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/unparsers/UState.scala b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/unparsers/UState.scala
index 7d2008d8be..8dc029d491 100644
--- a/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/unparsers/UState.scala
+++ b/daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/unparsers/UState.scala
@@ -680,7 +680,7 @@ object UState {
     * This is a full deep copy as variableMap is mutable. Reusing
     * dataProc.VariableMap without a copy would not be thread safe.
     */
-    val variables = dataProc.variableMap.copy
+    val variables = dataProc.variableMap.copy()
     val diagnostics = Nil
 
     val newState = new UStateMain(
@@ -689,7 +689,7 @@ object UState {
       variables,
       diagnostics,
       dataProc.asInstanceOf[DataProcessor],
-      dataProc.getTunables(),
+      dataProc.tunables,
       areDebugging)
     newState
   }
diff --git a/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala b/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala
index 13cc26ef9a..41deaf5079 100644
--- a/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala
+++ b/daffodil-runtime2/src/main/scala/org/apache/daffodil/runtime2/Runtime2DataProcessor.scala
@@ -39,35 +39,25 @@ import org.apache.daffodil.util.Maybe
 import org.apache.daffodil.util.Maybe.Nope
 
 /**
- * Effectively a scala proxy object that does its work via the underlying C-code.
- * Will need to consider how to use features of underlying C-code to get infoset,
- * walk infoset, generate XML for use by TDML tests.
+ * Effectively a scala proxy object that does its work via the underlying C-code
+ * to get infoset, walk infoset, and generate XML for use by TDML tests.
 */
 class Runtime2DataProcessor(executableFile: os.Path) extends DFDL.DataProcessorBase {
 
+  //$COVERAGE-OFF$
   override def withValidationMode(mode: ValidationMode.Type): DFDL.DataProcessor = ???
-
   override def withTunable(name: String, value: String): DFDL.DataProcessor = ???
-
   override def withTunables(tunables: Map[String, String]): DFDL.DataProcessor = ???
-
   override def withExternalVariables(extVars: Map[String, String]): DFDL.DataProcessor = ???
-
   override def withExternalVariables(extVars: File): DFDL.DataProcessor = ???
-
   override def withExternalVariables(extVars: Seq[Binding]): DFDL.DataProcessor = ???
-
   override def withDebugger(dbg:AnyRef): DFDL.DataProcessor = ???
-
   override def withDebugging(flag: Boolean): DFDL.DataProcessor = ???
-
-  override def validationMode: ValidationMode.Type = ???
-
-  override def getTunables(): DaffodilTunables = ???
-
   override def save(output: DFDL.Output): Unit = ???
-
+  override def tunables: DaffodilTunables = ???
   override def variableMap: VariableMap = ???
+  override def validationMode: ValidationMode.Type = ???
+  //$COVERAGE-ON$
 
   /**
   * Returns an object which contains the result, and/or diagnostic information.
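Taken together, the API after this patch is used in an immutable "with-er" style: tunables replaces getTunables(), and withTunable/withExternalVariables return a fresh DataProcessor rather than mutating state. A hedged usage sketch follows; the schema file name and variable name are hypothetical, the compile step assumes the usual Compiler()/onPath entry points, and the calls mirror those exercised in TestTunables above:

import java.io.File
import org.apache.daffodil.compiler.Compiler

object WitherStyleDemo extends App {
  // Compile a schema (hypothetical file name) and obtain an initial DataProcessor.
  val pf = Compiler().compileFile(new File("mySchema.dfdl.xsd"))
  val dp0 = pf.onPath("/")

  // Each with-er returns a new, independently configured processor; dp0 is untouched.
  val dp1 = dp0
    .withTunable("maxSkipLengthInBytes", "50")
    .withExternalVariables(Map("{http://example.com}myVar" -> "42")) // hypothetical variable defined in the schema

  println(dp0.tunables.maxSkipLengthInBytes) // compiler-set value
  println(dp1.tunables.maxSkipLengthInBytes) // 50
}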