Skip to content

Commit

Permalink
Update scaladocs mentioning deprecated API
Browse files Browse the repository at this point in the history
Ensure nothing mentions the deprecated API removed in previous commits
for DAFFODIL-2743.  Also fix some IDEA nits and improve DataProcessor
constructor.

SchemaSet.scala: Simplify scaladoc to stop mentioning deprecated API.
Fix some IDEA nits in the same code block with IDEA quick fixes.

SchemaSetRuntime1Mixin.scala: Make onPath call the first DataProcessor
constructor instead of the now-deleted second constructor.

DataProcessor.scala: Make SerializableDataProcessor extend the first
DataProcessor constructor instead of the now-deleted second
constructor.  Change the first DataProcessor constructor from private to
public, reorder its last 4 parameters, and make its last 2 parameters
optional.  Delete the second DataProcessor constructor and remove its
scaladoc, which had said it was using the deprecated
compilerExternalVars.  Make copy call the first constructor with reordered
parameters.  Remove another scaladoc which had mentioned tunables
that are no longer even passed to the method.

DAFFODIL-2743
  • Loading branch information
tuxji committed Dec 7, 2022
1 parent debf1d4 commit 4254a76
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 49 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -342,13 +342,9 @@ final class SchemaSet private (
}

/**
* The root element can be specified by a deprecated API call on the compiler
* object or the ProcessorFactory class, but the call on the ProcessorFactory class
* just overrides anything coming from the compiler object.
* You can define the root by passing the root specification to the Compiler.compileX method.
*
* The right way is to pass the root specification to the Compiler.compileX method.
*
* Or, you can leave it unspecified, and this method will determine from the
* Or, you can leave the root unspecified, and this method will determine it from the
* first element declaration of the first schema file.
*/
lazy val root: Root = {
Expand All @@ -360,12 +356,12 @@ final class SchemaSet private (
// if the root element and rootNamespace aren't provided at all, then
// the first element of the first schema document is the root
val sDocs = this.allSchemaDocuments
assuming(sDocs.length > 0)
val firstSchemaDocument = sDocs(0)
assuming(sDocs.nonEmpty)
val firstSchemaDocument = sDocs.head
val gdecl = firstSchemaDocument.globalElementDecls
val firstElement = {
schemaDefinitionUnless(gdecl.length >= 1, "No global elements in: " + firstSchemaDocument.uriString)
gdecl(0)
schemaDefinitionUnless(gdecl.nonEmpty, "No global elements in: " + firstSchemaDocument.uriString)
gdecl.head
}
firstElement
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ import org.apache.daffodil.processors.parsers.NotParsableParser
import org.apache.daffodil.processors.unparsers.NotUnparsableUnparser
import org.apache.daffodil.util.Logger

import scala.collection.immutable.Queue

trait SchemaSetRuntime1Mixin {
self: SchemaSet =>

Expand Down Expand Up @@ -75,7 +77,6 @@ trait SchemaSetRuntime1Mixin {
root.schemaDefinitionUnless(
!rootERD.dpathElementCompileInfo.isOutputValueCalc,
"The root element cannot have the dfdl:outputValueCalc property.")
val validationMode = ValidationMode.Off
val p = if (!root.isError) parser else null
val u = if (!root.isError) unparser else null
val ssrd = new SchemaSetRuntimeData(
Expand All @@ -87,7 +88,7 @@ trait SchemaSetRuntime1Mixin {
typeCalcMap)
if (root.numComponents > root.numUniqueComponents)
Logger.log.debug(s"Compiler: component counts: unique ${root.numUniqueComponents}, actual ${root.numComponents}.")
val dataProc = new DataProcessor(ssrd, tunable)
val dataProc = new DataProcessor(ssrd, tunable, variableMap.copy(), Queue.empty, ValidationMode.Off)
if (dataProc.isError) {
} else {
Logger.log.debug(s"Parser = ${ssrd.parser.toString}.")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,9 @@ object DataProcessor {
tunable: DaffodilTunables,
externalVars: Queue[Binding], // must be explicitly set to empty by save method
validationModeArg: ValidationMode.Type) // must be explicitly set from Full to Limited by save method.
extends DataProcessor(data, tunable, externalVars, validationModeArg) {
extends DataProcessor(data, tunable,
ExternalVariablesLoader.loadVariables(externalVars, data, data.originalVariables),
externalVars, validationModeArg) {

override def withValidationMode(mode: ValidationMode.Type): DataProcessor = {
if (mode == ValidationMode.Full) {
Expand All @@ -119,7 +121,7 @@ object DataProcessor {
* The very last aspects of compilation, and the start of the
* back-end runtime.
*/
class DataProcessor private (
class DataProcessor(
val ssrd: SchemaSetRuntimeData,
val tunables: DaffodilTunables, // Compiler-set tunables
val variableMap: VariableMap,
Expand All @@ -129,10 +131,10 @@ class DataProcessor private (
// The values these will have (since this is a base class) are the correct default values that we want
// back when the object is re-initialized.
//
protected val areDebugging : Boolean,
protected val optDebugger : Option[Debugger],
private val externalVars: Queue[Binding],
val validationMode: ValidationMode.Type,
private val externalVars: Queue[Binding])
protected val areDebugging : Boolean = false,
protected val optDebugger : Option[Debugger] = None)
extends DFDL.DataProcessor
with Serializable
with MultipleEventHandler {
Expand All @@ -154,37 +156,21 @@ class DataProcessor private (
* That means when we save for reloading, we must explicitly clobber validationMode and externalVars to
* initialized values.
*
* @throws java.io.ObjectStreamException
* @throws java.io.ObjectStreamException Must be part of writeReplace's API
* @return the serializable object
*/
@throws(classOf[java.io.ObjectStreamException])
private def writeReplace() : Object =
new SerializableDataProcessor(ssrd, tunables, externalVars, validationMode)

/**
* The compilerExternalVars argument supports the deprecated feature to assign external var bindings
* on the compiler object.
*
* These are just incorporated into the initial variable map of the data processor.
*/

def this(
ssrd: SchemaSetRuntimeData,
tunables:DaffodilTunables,
compilerExternalVars: Queue[Binding] = Queue.empty,
validationMode: ValidationMode.Type = ValidationMode.Off) =
this(ssrd, tunables, ExternalVariablesLoader.loadVariables(compilerExternalVars, ssrd, ssrd.originalVariables),
false, None, validationMode, compilerExternalVars)

def copy(
ssrd: SchemaSetRuntimeData = ssrd,
tunables: DaffodilTunables = tunables,
areDebugging : Boolean = areDebugging,
optDebugger : Option[Debugger] = optDebugger,
validationMode: ValidationMode.Type = validationMode,
variableMap : VariableMap = variableMap.copy,
externalVars: Queue[Binding] = externalVars) =
new DataProcessor(ssrd, tunables, variableMap, areDebugging, optDebugger, validationMode, externalVars)
def copy(ssrd: SchemaSetRuntimeData = ssrd,
tunables: DaffodilTunables = tunables,
variableMap: VariableMap = variableMap.copy(),
externalVars: Queue[Binding] = externalVars,
validationMode: ValidationMode.Type = validationMode,
areDebugging: Boolean = areDebugging,
optDebugger: Option[Debugger] = optDebugger) =
new DataProcessor(ssrd, tunables, variableMap, externalVars, validationMode, areDebugging, optDebugger)

// This thread local state is used by the PState when it needs buffers for
// regex matching. This cannot be in PState because a PState does not last
Expand Down Expand Up @@ -271,13 +257,6 @@ class DataProcessor private (
copy(externalVars = newBindings)
}

/**
* Note that tunables is not used. So this method is equivalent to
* the other similar method that doesn't take that parameter.
*
* @param extVars File containing configuration with external variable bindings in it.
* @param tunable This is ignored.
*/
def withExternalVariables(extVars: Seq[Binding]): DataProcessor = {
val newBindings = loadExternalVariables(extVars)
copy(externalVars = newBindings)
Expand Down

0 comments on commit 4254a76

Please sign in to comment.