#529 Fix tests failing on EOL issues on Windows.
yruslan committed Nov 16, 2022
1 parent 4f2f512 commit dccc53d
Showing 3 changed files with 56 additions and 14 deletions.
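The root cause: on a Windows checkout with CRLF line endings, the expected strings embedded in the test sources typically contain "\r\n", while strings produced at runtime (for example Spark's StructType.treeString) use "\n", so a plain equality check fails even when the content matches. A minimal Scala sketch of the failure mode, with illustrative values not taken from the commit:

// Illustrative values; any multiline schema text would do.
val expected = "root\r\n |-- A: integer (nullable = true)\r\n" // literal read from a CRLF checkout
val actual   = "root\n |-- A: integer (nullable = true)\n"     // what the code under test produces

assert(expected != actual) // plain equality is EOL-sensitive and fails on Windows

// Stripping CR/LF before comparing, as this commit does, makes the check EOL-agnostic:
assert(expected.replaceAll("[\r\n]", "") == actual.replaceAll("[\r\n]", ""))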
SimpleComparisonBase.scala (new file)
@@ -0,0 +1,38 @@
/*
* Copyright 2018 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package za.co.absa.cobrix.cobol.base

import org.scalatest.TestSuite
import org.slf4j.Logger

/**
 * This trait extends a test suite with methods for comparing small multiline strings
 * regardless of the line-ending style (CRLF on Windows vs. LF on Unix).
 */
trait SimpleComparisonBase extends TestSuite {
  /** Removes spaces and line-break characters so strings can be compared ignoring whitespace and EOL style. */
  def removeWhiteSpace(s: String): String = {
    s.replaceAll("[\\r\\n ]", "")
  }

  /** Compares two multiline strings ignoring CR/LF differences; logs both values and fails the test if they differ. */
  def assertEqualsMultiline(actualResults: String, expectedResults: String)(implicit logger: Logger): Unit = {
if (actualResults.replaceAll("[\r\n]", "") != expectedResults.replaceAll("[\r\n]", "")) {
logger.error(s"EXPECTED:\n$expectedResults")
logger.error(s"ACTUAL:\n$actualResults")
fail("Actual data does not match the expected data (see above).")
}
}

}
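For reference, a minimal usage sketch of the trait above. The suite name is hypothetical and the file is assumed to sit where SimpleComparisonBase is visible; CobolSchemaSpec below follows the same pattern: mix in the trait, provide an implicit Logger, and compare multiline output with assertEqualsMultiline.

import org.scalatest.WordSpec
import org.slf4j.{Logger, LoggerFactory}

// Hypothetical example suite; assumes SimpleComparisonBase is in scope (same package or imported).
class ExampleMultilineSpec extends WordSpec with SimpleComparisonBase {
  private implicit val logger: Logger = LoggerFactory.getLogger(this.getClass)

  "assertEqualsMultiline" should {
    "ignore CR/LF differences" in {
      val expected = "line1\r\nline2\r\n" // Windows-style line endings
      val actual   = "line1\nline2\n"     // Unix-style line endings
      assertEqualsMultiline(actual, expected) // passes: CR and LF are stripped before comparison
    }
  }
}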
CobolSchemaSpec.scala
@@ -16,13 +16,17 @@

package za.co.absa.cobrix.spark.cobol

import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.scalatest.{FunSuite, WordSpec}
import org.apache.spark.sql.types.StructType
import org.scalatest.WordSpec
import org.slf4j.{Logger, LoggerFactory}
import za.co.absa.cobrix.cobol.parser.CopybookParser
import za.co.absa.cobrix.cobol.reader.policies.SchemaRetentionPolicy
import za.co.absa.cobrix.spark.cobol.schema.CobolSchema
import za.co.absa.cobrix.spark.cobol.source.base.SimpleComparisonBase

class CobolSchemaSpec extends WordSpec with SimpleComparisonBase {
private implicit val logger: Logger = LoggerFactory.getLogger(this.getClass)

class CobolSchemaSpec extends WordSpec {
"for simple copybooks" should {
val copyBookContents: String =
""" 01 RECORD.
@@ -53,7 +57,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.CollapseRoot, "", false)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}

"Generate record id field" in {
@@ -74,7 +78,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.CollapseRoot, "", true)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}
}

@@ -102,7 +106,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.KeepOriginal, "", true)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}

"keep original and no record id generation" in {
@@ -118,7 +122,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.KeepOriginal, "", false)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}

"collapse root + record id generation" in {
Expand All @@ -135,7 +139,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.CollapseRoot, "", true)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}

"collapse root and no record id generation" in {
Expand All @@ -149,7 +153,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.CollapseRoot, "", false)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}
}

@@ -178,7 +182,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.KeepOriginal, "", true, 2)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}

"multi-segment keep-original without record id generation" in {
Expand All @@ -195,7 +199,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.KeepOriginal, "", false, 2)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}

"multi-segment collapse root with record id generation" in {
Expand All @@ -213,7 +217,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.CollapseRoot, "", true, 2)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}

"multi-segment collapse root without record id generation" in {
Expand All @@ -228,7 +232,7 @@ class CobolSchemaSpec extends WordSpec {
val cobolSchema = new CobolSchema(parsedSchema, SchemaRetentionPolicy.CollapseRoot, "", false, 2)
val actualSchema = cobolSchema.getSparkSchema.treeString

assert(actualSchema == expectedSchema)
assertEqualsMultiline(actualSchema, expectedSchema)
}
}

Test18AsciiNulChars.scala
@@ -105,7 +105,7 @@ class Test18AsciiNulChars extends WordSpec with SparkTestBase with BinaryFileFixture
|123456789
|12345678901234567890123456789
|5678
|""".stripMargin
|""".stripMargin.replaceAll("\r", "")

"not generate redundant records" in {
withTempTextFile("ascii_nul", ".dat", StandardCharsets.UTF_8, text) { tmpFileName =>
