Commit: Fixing compilation
Alexey Kudinkin committed Jun 15, 2022
1 parent aeb98cf commit 9419230
Showing 3 changed files with 1 addition and 29 deletions.
@@ -199,7 +199,7 @@ protected void initSparkContexts(String appName) {

     if (sparkSessionExtensionsInjector.isPresent()) {
       // In case we need to inject extensions into Spark Session, we have
-      // to stop any session that might still be active and since Spark will try
+      // to stop any session that might still be active, since Spark will try
       // to re-use it
       HoodieConversionUtils.toJavaOption(SparkSession.getActiveSession())
           .ifPresent(SparkSession::stop);
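The comment reworded in this hunk captures a real Spark constraint: builder-time extension injection only takes effect on a brand-new session, because getOrCreate() hands back whatever session is already active. A minimal Scala sketch of that pattern (the no-op injector, master, and app name are illustrative, not taken from this commit):

import org.apache.spark.sql.{SparkSession, SparkSessionExtensions}

object ExtensionInjectionSketch {
  // Illustrative injector; a real one would call e.g. extensions.injectParser(...)
  val injector: SparkSessionExtensions => Unit = _ => ()

  def freshSessionWithExtensions(): SparkSession = {
    // Stop any session that might still be active, since getOrCreate()
    // would otherwise re-use it and skip withExtensions entirely.
    SparkSession.getActiveSession.foreach(_.stop())

    SparkSession.builder()
      .master("local[2]")
      .appName("extension-injection-sketch")
      .withExtensions(injector)
      .getOrCreate()
  }
}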
@@ -26,8 +26,6 @@ import org.apache.hudi.common.model.HoodieTableType.{COPY_ON_WRITE, MERGE_ON_READ}
 import org.apache.hudi.common.table.timeline.HoodieInstant
 import org.apache.hudi.common.table.{HoodieTableMetaClient, TableSchemaResolver}
 import org.apache.hudi.exception.HoodieException
-import org.apache.hudi.internal.schema.InternalSchema
-import org.apache.hudi.metadata.HoodieTableMetadata
 import org.apache.hudi.metadata.HoodieTableMetadata.isMetadataTable
 import org.apache.log4j.LogManager
 import org.apache.spark.sql.execution.streaming.{Sink, Source}
@@ -226,26 +224,6 @@ class DefaultSource extends RelationProvider
     new HoodieStreamSource(sqlContext, metadataPath, schema, parameters)
   }

-  private def resolveBaseFileOnlyRelation(sqlContext: SQLContext,
-                                          globPaths: Seq[Path],
-                                          userSchema: Option[StructType],
-                                          metaClient: HoodieTableMetaClient,
-                                          optParams: Map[String, String]): BaseRelation = {
-    val baseRelation = new BaseFileOnlyRelation(sqlContext, metaClient, optParams, userSchema, globPaths)
-    val enableSchemaOnRead: Boolean = !tryFetchInternalSchema(metaClient).isEmptySchema
-
-    // NOTE: We fallback to [[HadoopFsRelation]] in all of the cases except ones requiring usage of
-    //       [[BaseFileOnlyRelation]] to function correctly. This is necessary to maintain performance parity w/
-    //       vanilla Spark, since some of the Spark optimizations are predicated on the using of [[HadoopFsRelation]].
-    //
-    //       You can check out HUDI-3896 for more details
-    if (enableSchemaOnRead) {
-      baseRelation
-    } else {
-      baseRelation.toHadoopFsRelation
-    }
-  }
-
   private def resolveBaseFileOnlyRelation(sqlContext: SQLContext,
                                           globPaths: Seq[Path],
                                           userSchema: Option[StructType],
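For context on the commit message: the deleted method above had exactly the same name and parameter list as the definition kept immediately below it, and Scala rejects two methods with identical signatures in the same class, so the file no longer compiled. A tiny self-contained illustration (names are made up):

class Duplicated {
  def resolve(x: Int): Int = x

  // Re-declaring the same signature fails with an error like:
  //   error: method resolve is defined twice
  // def resolve(x: Int): Int = x + 1
}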
@@ -94,12 +94,6 @@ class TestCOWDataSource extends HoodieClientTestBase {
     System.gc()
   }

-  override def getSparkSessionExtensionsInjector: util.Option[Consumer[SparkSessionExtensions]] =
-    toJavaOption(
-      Some(
-        JFunction.toJava((receiver: SparkSessionExtensions) => new HoodieSparkSessionExtension().apply(receiver)))
-    )
-
   @Test def testShortNameStorage() {
     // Insert Operation
     val records = recordsToStrings(dataGen.generateInserts("000", 100)).toList
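The removed override had been wiring HoodieSparkSessionExtension into the test session via the harness's injector hook. For reference, the same extension class can also be enabled on an ordinary session through Spark's standard spark.sql.extensions setting; a sketch under that assumption, with the local-mode master and app name as placeholder values:

import org.apache.spark.sql.SparkSession

object HudiExtensionViaConfig {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("hudi-extensions-via-config")
      // Registers the Hudi extension by class name instead of a builder-side injector
      .config("spark.sql.extensions", "org.apache.hudi.HoodieSparkSessionExtension")
      .getOrCreate()

    spark.stop()
  }
}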
