Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[BEAM-13964] Bump kotlin to 1.6.x #16882

Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
4 changes: 2 additions & 2 deletions examples/kotlin/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
import groovy.json.JsonOutput

plugins { id 'org.apache.beam.module'
id 'org.jetbrains.kotlin.jvm' version '1.3.72'
id 'org.jetbrains.kotlin.jvm' version '1.6.10'
}

applyJavaNature(exportJavadoc: false, automaticModuleName: 'org.apache.beam.examples.kotlin')
Expand All @@ -46,7 +46,7 @@ configurations.sparkRunnerPreCommit {
exclude group: "org.slf4j", module: "slf4j-jdk14"
}

def kotlin_version = "1.4.32"
def kotlin_version = "1.6.10"

dependencies {
implementation enforcedPlatform(library.java.google_cloud_platform_libraries_bom)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,13 +45,12 @@ class WriteOneFilePerWindow(private val filenamePrefix: String, private val numS

override fun expand(input: PCollection<String>): PDone {
val resource = FileBasedSink.convertToFileResourceIfPossible(filenamePrefix)
var write = TextIO.write()
val write = TextIO.write()
.to(PerWindowFiles(resource))
.withTempDirectory(resource.currentDirectory)
.withWindowedWrites()

write = numShards?.let { write.withNumShards(it) } ?: write
return input.apply(write)
return input.apply(numShards?.let { write.withNumShards(it) } ?: write)
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ object Snippets {
.apply<PCollection<Double>>(
MapElements.into(TypeDescriptors.doubles())
.via(SerializableFunction<TableRow, Double> {
it["max_temperature"] as Double
it["max_temperature"] as Double?
})
)
// [END BigQueryReadTable]
Expand All @@ -121,7 +121,7 @@ object Snippets {
val tableSpec = "clouddataflow-readonly:samples.weather_stations"
// [START BigQueryReadFunction]
val maxTemperatures = pipeline.apply(
BigQueryIO.read { it.record["max_temperature"] as Double }
BigQueryIO.read { it.record["max_temperature"] as Double? }
.from(tableSpec)
.withCoder(DoubleCoder.of()))
// [END BigQueryReadFunction]
Expand All @@ -130,7 +130,7 @@ object Snippets {
run {
// [START BigQueryReadQuery]
val maxTemperatures = pipeline.apply(
BigQueryIO.read { it.record["max_temperature"] as Double }
BigQueryIO.read { it.record["max_temperature"] as Double? }
.fromQuery(
"SELECT max_temperature FROM [clouddataflow-readonly:samples.weather_stations]")
.withCoder(DoubleCoder.of()))
Expand All @@ -140,7 +140,7 @@ object Snippets {
run {
// [START BigQueryReadQueryStdSQL]
val maxTemperatures = pipeline.apply(
BigQueryIO.read { it.record["max_temperature"] as Double }
BigQueryIO.read { it.record["max_temperature"] as Double? }
.fromQuery(
"SELECT max_temperature FROM `clouddataflow-readonly.samples.weather_stations`")
.usingStandardSql()
Expand Down