Merge pull request #14906: [BEAM-12281] Drop support for Flink 1.10
iemejia authored Jun 2, 2021
2 parents 4f0ea5f + 22575c7 commit 511fb05
Showing 41 changed files with 57 additions and 355 deletions.
2 changes: 1 addition & 1 deletion .test-infra/dataproc/flink_cluster.sh
@@ -35,7 +35,7 @@
# HARNESS_IMAGES_TO_PULL='gcr.io/<IMAGE_REPOSITORY>/python:latest gcr.io/<IMAGE_REPOSITORY>/java:latest' \
# JOB_SERVER_IMAGE=gcr.io/<IMAGE_REPOSITORY>/job-server-flink:latest \
# ARTIFACTS_DIR=gs://<bucket-for-artifacts> \
-# FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz \
+# FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.12.3/flink-1.12.3-bin-scala_2.11.tgz \
# HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar \
# FLINK_NUM_WORKERS=2 \
# FLINK_TASKMANAGER_SLOTS=1 \
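The comment block above is the script's own usage documentation; in this file the commit only bumps the Flink download URL from 1.10.1 to 1.12.3. As a rough sketch, a full invocation could look like the following; the `create` subcommand and the placeholder registry and bucket values are illustrative assumptions, not part of this change:

    # Hedged sketch: provision a Dataproc Flink 1.12 cluster using the variables documented above.
    # <IMAGE_REPOSITORY>, the artifacts bucket, and the `create` subcommand are illustrative assumptions.
    HARNESS_IMAGES_TO_PULL='gcr.io/<IMAGE_REPOSITORY>/python:latest gcr.io/<IMAGE_REPOSITORY>/java:latest' \
    JOB_SERVER_IMAGE=gcr.io/<IMAGE_REPOSITORY>/job-server-flink:latest \
    ARTIFACTS_DIR=gs://<bucket-for-artifacts> \
    FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.12.3/flink-1.12.3-bin-scala_2.11.tgz \
    HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar \
    FLINK_NUM_WORKERS=2 \
    FLINK_TASKMANAGER_SLOTS=1 \
    ./flink_cluster.sh create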
4 changes: 2 additions & 2 deletions .test-infra/jenkins/Flink.groovy
@@ -17,8 +17,8 @@
*/

class Flink {
-private static final String flinkDownloadUrl = 'https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz'
-private static final String hadoopDownloadUrl = 'https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar'
+private static final String flinkDownloadUrl = 'https://archive.apache.org/dist/flink/flink-1.12.3/flink-1.12.3-bin-scala_2.11.tgz'
+private static final String hadoopDownloadUrl = 'https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-10.0/flink-shaded-hadoop-2-uber-2.8.3-10.0.jar'
private static final String FLINK_DIR = '"$WORKSPACE/src/.test-infra/dataproc"'
private static final String FLINK_SCRIPT = 'flink_cluster.sh'
private def job
2 changes: 1 addition & 1 deletion .test-infra/jenkins/job_LoadTests_Combine_Flink_Go.groovy
@@ -105,7 +105,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/beam_go_sdk:latest"
],
initialParallelism,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

// Execute all scenarios connected with initial parallelism.
loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.GO, initialScenarios, 'combine', mode)
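This Jenkins job, like the analogous load-test jobs below, now points at the Flink 1.12 portable job server container instead of the 1.10 one. As a hedged illustration of what that image is, the published Docker Hub counterpart can be started locally roughly as follows; the apache/ image name and port 8099 (the usual Beam job endpoint) are assumptions, not something this commit configures:

    # Hedged sketch: run the Flink 1.12 portable job server container locally.
    # Image name and port are assumptions; adjust them to your registry and setup.
    docker pull apache/beam_flink1.12_job_server:latest
    docker run --rm -p 8099:8099 apache/beam_flink1.12_job_server:latest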
@@ -132,7 +132,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
initialParallelism,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

// Execute all scenarios connected with initial parallelism.
loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.PYTHON, initialScenarios, 'Combine', mode)
2 changes: 1 addition & 1 deletion .test-infra/jenkins/job_LoadTests_GBK_Flink_Go.groovy
@@ -198,7 +198,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/beam_go_sdk:latest"
],
initialParallelism,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

// Execute all scenarios connected with initial parallelism.
loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.GO, initialScenarios, 'group_by_key', mode)
2 changes: 1 addition & 1 deletion .test-infra/jenkins/job_LoadTests_GBK_Flink_Python.groovy
@@ -146,7 +146,7 @@ def loadTest = { scope, triggeringContext ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

def configurations = testScenarios.findAll { it.pipelineOptions?.parallelism?.value == numberOfWorkers }
loadTestsBuilder.loadTests(scope, sdk, configurations, "GBK", "batch")
2 changes: 1 addition & 1 deletion .test-infra/jenkins/job_LoadTests_ParDo_Flink_Go.groovy
@@ -126,7 +126,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/beam_go_sdk:latest"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.GO, batchScenarios(), 'ParDo', mode)
}
@@ -320,7 +320,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.PYTHON, testScenarios, 'ParDo', mode)
}
@@ -78,7 +78,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/beam_go_sdk:latest"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.GO,
batchScenarios(), 'SideInput', mode)
2 changes: 1 addition & 1 deletion .test-infra/jenkins/job_LoadTests_coGBK_Flink_Go.groovy
@@ -158,7 +158,7 @@ def loadTestJob = { scope, triggeringContext, mode ->
"${DOCKER_CONTAINER_REGISTRY}/beam_go_sdk:latest"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.GO, batchScenarios(), 'CoGBK', mode)
}
@@ -137,7 +137,7 @@ def loadTest = { scope, triggeringContext ->
"${DOCKER_CONTAINER_REGISTRY}/${DOCKER_BEAM_SDK_IMAGE}"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

loadTestsBuilder.loadTests(scope, CommonTestProperties.SDK.PYTHON, testScenarios, 'CoGBK', 'batch')
}
@@ -38,7 +38,7 @@ def chicagoTaxiJob = { scope ->
"${DOCKER_CONTAINER_REGISTRY}/${beamSdkDockerImage}"
],
numberOfWorkers,
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.10_job_server:latest")
"${DOCKER_CONTAINER_REGISTRY}/beam_flink1.12_job_server:latest")

def pipelineOptions = [
parallelism : numberOfWorkers,
1 change: 1 addition & 0 deletions CHANGES.md
@@ -75,6 +75,7 @@
* Kafka Beam SQL tables now ascribe meaning to the LOCATION field; previously
it was ignored if provided.
* `TopCombineFn` disallow `compare` as its argument (Python) ([BEAM-7372](https://issues.apache.org/jira/browse/BEAM-7372)).
+* Drop support for Flink 1.10 ([BEAM-12281](https://issues.apache.org/jira/browse/BEAM-12281)).

## Deprecations

2 changes: 1 addition & 1 deletion gradle.properties
@@ -32,5 +32,5 @@ javaVersion=1.8
docker_image_default_repo_root=apache
docker_image_default_repo_prefix=beam_

-flink_versions=1.10,1.11,1.12,1.13
+flink_versions=1.11,1.12,1.13
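
The flink_versions property controls which per-version Flink runner subprojects the Gradle build generates, so after this change only 1.11, 1.12, and 1.13 remain. A hedged way to confirm that one of the remaining versions still resolves (the :runners:flink:<version> project layout is taken from the paths elsewhere in this diff):

    # Hedged sketch: list the tasks of one of the remaining per-version runner projects.
    ./gradlew :runners:flink:1.12:tasks
    # The 1.10 project paths (e.g. :runners:flink:1.10:runQuickstartJavaFlinkLocal) no longer exist.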

2 changes: 1 addition & 1 deletion release/build.gradle.kts
@@ -39,7 +39,7 @@ task("runJavaExamplesValidationTask") {
dependsOn(":runners:direct-java:runQuickstartJavaDirect")
dependsOn(":runners:google-cloud-dataflow-java:runQuickstartJavaDataflow")
dependsOn(":runners:spark:2:runQuickstartJavaSpark")
dependsOn(":runners:flink:1.10:runQuickstartJavaFlinkLocal")
dependsOn(":runners:flink:1.13:runQuickstartJavaFlinkLocal")
dependsOn(":runners:direct-java:runMobileGamingJavaDirect")
dependsOn(":runners:google-cloud-dataflow-java:runMobileGamingJavaDataflow")
dependsOn(":runners:twister2:runQuickstartJavaTwister2")
2 changes: 1 addition & 1 deletion release/src/main/scripts/run_rc_validation.sh
@@ -199,7 +199,7 @@ if [[ "$java_quickstart_flink_local" = true ]]; then
echo "*************************************************************"
echo "* Running Java Quickstart with Flink local runner"
echo "*************************************************************"
-./gradlew :runners:flink:1.10:runQuickstartJavaFlinkLocal \
+./gradlew :runners:flink:1.13:runQuickstartJavaFlinkLocal \
-Prepourl=${REPO_URL} \
-Pver=${RELEASE_VER}
else
33 changes: 0 additions & 33 deletions runners/flink/1.10/build.gradle

This file was deleted.

26 changes: 0 additions & 26 deletions runners/flink/1.10/job-server-container/build.gradle

This file was deleted.

31 changes: 0 additions & 31 deletions runners/flink/1.10/job-server/build.gradle

This file was deleted.

Three more deleted files and the rest of the 41 changed files are not shown here.
