From b774ff5fe9f851ad6a8522afdc9942a9961e9cee Mon Sep 17 00:00:00 2001 From: Danny McCormick Date: Thu, 16 Jun 2022 16:35:56 -0400 Subject: [PATCH] Update references to jira to GH for the Runners (#21835) * Update references to jira to GH for the Runners * Spotless format * Switch to urls * Spotless apply * Spotless apply * Fix awkward formatting --- .../runners/core/construction/External.java | 3 +- .../core/construction/NativeTransforms.java | 2 +- .../construction/PTransformTranslation.java | 3 +- .../core/construction/ParDoTranslation.java | 3 +- .../core/construction/SplittableParDo.java | 6 ++- .../graph/ProjectionProducerVisitor.java | 2 +- .../graph/ProjectionPushdownOptimizer.java | 2 +- .../construction/PTransformMatchersTest.java | 4 +- .../ProjectionPushdownOptimizerTest.java | 4 +- .../beam/runners/core/WatermarkHold.java | 3 +- .../runners/core/metrics/HistogramCell.java | 3 +- .../core/metrics/MetricsContainerStepMap.java | 3 +- .../runners/core/SimpleDoFnRunnerTest.java | 4 +- runners/direct-java/build.gradle | 2 +- .../direct/BoundedReadEvaluatorFactory.java | 3 +- .../beam/runners/direct/DirectRunner.java | 3 +- .../ExecutorServiceParallelExecutor.java | 3 +- .../direct/TransformExecutorServices.java | 6 ++- .../StatefulParDoEvaluatorFactoryTest.java | 4 +- runners/flink/flink_runner.gradle | 6 +-- .../flink/job-server/flink_job_server.gradle | 8 +-- .../beam/runners/flink/FlinkRunner.java | 3 +- .../FlinkExecutableStageFunction.java | 3 +- .../ExecutableStageDoFnOperator.java | 6 +-- .../flink/FlinkRequiresStableInputTest.java | 2 +- .../runners/flink/FlinkSavepointTest.java | 4 +- .../FlinkStreamingPipelineTranslatorTest.java | 3 +- .../flink/PortableStateExecutionTest.java | 3 +- .../flink/PortableTimersExecutionTest.java | 3 +- .../wrappers/streaming/DoFnOperatorTest.java | 4 +- .../google-cloud-dataflow-java/build.gradle | 36 ++++++------- .../BatchStatefulParDoOverridesTest.java | 3 +- .../runners/dataflow/DataflowRunnerTest.java | 3 +- 
.../worker/build.gradle | 4 +- .../worker/legacy-worker/build.gradle | 6 +-- .../worker/BatchModeExecutionContext.java | 6 ++- .../worker/StreamingDataflowWorker.java | 3 +- .../CreateExecutableStageNodeFunction.java | 6 ++- .../worker/IsmSideInputReaderTest.java | 2 +- .../worker/StreamingDataflowWorkerTest.java | 3 +- .../worker/UserParDoFnFactoryTest.java | 3 +- .../fnexecution/control/SdkHarnessClient.java | 5 +- .../environment/DockerEnvironmentFactory.java | 2 +- .../StaticGrpcProvisionService.java | 2 +- .../control/ProcessBundleDescriptorsTest.java | 3 +- .../control/RemoteExecutionTest.java | 4 +- runners/samza/build.gradle | 24 ++++----- runners/samza/job-server/build.gradle | 54 +++++++++---------- .../beam/runners/samza/SamzaRunner.java | 4 +- .../samza/metrics/SamzaMetricsContainer.java | 2 +- .../translation/ReshuffleTranslator.java | 2 +- .../SamzaPortablePipelineTranslator.java | 2 +- .../translation/SamzaTransformOverrides.java | 3 +- .../runtime/SamzaStoreStateInternalsTest.java | 4 +- .../translation/ConfigGeneratorTest.java | 3 +- .../spark/job-server/spark_job_server.gradle | 16 +++--- runners/spark/spark_runner.gradle | 2 +- .../beam/runners/spark/SparkRunner.java | 3 +- .../runners/spark/SparkRunnerDebugger.java | 3 +- .../spark/SparkTransformOverrides.java | 3 +- .../SparkStructuredStreamingRunner.java | 3 +- .../translation/SparkTransformOverrides.java | 3 +- ...rkStreamingPortablePipelineTranslator.java | 5 +- .../StreamingTransformTranslator.java | 3 +- .../ResumeFromCheckpointStreamingTest.java | 2 +- .../beam/runners/twister2/Twister2Runner.java | 6 ++- 66 files changed, 200 insertions(+), 146 deletions(-) diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java index bf7b8d408eed..0a10ab20a2b9 100644 --- 
a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java +++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java @@ -243,7 +243,8 @@ public OutputT expand(InputT input) { PValues.expandInput(PBegin.in(p)), ImmutableMap.of(entry.getKey(), (PCollection) entry.getValue()), Impulse.create(), - // TODO(BEAM-12082): Add proper support for Resource Hints with XLang. + // TODO(https://github.com/apache/beam/issues/18371): Add proper support for + // Resource Hints with XLang. ResourceHints.create(), p); // using fake Impulses to provide inputs diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java index 9ea47a10835b..19127acfca39 100644 --- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java +++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/NativeTransforms.java @@ -46,7 +46,7 @@ public class NativeTransforms { * Returns true if an only if the Runner understands this transform and can handle it directly. */ public static boolean isNative(RunnerApi.PTransform pTransform) { - // TODO(BEAM-10109) Use default (context) classloader. + // TODO(https://github.com/apache/beam/issues/20192) Use default (context) classloader. 
Iterator matchers = ServiceLoader.load(IsNativeTransform.class, NativeTransforms.class.getClassLoader()) .iterator(); diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java index 9010b7203c85..e701ae60bb57 100644 --- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java +++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PTransformTranslation.java @@ -442,7 +442,8 @@ public RunnerApi.PTransform translate( // Required runner implemented transforms should not have an environment id. if (!RUNNER_IMPLEMENTED_TRANSFORMS.contains(spec.getUrn())) { - // TODO(BEAM-9309): Remove existing hacks around deprecated READ transform. + // TODO(https://github.com/apache/beam/issues/20094): Remove existing hacks around + // deprecated READ transform. if (spec.getUrn().equals(READ_TRANSFORM_URN)) { // Only assigning environment to Bounded reads. 
Not assigning an environment to // Unbounded diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java index 20472fa1eff3..78d54426da3d 100644 --- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java +++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java @@ -599,7 +599,8 @@ public RunnerApi.StateSpec dispatchOrderedList(Coder elementCoder) { .setOrderedListSpec( RunnerApi.OrderedListStateSpec.newBuilder() .setElementCoderId(registerCoderOrThrow(components, elementCoder))) - // TODO(BEAM-10650): Update with correct protocol once the protocol is defined and + // TODO(https://github.com/apache/beam/issues/20486): Update with correct protocol + // once the protocol is defined and // the SDK harness uses it. .build(); } diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java index 1cdee8d1cd87..a29421e69068 100644 --- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java +++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/SplittableParDo.java @@ -691,7 +691,8 @@ public void tearDown() { * PrimitiveBoundedRead} and {@link PrimitiveUnboundedRead} if either the experiment {@code * use_deprecated_read} or {@code beam_fn_api_use_deprecated_read} are specified. * - *

TODO(BEAM-10670): Remove the primitive Read and make the splittable DoFn the only option. + *

TODO(https://github.com/apache/beam/issues/20530): Remove the primitive Read and make the + * splittable DoFn the only option. */ public static void convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(Pipeline pipeline) { if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "use_sdf_read") @@ -706,7 +707,8 @@ public static void convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(Pi * Converts {@link Read} based Splittable DoFn expansions to primitive reads implemented by {@link * PrimitiveBoundedRead} and {@link PrimitiveUnboundedRead}. * - *

TODO(BEAM-10670): Remove the primitive Read and make the splittable DoFn the only option. + *

TODO(https://github.com/apache/beam/issues/20530): Remove the primitive Read and make the + * splittable DoFn the only option. */ public static void convertReadBasedSplittableDoFnsToPrimitiveReads(Pipeline pipeline) { pipeline.replaceAll( diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java index da3f66ef43fb..4e0fa3fb734e 100644 --- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java +++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionProducerVisitor.java @@ -57,7 +57,7 @@ class ProjectionProducerVisitor extends PipelineVisitor.Defaults { public CompositeBehavior enterCompositeTransform(Node node) { PTransform transform = node.getTransform(); - // TODO(BEAM-13658) Support inputs other than PBegin. + // TODO(https://github.com/apache/beam/issues/21359) Support inputs other than PBegin. if (!node.getInputs().isEmpty()) { return CompositeBehavior.DO_NOT_ENTER_TRANSFORM; } diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java index b7d0470b6dce..a3c6bb20cc25 100644 --- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java +++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizer.java @@ -95,7 +95,7 @@ public static void optimize(Pipeline pipeline) { } } - // TODO(BEAM-13658) Support inputs other than PBegin. 
+ // TODO(https://github.com/apache/beam/issues/21359) Support inputs other than PBegin. private static class PushdownOverrideFactory< OutputT extends POutput, TransformT extends PTransform> implements PTransformOverrideFactory { diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java index 094c9c8c68b5..2acc8c81db00 100644 --- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java +++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PTransformMatchersTest.java @@ -83,7 +83,9 @@ @RunWith(JUnit4.class) @SuppressWarnings({ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) - "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) + // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of + // errorprone is released (2.11.0) + "unused" }) public class PTransformMatchersTest implements Serializable { @Rule diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java index 03d49221e8c0..81843d682564 100644 --- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java +++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProjectionPushdownOptimizerTest.java @@ -104,8 +104,8 @@ public void testIntermediateProducer() { FieldAccessDescriptor.withFieldNames("foo", "bar"); p.apply(source).apply(originalT).apply(new FieldAccessTransform(downstreamFieldAccess)); - // 
TODO(BEAM-13658) Support pushdown on intermediate transforms. - // For now, test that the pushdown optimizer ignores immediate transforms. + // TODO(https://github.com/apache/beam/issues/21359) Support pushdown on intermediate + // transforms. For now, test that the pushdown optimizer ignores immediate transforms. ProjectionPushdownOptimizer.optimize(p); Assert.assertTrue(pipelineHasTransform(p, originalT)); } diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java index baef5b5756c8..60a30039914a 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/WatermarkHold.java @@ -73,7 +73,8 @@ StateTag watermarkHoldTagForTimestampCombiner( StateTags.makeSystemTagInternal( StateTags.watermarkStateInternal("extra", TimestampCombiner.EARLIEST)); - // [BEAM-420] Seems likely these should all be transient or this class should not be Serializable + // [https://github.com/apache/beam/issues/18014] Seems likely these should all be transient or + // this class should not be Serializable @SuppressFBWarnings("SE_BAD_FIELD") private final TimerInternals timerInternals; diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java index 4f8894e93669..2a594401754c 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/HistogramCell.java @@ -70,7 +70,8 @@ public void update(HistogramCell other) { dirty.afterModification(); } - // TODO(BEAM-12103): Update this function to allow incrementing the infinite buckets as well. 
+ // TODO(https://github.com/apache/beam/issues/20853): Update this function to allow incrementing + // the infinite buckets as well. // and remove the incTopBucketCount and incBotBucketCount methods. // Using 0 and length -1 as the bucketIndex. public void incBucketCount(int bucketIndex, long count) { diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java index f3ec4d498fd3..02296b5b0da6 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java @@ -63,7 +63,8 @@ public MetricsContainerImpl getUnboundContainer() { /** Returns the container for the given step name. */ public MetricsContainerImpl getContainer(String stepName) { if (stepName == null) { - // TODO(BEAM-6538): Disallow this in the future, some tests rely on an empty step name today. + // TODO(https://github.com/apache/beam/issues/19275): Disallow this in the future, some tests + // rely on an empty step name today. 
return getUnboundContainer(); } return metricsContainers.computeIfAbsent( diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java index 5d065f417fb6..791f2d17dd11 100644 --- a/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/SimpleDoFnRunnerTest.java @@ -68,7 +68,9 @@ @RunWith(JUnit4.class) @SuppressWarnings({ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) - "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) + // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of + // errorprone is released (2.11.0) + "unused" }) public class SimpleDoFnRunnerTest { @Rule public ExpectedException thrown = ExpectedException.none(); diff --git a/runners/direct-java/build.gradle b/runners/direct-java/build.gradle index cd23ec86fbcb..86e30d4b9c2a 100644 --- a/runners/direct-java/build.gradle +++ b/runners/direct-java/build.gradle @@ -249,7 +249,7 @@ task examplesIntegrationTest(type: Test) { testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs) useJUnit { filter{ - // TODO (BEAM-14019) Fix integration Tests to run with DirectRunner: Timeout error + // TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with DirectRunner: Timeout error excludeTestsMatching 'org.apache.beam.examples.complete.TfIdfIT' excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding' } diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java index 682e8193ec9a..8f8d73d237bd 100644 --- 
a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java @@ -65,7 +65,8 @@ final class BoundedReadEvaluatorFactory implements TransformEvaluatorFactory { private final EvaluationContext evaluationContext; private final PipelineOptions options; - // TODO: (BEAM-723) Create a shared ExecutorService for maintenance tasks in the DirectRunner. + // TODO: (https://github.com/apache/beam/issues/18079) Create a shared ExecutorService for + // maintenance tasks in the DirectRunner. @VisibleForTesting final ExecutorService executor = Executors.newCachedThreadPool( diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java index 3afe7b991134..34ce76ed7c9f 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java @@ -253,7 +253,8 @@ void performRewrites(Pipeline pipeline) { // The last set of overrides includes GBK overrides used in WriteView pipeline.replaceAll(groupByKeyOverrides()); - // TODO(BEAM-10670): Use SDF read as default when we address performance issue. + // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address + // performance issue. 
SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java index 0840b1c51e48..3441c037966d 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java @@ -147,7 +147,8 @@ private RemovalListener shutdownExecutorSe } @Override - // TODO: [BEAM-4563] Pass Future back to consumer to check for async errors + // TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for + // async errors @SuppressWarnings("FutureReturnValueIgnored") public void start(DirectGraph graph, RootProviderRegistry rootProviderRegistry) { int numTargetSplits = Math.max(3, targetParallelism); diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java index abf54df5d598..7ec618f451e6 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java @@ -70,7 +70,8 @@ private ParallelTransformExecutor(ExecutorService executor) { } @Override - // TODO: [BEAM-4563] Pass Future back to consumer to check for async errors + // TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for + // async errors @SuppressWarnings("FutureReturnValueIgnored") public void schedule(TransformExecutor work) { if (active.get()) { @@ -154,7 +155,8 @@ public void shutdown() { workQueue.clear(); } - // TODO: [BEAM-4563] Pass Future back to consumer 
to check for async errors + // TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for + // async errors @SuppressWarnings("FutureReturnValueIgnored") private void updateCurrentlyEvaluating() { if (currentlyEvaluating.get() == null) { diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java index 0d24db06c625..41a602495ba4 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StatefulParDoEvaluatorFactoryTest.java @@ -88,7 +88,9 @@ @RunWith(JUnit4.class) @SuppressWarnings({ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) - "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) + // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of + // errorprone is released (2.11.0) + "unused" }) public class StatefulParDoEvaluatorFactoryTest implements Serializable { @Mock private transient EvaluationContext mockEvaluationContext; diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle index 7d44c44d4160..fe164f0f148d 100644 --- a/runners/flink/flink_runner.gradle +++ b/runners/flink/flink_runner.gradle @@ -225,7 +225,7 @@ class ValidatesRunnerConfig { } def sickbayTests = [ - // TODO(BEAM-13573) + // TODO(https://github.com/apache/beam/issues/21306) 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testOnWindowTimestampSkew', ] @@ -358,9 +358,9 @@ tasks.register("examplesIntegrationTest", Test) { testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs) useJUnit { filter{ - // TODO (BEAM-14019) Fix integration Tests to run with FlinkRunner: Assertion error + // TODO 
(https://github.com/apache/beam/issues/21344) Fix integration Tests to run with FlinkRunner: Assertion error excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding' - // TODO (BEAM-14019) Fix integration Tests to run with FlinkRunner: Error deleting table, Not found: Dataset + // TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with FlinkRunner: Error deleting table, Not found: Dataset excludeTestsMatching 'org.apache.beam.examples.cookbook.BigQueryTornadoesIT.testE2eBigQueryTornadoesWithStorageApiUsingQuery' } } diff --git a/runners/flink/job-server/flink_job_server.gradle b/runners/flink/job-server/flink_job_server.gradle index 58fc7682f607..b77e086a5325 100644 --- a/runners/flink/job-server/flink_job_server.gradle +++ b/runners/flink/job-server/flink_job_server.gradle @@ -193,17 +193,17 @@ def portableValidatesRunnerTask(String name, boolean streaming, boolean checkpoi excludeCategories 'org.apache.beam.sdk.testing.UsesPerKeyOrderInBundle' }, testFilter: { - // TODO(BEAM-10016) + // TODO(https://github.com/apache/beam/issues/20269) excludeTestsMatching 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2' - // TODO(BEAM-12039) + // TODO(https://github.com/apache/beam/issues/20843) excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testDiscardingMode' - // TODO(BEAM-12038) + // TODO(https://github.com/apache/beam/issues/20844) excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testLateDataAccumulating' // TODO(BEAM-12710) excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testFirstElementLate' // TODO(BEAM-13498) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testProcessElementSkew' - // TODO(BEAM-13952) + // TODO(https://github.com/apache/beam/issues/21472) excludeTestsMatching 
'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState' }, ) diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java index 25668a6f99b2..02bd2b3b93db 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java @@ -77,7 +77,8 @@ protected FlinkRunner(FlinkPipelineOptions options) { @Override public PipelineResult run(Pipeline pipeline) { // Portable flink only support SDF as read. - // TODO(BEAM-10670): Use SDF read as default when we address performance issue. + // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address + // performance issue. if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java index 87e092e25ce6..afc5ae106d92 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunction.java @@ -173,7 +173,8 @@ public void onCompleted(ProcessBundleResponse response) { metricContainer.updateMetrics(stepName, response.getMonitoringInfosList()); } }; - // TODO(BEAM-11021): Support bundle finalization in portable batch. + // TODO(https://github.com/apache/beam/issues/19526): Support bundle finalization in portable + // batch. 
finalizationHandler = bundleId -> { throw new UnsupportedOperationException( diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java index ee973f9621e2..485d8b3c055d 100644 --- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java +++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java @@ -708,9 +708,9 @@ public void flushData() throws Exception { // Manually drain processing time timers since Flink will ignore pending // processing-time timers when upstream operators have shut down and will also // shut down this operator with pending processing-time timers. - // TODO(BEAM-11210, FLINK-18647): It doesn't work efficiently when the watermark of upstream - // advances - // to MAX_TIMESTAMP immediately. + // TODO(https://github.com/apache/beam/issues/20600, FLINK-18647): It doesn't work + // efficiently when the watermark of upstream advances to MAX_TIMESTAMP + // immediately. if (numProcessingTimeTimers() > 0) { timerInternals.processPendingProcessingTimeTimers(); } diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java index b8f384502e10..5a71819f9aa8 100644 --- a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java +++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkRequiresStableInputTest.java @@ -121,7 +121,7 @@ public static void afterClass() throws Exception { * restore the savepoint to check if we produce impotent results. 
*/ @Test(timeout = 30_000) - @Ignore("BEAM-13575") + @Ignore("https://github.com/apache/beam/issues/21333") public void testParDoRequiresStableInput() throws Exception { FlinkPipelineOptions options = FlinkPipelineOptions.defaults(); options.setParallelism(1); diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java index 410fa8e9d269..32fd7f8a1060 100644 --- a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java +++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkSavepointTest.java @@ -84,7 +84,9 @@ */ @SuppressWarnings({ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) - "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) + // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of + // errorprone is released (2.11.0) + "unused" }) public class FlinkSavepointTest implements Serializable { diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java index b340b6e06c75..849f8be952cb 100644 --- a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java +++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkStreamingPipelineTranslatorTest.java @@ -55,7 +55,8 @@ import org.junit.Test; /** Tests if overrides are properly applied. 
*/ -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class FlinkStreamingPipelineTranslatorTest { diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java index d87de4e5451c..846889385535 100644 --- a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java +++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableStateExecutionTest.java @@ -58,7 +58,8 @@ * org.apache.beam.runners.flink.translation.wrappers.streaming.ExecutableStageDoFnOperator}. */ @RunWith(Parameterized.class) -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class PortableStateExecutionTest implements Serializable { diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java index f84763b79d39..828e23a96ee2 100644 --- a/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java +++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/PortableTimersExecutionTest.java @@ -71,7 +71,8 @@ * of a given timer is run. 
*/ @RunWith(Parameterized.class) -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class PortableTimersExecutionTest implements Serializable { diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java index fad521e92789..5ddef5935b20 100644 --- a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java +++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperatorTest.java @@ -117,7 +117,9 @@ @RunWith(JUnit4.class) @SuppressWarnings({ "keyfor", - "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) + // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of + // errorprone is released (2.11.0) + "unused" }) public class DoFnOperatorTest { diff --git a/runners/google-cloud-dataflow-java/build.gradle b/runners/google-cloud-dataflow-java/build.gradle index b2a2be9cdfcc..84925cbd2bf2 100644 --- a/runners/google-cloud-dataflow-java/build.gradle +++ b/runners/google-cloud-dataflow-java/build.gradle @@ -23,7 +23,7 @@ applyJavaNature( automaticModuleName: 'org.apache.beam.runners.dataflow', classesTriggerCheckerBugs: [ 'PrimitiveParDoSingleFactory': 'https://github.com/typetools/checker-framework/issues/3791', - // TODO(BEAM-12687): This currently crashes with checkerframework 3.10.0 + // TODO(https://github.com/apache/beam/issues/21068): This currently crashes with checkerframework 3.10.0 // when compiling :runners:google-cloud-dataflow-java:compileJava with: // message: class file for 
com.google.api.services.bigquery.model.TableRow not found // ; The Checker Framework crashed. Please report the crash. @@ -31,7 +31,7 @@ applyJavaNature( // Last visited tree at line 57 column 1: // @AutoService(CoderCloudObjectTranslatorRegistrar.class) // Exception: com.sun.tools.javac.code.Symbol$CompletionFailure: class file for com.google.api.services.bigquery.model.TableRow not found; com.sun.tools.javac.code.Symbol$CompletionFailure: class file for com.google.api.services.bigquery.model.TableRow not found - 'DefaultCoderCloudObjectTranslatorRegistrar': 'TODO(BEAM-12687): Report the crash if still occurring on newest version', + 'DefaultCoderCloudObjectTranslatorRegistrar': 'TODO(https://github.com/apache/beam/issues/21068): Report the crash if still occurring on newest version', ], ) @@ -161,7 +161,7 @@ def runnerV2PipelineOptions = [ "--region=${dataflowRegion}", "--tempRoot=${dataflowValidatesTempRoot}", "--sdkContainerImage=${dockerJavaImageContainer}:${dockerTag}", - // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved. + // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved. 
"--experiments=use_unified_worker,use_runner_v2,shuffle_mode=appliance", ] @@ -371,7 +371,7 @@ task validatesRunner { dependsOn(createLegacyWorkerValidatesRunnerTest( name: 'validatesRunnerLegacyWorkerTest', excludedTests: [ - // TODO(BEAM-13952) + // TODO(https://github.com/apache/beam/issues/21472) 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState', ] )) @@ -390,7 +390,7 @@ task validatesRunnerStreaming { 'org.apache.beam.sdk.testing.UsesSetState', ], excludedTests: [ - // TODO(BEAM-13952) + // TODO(https://github.com/apache/beam/issues/21472) 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState' ] )) @@ -418,7 +418,7 @@ createCrossLanguageValidatesRunnerTask( "--project=${dataflowProject}", "--region=${dataflowRegion}", "--sdk_harness_container_image_overrides=.*java.*,${dockerJavaImageContainer}:${dockerTag}", - // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved + // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved "--experiments=shuffle_mode=appliance", ], javaPipelineOptions: [ @@ -428,7 +428,7 @@ createCrossLanguageValidatesRunnerTask( "--tempRoot=${dataflowValidatesTempRoot}", "--sdkContainerImage=${dockerJavaImageContainer}:${dockerTag}", "--sdkHarnessContainerImageOverrides=.*python.*,${dockerPythonImageContainer}:${dockerTag}", - // TODO(BEAM-11779) remove shuffle_mode=appliance with runner v2 once issue is resolved. + // TODO(https://github.com/apache/beam/issues/20806) remove shuffle_mode=appliance with runner v2 once issue is resolved. 
"--experiments=shuffle_mode=appliance", ], pytestOptions: [ @@ -481,12 +481,12 @@ task validatesRunnerV2 { 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testCombiningAccumulatingProcessingTime', 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys100MB', 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testLargeKeys10MB', - // TODO(BEAM-12353): Identify whether it's bug or a feature gap. + // TODO(https://github.com/apache/beam/issues/20931): Identify whether it's bug or a feature gap. 'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner', 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2', - // TODO(BEAM-13952) + // TODO(https://github.com/apache/beam/issues/21472) 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState', ] )) @@ -502,7 +502,7 @@ task validatesRunnerV2Streaming { 'org.apache.beam.sdk.testing.LargeKeys$Above10KB', 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo', 'org.apache.beam.sdk.testing.UsesCommittedMetrics', - 'org.apache.beam.sdk.testing.UsesStrictTimerOrdering' /* BEAM-8543 */, + 'org.apache.beam.sdk.testing.UsesStrictTimerOrdering' /* https://github.com/apache/beam/issues/19957 */, 'org.apache.beam.sdk.testing.UsesTestStream', 'org.apache.beam.sdk.testing.UsesOnWindowExpiration', ], @@ -518,7 +518,7 @@ task validatesRunnerV2Streaming { 'org.apache.beam.sdk.extensions.sql.BeamSqlDslAggregationTest.testTriggeredTumble', 'org.apache.beam.sdk.transforms.ReshuffleTest.testReshuffleWithTimestampsStreaming', - // TODO(BEAM-11858) reading a side input twice fails + // TODO(https://github.com/apache/beam/issues/20726) reading a side input twice fails 'org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testSameSideInputReadTwice', 'org.apache.beam.sdk.transforms.CombineFnsTest.testComposedCombineWithContext', 
'org.apache.beam.sdk.transforms.CombineTest$CombineWithContextTests.testSimpleCombineWithContextEmpty', @@ -531,7 +531,7 @@ task validatesRunnerV2Streaming { 'org.apache.beam.sdk.transforms.GroupByKeyTest.testCombiningAccumulatingProcessingTime', - // TODO(BEAM-11306): Pipeline is hanging for these 3 tests. + // TODO(https://github.com/apache/beam/issues/20601): Pipeline is hanging for these 3 tests. 'org.apache.beam.sdk.transforms.SplittableDoFnTest.testPairWithIndexBasicUnbounded', 'org.apache.beam.sdk.transforms.SplittableDoFnTest.testOutputAfterCheckpointUnbounded', 'org.apache.beam.sdk.transforms.SplittableDoFnTest.testBundleFinalizationOccursOnUnboundedSplittableDoFn', @@ -550,7 +550,7 @@ task validatesRunnerV2Streaming { 'org.apache.beam.sdk.transforms.WaitTest.testWaitBoundedInDefaultWindow', 'org.apache.beam.sdk.transforms.WaitTest.testWaitWithSomeSignalWindowsEmpty', - // TODO(BEAM-3245): respect ParDo lifecycle. + // TODO(https://github.com/apache/beam/issues/18592): respect ParDo lifecycle. 
'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundle', 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInFinishBundleStateful', 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInProcessElement', @@ -560,20 +560,20 @@ task validatesRunnerV2Streaming { 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundle', 'org.apache.beam.sdk.transforms.ParDoLifecycleTest.testTeardownCalledAfterExceptionInStartBundleStateful', - // TODO(BEAM-11917) Empty flatten fails in streaming + // TODO(https://github.com/apache/beam/issues/20734) Empty flatten fails in streaming "org.apache.beam.sdk.transforms.FlattenTest.testEmptyFlattenAsSideInput", "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmptyThenParDo", "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmpty", 'org.apache.beam.sdk.io.CountingSourceTest.testBoundedSourceSplits', - // TODO(BEAM-12353): Identify whether it's bug or a feature gap. + // TODO(https://github.com/apache/beam/issues/20931): Identify whether it's bug or a feature gap. 'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner', - // TODO(BEAM-13525) + // TODO(https://github.com/apache/beam/issues/21424) 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testProcessElementSkew', 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testOnWindowTimestampSkew', - // TODO(BEAM-13952) + // TODO(https://github.com/apache/beam/issues/21472) 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState', ] )) @@ -697,7 +697,7 @@ task examplesJavaRunnerV2IntegrationTest(type: Test) { exclude '**/WindowedWordCountIT.class' exclude '**/TopWikipediaSessionsIT.class' exclude '**/AutoCompleteIT.class' - // TODO(BEAM-11201): test times out. 
+ // TODO(https://github.com/apache/beam/issues/20593): test times out. exclude '**/FhirIOReadIT.class' maxParallelForks 4 diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java index 5950d7b4fe0a..a77c540a48cb 100644 --- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java +++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/BatchStatefulParDoOverridesTest.java @@ -58,7 +58,8 @@ /** Tests for {@link BatchStatefulParDoOverrides}. */ @RunWith(JUnit4.class) -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class BatchStatefulParDoOverridesTest implements Serializable { diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java index afd75bb843cc..9bd6a5d97197 100644 --- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java +++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java @@ -186,7 +186,8 @@ *

Implements {@link Serializable} because it is caught in closures. */ @RunWith(JUnit4.class) -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class DataflowRunnerTest implements Serializable { diff --git a/runners/google-cloud-dataflow-java/worker/build.gradle b/runners/google-cloud-dataflow-java/worker/build.gradle index f879eefb2bf7..94dd383155b8 100644 --- a/runners/google-cloud-dataflow-java/worker/build.gradle +++ b/runners/google-cloud-dataflow-java/worker/build.gradle @@ -124,8 +124,8 @@ dependencies { shadowTest library.java.mockito_core } -//TODO(BEAM-5657): checktyle task should be enabled in the future. +//TODO(https://github.com/apache/beam/issues/19115): checktyle task should be enabled in the future. checkstyleMain.enabled = false checkstyleTest.enabled = false -//TODO(BEAM-5659): javadoc task should be enabled in the future. +//TODO(https://github.com/apache/beam/issues/19119): javadoc task should be enabled in the future. 
javadoc.enabled = false diff --git a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle index 34d3542383a6..82ec50b2eb93 100644 --- a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle +++ b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle @@ -90,7 +90,7 @@ applyJavaNature( shadowJarValidationExcludes: [ "org/apache/beam/runners/dataflow/worker/**", "org/apache/beam/repackaged/beam_runners_google_cloud_dataflow_java_legacy_worker/**", - // TODO(BEAM-6137): Move DataflowRunnerHarness class under org.apache.beam.runners.dataflow.worker namespace + // TODO(https://github.com/apache/beam/issues/19114): Move DataflowRunnerHarness class under org.apache.beam.runners.dataflow.worker namespace "com/google/cloud/dataflow/worker/DataflowRunnerHarness.class", // Allow slf4j implementation worker for logging during pipeline execution "org/slf4j/impl/**" @@ -267,8 +267,8 @@ project.task('validateShadedJarContainsSlf4jJdk14', dependsOn: 'shadowJar') { tasks.check.dependsOn project.tasks.validateShadedJarContainsSlf4jJdk14 -//TODO(BEAM-5657): checktyle task should be enabled in the future. +//TODO(https://github.com/apache/beam/issues/19115): checktyle task should be enabled in the future. checkstyleMain.enabled = false checkstyleTest.enabled = false -//TODO(BEAM-5659): javadoc task should be enabled in the future. +//TODO(https://github.com/apache/beam/issues/19119): javadoc task should be enabled in the future. 
javadoc.enabled = false diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java index f14afbc3779f..efdfea0de010 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/BatchModeExecutionContext.java @@ -68,7 +68,8 @@ public class BatchModeExecutionContext private final MetricsContainerRegistry containerRegistry; - // TODO(BEAM-7863): Move throttle time Metric to a dedicated namespace. + // TODO(https://github.com/apache/beam/issues/19632): Move throttle time Metric to a dedicated + // namespace. protected static final String DATASTORE_THROTTLE_TIME_NAMESPACE = "org.apache.beam.sdk.io.gcp.datastore.DatastoreV1$DatastoreWriterFn"; protected static final String HTTP_CLIENT_API_THROTTLE_TIME_NAMESPACE = @@ -533,7 +534,8 @@ public Iterable extractMsecCounters(boolean isFinalUpdate) { public Long extractThrottleTime() { long totalThrottleMsecs = 0L; for (MetricsContainerImpl container : containerRegistry.getContainers()) { - // TODO(BEAM-7863): Update throttling counters to use generic throttling-msecs metric. + // TODO(https://github.com/apache/beam/issues/19632): Update throttling counters to use + // generic throttling-msecs metric. 
CounterCell dataStoreThrottlingTime = container.tryGetCounter( MetricName.named(DATASTORE_THROTTLE_TIME_NAMESPACE, THROTTLE_TIME_COUNTER_NAME)); diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java index 6552dd620e84..55b0f5d7c9f9 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java @@ -209,7 +209,8 @@ public class StreamingDataflowWorker { /** Maximum number of failure stacktraces to report in each update sent to backend. */ private static final int MAX_FAILURES_TO_REPORT_IN_UPDATE = 1000; - // TODO(BEAM-7863): Update throttling counters to use generic throttling-msecs metric. + // TODO(https://github.com/apache/beam/issues/19632): Update throttling counters to use generic + // throttling-msecs metric. 
public static final MetricName BIGQUERY_STREAMING_INSERT_THROTTLE_TIME = MetricName.named( "org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$DatasetServiceImpl", diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java index 3e769f894e30..ee79ee39a466 100644 --- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java +++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java @@ -257,7 +257,8 @@ public Node apply(MutableNetwork input) { .setSpec(RunnerApi.FunctionSpec.newBuilder().setPayload(output.toByteString())) .build()); // For non-java coder, hope it's GlobalWindows by default. - // TODO(BEAM-6231): Actually discover the right windowing strategy. + // TODO(https://github.com/apache/beam/issues/19363): Actually discover the right + // windowing strategy. 
windowingStrategyId = globalWindowingStrategyId; } } catch (IOException e) { @@ -268,7 +269,8 @@ public Node apply(MutableNetwork input) { e); } - // TODO(BEAM-6275): Set correct IsBounded on generated PCollections + // TODO(https://github.com/apache/beam/issues/19297): Set correct IsBounded on generated + // PCollections String pcollectionId = node.getPcollectionId(); RunnerApi.PCollection pCollection = RunnerApi.PCollection.newBuilder() diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java index 35bc6292e44b..5d4a058c3b2c 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/IsmSideInputReaderTest.java @@ -1551,7 +1551,7 @@ private void verifyList(List expected, List actual) { } } - // TODO(BEAM-13460): Add assertions on contains() calls + // TODO(https://github.com/apache/beam/issues/21294): Add assertions on contains() calls @SuppressWarnings("ReturnValueIgnored") private static void verifyMap( Map expectedMap, Map mapView, Comparator valueComparator) { diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java index 540b15061af0..338a1d7eb14d 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java @@ -172,7 +172,8 @@ /** Unit tests 
for {@link StreamingDataflowWorker}. */ @RunWith(Parameterized.class) -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class StreamingDataflowWorkerTest { diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java index ca6e94b18ffa..c8c46bf6caec 100644 --- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java +++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/UserParDoFnFactoryTest.java @@ -77,7 +77,8 @@ /** Tests for {@link UserParDoFnFactory}. */ @RunWith(JUnit4.class) -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class UserParDoFnFactoryTest { static class TestDoFn extends DoFn { diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java index 4c6d88db2f81..9a9752a3e3cc 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/SdkHarnessClient.java @@ -512,9 +512,10 @@ public void close() throws Exception { finalizationHandler.requestsFinalization(bundleId); } } else { - // TODO: [BEAM-3962] Handle aborting the bundle being processed. 
+ // TODO: [https://github.com/apache/beam/issues/18756] Handle aborting the bundle being + // processed. throw new IllegalStateException( - "Processing bundle failed, TODO: [BEAM-3962] abort bundle."); + "Processing bundle failed, TODO: [https://github.com/apache/beam/issues/18756] abort bundle."); } } catch (Exception e) { if (exception == null) { diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java index edeaeab33936..ee816a944e5a 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/DockerEnvironmentFactory.java @@ -179,7 +179,7 @@ private List gcsCredentialArgs() { firstNonNull( System.getenv("CLOUDSDK_CONFIG"), Paths.get(System.getProperty("user.home"), ".config", "gcloud").toString()); - // TODO(BEAM-4729): Allow this to be disabled manually. + // TODO(https://github.com/apache/beam/issues/19061): Allow this to be disabled manually. 
if (Files.exists(Paths.get(localGcloudConfig))) { return ImmutableList.of( "--mount", diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java index b274890edf60..f6475603141c 100644 --- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java +++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java @@ -59,7 +59,7 @@ public void getProvisionInfo( ProvisionApi.GetProvisionInfoRequest request, StreamObserver responseObserver) { if (!environments.containsKey(headerAccessor.getSdkWorkerId())) { - // TODO(BEAM-9818): Remove once the JRH is gone. + // TODO(https://github.com/apache/beam/issues/20253): Remove once the JRH is gone. responseObserver.onNext(GetProvisionInfoResponse.newBuilder().setInfo(info).build()); responseObserver.onCompleted(); return; diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java index e6a3e3f6a7df..788fc38af54b 100644 --- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java +++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptorsTest.java @@ -63,7 +63,8 @@ import org.junit.Test; /** Tests for {@link ProcessBundleDescriptors}. 
*/ -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class ProcessBundleDescriptorsTest implements Serializable { diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java index 396a67fb6ca0..21052c9ff6a3 100644 --- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java +++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java @@ -162,7 +162,9 @@ @SuppressWarnings({ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) "keyfor", - "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) + // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of + // errorprone is released (2.11.0) + "unused" }) public class RemoteExecutionTest implements Serializable { diff --git a/runners/samza/build.gradle b/runners/samza/build.gradle index e4ae3c4f17de..528b4824eeca 100644 --- a/runners/samza/build.gradle +++ b/runners/samza/build.gradle @@ -87,25 +87,25 @@ configurations.all { } def sickbayTests = [ - // TODO(BEAM-12750) + // TODO(https://github.com/apache/beam/issues/21033) 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInGlobalWindowBatchSizeByteSizeFn', 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInStreamingMode', 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testWithShardedKeyInGlobalWindow', - // TODO(BEAM-12749) + // TODO(https://github.com/apache/beam/issues/21036) 'org.apache.beam.sdk.transforms.MapElementsTest.testMapSimpleFunction', - // TODO(BEAM-12748) + // 
TODO(https://github.com/apache/beam/issues/21035) 'org.apache.beam.sdk.transforms.ViewTest.testEmptySingletonSideInput', 'org.apache.beam.sdk.transforms.ViewTest.testNonSingletonSideInput', - // TODO(BEAM-12747) + // TODO(https://github.com/apache/beam/issues/21037) 'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsBackwardsInTimeShouldThrow', 'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsWithNullTimestampShouldThrow', - // TODO(BEAM-12746) + // TODO(https://github.com/apache/beam/issues/21039) 'org.apache.beam.sdk.io.FileIOTest*', - // TODO(BEAM-12745) + // TODO(https://github.com/apache/beam/issues/21038) 'org.apache.beam.sdk.io.AvroIOTest*', - // TODO(BEAM-12744) + // TODO(https://github.com/apache/beam/issues/21040) 'org.apache.beam.sdk.PipelineTest.testEmptyPipeline', - // TODO(BEAM-12743) + // TODO(https://github.com/apache/beam/issues/21041) 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingNPException', 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingIOException', 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testDecodingNPException', @@ -148,13 +148,13 @@ tasks.register("validatesRunner", Test) { } // TODO(BEAM-10025) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestampDefaultUnbounded' - // TODO(BEAM-11479) + // TODO(https://github.com/apache/beam/issues/20703) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestamp' - // TODO(BEAM-11479) + // TODO(https://github.com/apache/beam/issues/20703) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testRelativeTimerWithOutputTimestamp' - // TODO(BEAM-12035) + // TODO(https://github.com/apache/beam/issues/20847) excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testFirstElementLate' - // TODO(BEAM-12036) + // 
TODO(https://github.com/apache/beam/issues/20846) excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testLateDataAccumulating' // These tests fail since there is no support for side inputs in Samza's unbounded splittable DoFn integration diff --git a/runners/samza/job-server/build.gradle b/runners/samza/job-server/build.gradle index 8234c94bdef2..c1c74701a6d8 100644 --- a/runners/samza/job-server/build.gradle +++ b/runners/samza/job-server/build.gradle @@ -101,70 +101,70 @@ def portableValidatesRunnerTask(String name, boolean docker) { excludeCategories 'org.apache.beam.sdk.testing.UsesOrderedListState' excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime' - // TODO(BEAM-12821) + // TODO(https://github.com/apache/beam/issues/21023) excludeCategories 'org.apache.beam.sdk.testing.UsesTestStreamWithMultipleStages' excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesLoopingTimer' }, testFilter: { - // TODO(BEAM-12677) + // TODO(https://github.com/apache/beam/issues/21042) excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2" excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testEmptyFlattenAsSideInput" excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmptyThenParDo" excludeTestsMatching "org.apache.beam.sdk.transforms.FlattenTest.testFlattenPCollectionsEmpty" // TODO(BEAM-10025) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestampDefaultUnbounded' - // TODO(BEAM-11479) + // TODO(https://github.com/apache/beam/issues/20703) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestamp' - // TODO(BEAM-12035) + // TODO(https://github.com/apache/beam/issues/20847) 
excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testFirstElementLate' - // TODO(BEAM-12036) + // TODO(https://github.com/apache/beam/issues/20846) excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testLateDataAccumulating' - // TODO(BEAM-12886) + // TODO(https://github.com/apache/beam/issues/21142) excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testWindowFnPostMerging' - // TODO(BEAM-12887) + // TODO(https://github.com/apache/beam/issues/21143) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testParDoShiftTimestampInvalid' - // TODO(BEAM-12888) + // TODO(https://github.com/apache/beam/issues/21144) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testParDoShiftTimestampInvalidZeroAllowed' - // TODO(BEAM-12889) + // TODO(https://github.com/apache/beam/issues/21145) excludeTestsMatching 'org.apache.beam.sdk.transforms.DeduplicateTest.testEventTime' - // TODO(BEAM-12890) + // TODO(https://github.com/apache/beam/issues/21146) excludeTestsMatching 'org.apache.beam.sdk.io.TFRecordIOTest.testReadInvalidRecord' - // TODO(BEAM-12891) + // TODO(https://github.com/apache/beam/issues/21147) excludeTestsMatching 'org.apache.beam.sdk.io.TFRecordIOTest.testReadInvalidDataMask' - // TODO(BEAM-12892) + // TODO(https://github.com/apache/beam/issues/21148) excludeTestsMatching 'org.apache.beam.sdk.io.TFRecordIOTest.testReadInvalidLengthMask' - // TODO(BEAM-12893) + // TODO(https://github.com/apache/beam/issues/21149) excludeTestsMatching 'org.apache.beam.sdk.io.TextIOReadTest$CompressedReadTest.testCompressedReadWithoutExtension' - // TODO(BEAM-12894) + // TODO(https://github.com/apache/beam/issues/21150) excludeTestsMatching 'org.apache.beam.sdk.io.WriteFilesTest.testWithRunnerDeterminedShardingUnbounded' - // TODO(BEAM-128945) + // 
TODO(https://github.com/apache/beam/issues/21151) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testParDoWritingToUndeclaredTag' - // TODO(BEAM-12896) + // TODO(https://github.com/apache/beam/issues/21152) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$MultipleInputsAndOutputTests.testParDoReadingFromUnknownSideInput' - // TODO(BEAM-12897) + // TODO(https://github.com/apache/beam/issues/21153) excludeTestsMatching 'org.apache.beam.sdk.transforms.ViewTest.testMapSideInputWithNullValuesCatchesDuplicates' - // TODO(BEAM-12743) + // TODO(https://github.com/apache/beam/issues/21041) excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingNPException' excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testEncodingIOException' excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testDecodingNPException' excludeTestsMatching 'org.apache.beam.sdk.coders.PCollectionCustomCoderTest.testDecodingIOException' - // TODO(BEAM-12744) + // TODO(https://github.com/apache/beam/issues/21040) excludeTestsMatching 'org.apache.beam.sdk.PipelineTest.testEmptyPipeline' - // TODO(BEAM-12745) + // TODO(https://github.com/apache/beam/issues/21038) excludeTestsMatching 'org.apache.beam.sdk.io.AvroIOTest*' - // TODO(BEAM-12746) + // TODO(https://github.com/apache/beam/issues/21039) excludeTestsMatching 'org.apache.beam.sdk.io.FileIOTest*' - // TODO(BEAM-12747) + // TODO(https://github.com/apache/beam/issues/21037) excludeTestsMatching 'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsBackwardsInTimeShouldThrow' excludeTestsMatching 'org.apache.beam.sdk.transforms.WithTimestampsTest.withTimestampsWithNullTimestampShouldThrow' - // TODO(BEAM-12748) + // TODO(https://github.com/apache/beam/issues/21035) excludeTestsMatching
'org.apache.beam.sdk.transforms.ViewTest.testEmptySingletonSideInput' excludeTestsMatching 'org.apache.beam.sdk.transforms.ViewTest.testNonSingletonSideInput' - // TODO(BEAM-12749) + // TODO(https://github.com/apache/beam/issues/21036) excludeTestsMatching 'org.apache.beam.sdk.transforms.MapElementsTest.testMapSimpleFunction' - // TODO(BEAM-12750) + // TODO(https://github.com/apache/beam/issues/21033) excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInGlobalWindowBatchSizeByteSizeFn' excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInStreamingMode' excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testWithShardedKeyInGlobalWindow' @@ -172,9 +172,9 @@ def portableValidatesRunnerTask(String name, boolean docker) { excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupIntoBatchesTest.testInGlobalWindowBatchSizeByteSize' // TODO(BEAM-10025) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestampDefaultUnbounded' - // TODO(BEAM-11479) + // TODO(https://github.com/apache/beam/issues/20703) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testOutputTimestamp' - // TODO(BEAM-11479) + // TODO(https://github.com/apache/beam/issues/20703) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimerTests.testRelativeTimerWithOutputTimestamp' // TODO(BEAM-13498) excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testProcessElementSkew' diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java index dd7dd68627ae..a45448b305ec 100644 --- a/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java +++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaRunner.java @@ -114,8 +114,8 @@ public PortablePipelineResult 
runPortablePipeline(RunnerApi.Pipeline pipeline, J @Override public SamzaPipelineResult run(Pipeline pipeline) { - // TODO(BEAM-10670): Use SDF read as default for non-portable execution when we address - // performance issue. + // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default for non-portable + // execution when we address performance issue. if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java index d1ce4c1b2576..7415735d44fe 100644 --- a/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java +++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/metrics/SamzaMetricsContainer.java @@ -72,7 +72,7 @@ public void updateMetrics(String stepName) { final GaugeUpdater updateGauge = new GaugeUpdater(); results.getGauges().forEach(updateGauge); - // TODO(BEAM-12614): add distribution metrics to Samza + // TODO(https://github.com/apache/beam/issues/21043): add distribution metrics to Samza } private class CounterUpdater implements Consumer> { diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java index c7f8acc22f50..62bc2224f355 100644 --- a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java +++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/ReshuffleTranslator.java @@ -115,7 +115,7 @@ public static class IsSamzaNativeTransform implements NativeTransforms.IsNativeT @Override public boolean test(RunnerApi.PTransform pTransform) { return false; - // Re-enable 
after BEAM-12999 is completed + // Re-enable after https://github.com/apache/beam/issues/21188 is completed // return PTransformTranslation.RESHUFFLE_URN.equals( // PTransformTranslation.urnForTransformOrNull(pTransform)); } diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java index b3218eff5bf2..e5bc2cd8f91a 100644 --- a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java +++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaPortablePipelineTranslator.java @@ -103,7 +103,7 @@ public static class SamzaTranslators implements SamzaPortableTranslatorRegistrar @Override public Map> getTransformTranslators() { return ImmutableMap.>builder() - // Re-enable after BEAM-12999 is completed + // Re-enable after https://github.com/apache/beam/issues/21188 is completed // .put(PTransformTranslation.RESHUFFLE_URN, new ReshuffleTranslator<>()) .put(PTransformTranslation.GROUP_BY_KEY_TRANSFORM_URN, new GroupByKeyTranslator<>()) .put(PTransformTranslation.FLATTEN_TRANSFORM_URN, new FlattenPCollectionsTranslator<>()) diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java index 94100689caad..df18ada4984f 100644 --- a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java +++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/SamzaTransformOverrides.java @@ -50,7 +50,8 @@ public static List getDefaultOverrides() { PTransformMatchers.splittableProcessKeyedBounded(), new SplittableParDoNaiveBounded.OverrideFactory())) - // TODO: [BEAM-5362] Support @RequiresStableInput on Samza runner + // TODO: 
[https://github.com/apache/beam/issues/19132] Support @RequiresStableInput on Samza + // runner .add( PTransformOverride.of( PTransformMatchers.requiresStableInputParDoMulti(), diff --git a/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java b/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java index e7116a4f0c21..457a43e53e47 100644 --- a/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java +++ b/runners/samza/src/test/java/org/apache/beam/runners/samza/runtime/SamzaStoreStateInternalsTest.java @@ -79,7 +79,9 @@ /** Tests for SamzaStoreStateInternals. */ @SuppressWarnings({ "rawtypes", // TODO(https://github.com/apache/beam/issues/20447) - "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) + // TODO(https://github.com/apache/beam/issues/21230): Remove when new version of + // errorprone is released (2.11.0) + "unused" }) public class SamzaStoreStateInternalsTest implements Serializable { @Rule diff --git a/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java b/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java index dc41e2193ffd..bb39c186a9f2 100644 --- a/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java +++ b/runners/samza/src/test/java/org/apache/beam/runners/samza/translation/ConfigGeneratorTest.java @@ -55,7 +55,8 @@ import org.junit.Test; /** Test config generations for {@link org.apache.beam.runners.samza.SamzaRunner}. 
*/ -// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0) +// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is +// released (2.11.0) @SuppressWarnings("unused") public class ConfigGeneratorTest { private static final String APP_RUNNER_CLASS = "app.runner.class"; diff --git a/runners/spark/job-server/spark_job_server.gradle b/runners/spark/job-server/spark_job_server.gradle index ea7df2c3c0f1..d4bc26382ace 100644 --- a/runners/spark/job-server/spark_job_server.gradle +++ b/runners/spark/job-server/spark_job_server.gradle @@ -123,24 +123,24 @@ def portableValidatesRunnerTask(String name, boolean streaming, boolean docker, excludeCategories 'org.apache.beam.sdk.testing.UsesKeyInParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesOnWindowExpiration' excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream' - // TODO (BEAM-7222) SplittableDoFnTests + // TODO (https://github.com/apache/beam/issues/19468) SplittableDoFnTests excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesStrictTimerOrdering' excludeCategories 'org.apache.beam.sdk.testing.UsesBundleFinalizer' // Currently unsupported in portable streaming: - // TODO (BEAM-10712) + // TODO (https://github.com/apache/beam/issues/20395) excludeCategories 'org.apache.beam.sdk.testing.UsesSideInputs' - // TODO (BEAM-10754) + // TODO (https://github.com/apache/beam/issues/20396) excludeCategories 'org.apache.beam.sdk.testing.UsesStatefulParDo' - // TODO (BEAM-10755) + // TODO (https://github.com/apache/beam/issues/20397) excludeCategories 'org.apache.beam.sdk.testing.UsesTimersInParDo' } testFilter = { - // TODO (BEAM-10094) + // TODO (https://github.com/apache/beam/issues/20189) excludeTestsMatching 
'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2' - // TODO (BEAM-10784) Currently unsupported in portable streaming: + // TODO (https://github.com/apache/beam/issues/20429) Currently unsupported in portable streaming: // // Timeout error excludeTestsMatching 'org.apache.beam.sdk.testing.PAssertTest.testWindowedContainsInAnyOrder' excludeTestsMatching 'org.apache.beam.sdk.testing.PAssertTest.testWindowedSerializablePredicate' @@ -182,14 +182,14 @@ def portableValidatesRunnerTask(String name, boolean streaming, boolean docker, excludeCategories 'org.apache.beam.sdk.testing.UsesKeyInParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesOnWindowExpiration' excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream' - // TODO (BEAM-7222) SplittableDoFnTests + // TODO (https://github.com/apache/beam/issues/19468) SplittableDoFnTests excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo' excludeCategories 'org.apache.beam.sdk.testing.UsesStrictTimerOrdering' excludeCategories 'org.apache.beam.sdk.testing.UsesBundleFinalizer' } testFilter = { - // TODO (BEAM-10094) + // TODO (https://github.com/apache/beam/issues/20189) excludeTestsMatching 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2' for (String test : sickbayTests) { diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle index 81b10f280c36..2fac13b0bf7a 100644 --- a/runners/spark/spark_runner.gradle +++ b/runners/spark/spark_runner.gradle @@ -371,7 +371,7 @@ tasks.register("examplesIntegrationTest", Test) { testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs) useJUnit { filter { - // TODO (BEAM-14019) Fix integration Tests to run with SparkRunner: Failed to read from sharded output + // TODO (https://github.com/apache/beam/issues/21344) Fix 
integration Tests to run with SparkRunner: Failed to read from sharded output excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInStreamingStaticSharding' excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding' } diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java index 5dff798e7b0c..f913f7cc033d 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunner.java @@ -156,7 +156,8 @@ public SparkPipelineResult run(final Pipeline pipeline) { detectTranslationMode(pipeline); // Default to using the primitive versions of Read.Bounded and Read.Unbounded. - // TODO(BEAM-10670): Use SDF read as default when we address performance issue. + // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address + // performance issue. if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java index a6f59dbfbd44..5f1da96e29d7 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkRunnerDebugger.java @@ -82,7 +82,8 @@ public SparkPipelineResult run(Pipeline pipeline) { options.isStreaming() || options.as(TestSparkPipelineOptions.class).isForceStreaming(); // Default to using the primitive versions of Read.Bounded and Read.Unbounded. - // TODO(BEAM-10670): Use SDF read as default when we address performance issue. 
+ // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address + // performance issue. if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java index a536e59c723d..748e754364b7 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkTransformOverrides.java @@ -34,7 +34,8 @@ class SparkTransformOverrides { public static List getDefaultOverrides(boolean streaming) { ImmutableList.Builder builder = ImmutableList.builder(); - // TODO: [BEAM-5358] Support @RequiresStableInput on Spark runner + // TODO: [https://github.com/apache/beam/issues/19107] Support @RequiresStableInput on Spark + // runner builder.add( PTransformOverride.of( PTransformMatchers.requiresStableInputParDoMulti(), diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java index 162c8f09cea4..d66e0c771864 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/SparkStructuredStreamingRunner.java @@ -173,7 +173,8 @@ private AbstractTranslationContext translatePipeline(Pipeline pipeline) { // Default to using the primitive versions of Read.Bounded and Read.Unbounded for non-portable // execution. - // TODO(BEAM-10670): Use SDF read as default when we address performance issue. 
+ // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address + // performance issue. if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java index 5760a81f3099..996f60cb7478 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/structuredstreaming/translation/SparkTransformOverrides.java @@ -34,7 +34,8 @@ class SparkTransformOverrides { public static List getDefaultOverrides(boolean streaming) { ImmutableList.Builder builder = ImmutableList.builder(); - // TODO: [BEAM-5358] Support @RequiresStableInput on Spark runner + // TODO: [https://github.com/apache/beam/issues/19107] Support @RequiresStableInput on Spark + // runner builder.add( PTransformOverride.of( PTransformMatchers.requiresStableInputParDoMulti(), diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java index 1f61cdfbb643..08b418fa3666 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkStreamingPortablePipelineTranslator.java @@ -236,7 +236,7 @@ private static void translateExecutableStage( Coder windowCoder = getWindowingStrategy(inputPCollectionId, components).getWindowFn().windowCoder(); - // TODO (BEAM-10712): 
handle side inputs. + // TODO (https://github.com/apache/beam/issues/20395): handle side inputs. if (stagePayload.getSideInputsCount() > 0) { throw new UnsupportedOperationException( "Side inputs to executable stage are currently unsupported."); @@ -321,7 +321,8 @@ private static void translateFlatten( // create a single RDD stream. Queue>> q = new LinkedBlockingQueue<>(); q.offer(((BoundedDataset) dataset).getRDD()); - // TODO (BEAM-10789): this is not recoverable from checkpoint! + // TODO (https://github.com/apache/beam/issues/20426): this is not recoverable from + // checkpoint! JavaDStream> dStream = context.getStreamingContext().queueStream(q); dStreams.add(dStream); } diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java index a8108105560d..077b646697f8 100644 --- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java +++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java @@ -271,7 +271,8 @@ public void evaluate(Flatten.PCollections transform, EvaluationContext contex // create a single RDD stream. Queue>> q = new LinkedBlockingQueue<>(); q.offer(((BoundedDataset) dataset).getRDD()); - // TODO (BEAM-10789): this is not recoverable from checkpoint! + // TODO (https://github.com/apache/beam/issues/20426): this is not recoverable from + // checkpoint! 
JavaDStream> dStream = context.getStreamingContext().queueStream(q); dStreams.add(dStream); } diff --git a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java index 3767d5c1f33a..bf197b3eeb7a 100644 --- a/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java +++ b/runners/spark/src/test/java/org/apache/beam/runners/spark/translation/streaming/ResumeFromCheckpointStreamingTest.java @@ -352,7 +352,7 @@ public void process(ProcessContext c) { /** * A custom PAssert that avoids using {@link org.apache.beam.sdk.transforms.Flatten} until - * BEAM-1444 is resolved. + * https://github.com/apache/beam/issues/18144 is resolved. */ private static class PAssertWithoutFlatten extends PTransform>, PDone> { diff --git a/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java b/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java index d48bcb85b45d..890172f89343 100644 --- a/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java +++ b/runners/twister2/src/main/java/org/apache/beam/runners/twister2/Twister2Runner.java @@ -91,7 +91,8 @@ public PipelineResult run(Pipeline pipeline) { LOG.info("Translating pipeline to Twister2 program."); pipeline.replaceAll(getDefaultOverrides()); - // TODO(BEAM-10670): Use SDF read as default when we address performance issue. + // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address + // performance issue. 
if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); } @@ -152,7 +153,8 @@ public PipelineResult runTest(Pipeline pipeline) { LOG.info("Translating pipeline to Twister2 program."); pipeline.replaceAll(getDefaultOverrides()); - // TODO(BEAM-10670): Use SDF read as default when we address performance issue. + // TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address + // performance issue. if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) { SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline); }