Update references from Jira to GH for the Runners (apache#21835)
* Update references from Jira to GH for the Runners

* Spotless format

* Switch to urls

* Spotless apply

* Spotless apply

* Fix awkward formatting
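
The bullets above describe a mechanical rewrite of Jira ids to GitHub issue URLs. As an illustration only (the commit does not say how it was produced), here is a minimal Java sketch of such a rewrite, assuming a hypothetical Jira-to-GitHub mapping table; the two entries shown are pairs that appear in the hunks below:

import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class JiraToGitHubRewriter {
  // Hypothetical mapping; the real table would come from the Jira migration.
  private static final Map<String, String> JIRA_TO_ISSUE =
      Map.of("BEAM-12082", "18371", "BEAM-10109", "20192");

  private static final Pattern JIRA_ID = Pattern.compile("BEAM-\\d+");

  // Rewrites every mapped BEAM-XXXX token in a line to its issue URL and
  // leaves unmapped ids (e.g. BEAM-12710 below) untouched.
  static String rewrite(String line) {
    Matcher m = JIRA_ID.matcher(line);
    StringBuilder sb = new StringBuilder();
    while (m.find()) {
      String issue = JIRA_TO_ISSUE.get(m.group());
      m.appendReplacement(
          sb,
          issue == null ? m.group() : "https://github.com/apache/beam/issues/" + issue);
    }
    m.appendTail(sb);
    return sb.toString();
  }
}

The repeated Spotless bullets fit this picture: the longer URLs push many comments past the line-length limit, so the formatter re-wraps them, which is most of what the hunks below show.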
damccorm authored Jun 16, 2022
1 parent 37a8f3c commit b774ff5
Showing 66 changed files with 200 additions and 146 deletions.
@@ -243,7 +243,8 @@ public OutputT expand(InputT input) {
PValues.expandInput(PBegin.in(p)),
ImmutableMap.of(entry.getKey(), (PCollection<?>) entry.getValue()),
Impulse.create(),
-// TODO(BEAM-12082): Add proper support for Resource Hints with XLang.
+// TODO(https://github.com/apache/beam/issues/18371): Add proper support for
+// Resource Hints with XLang.
ResourceHints.create(),
p);
// using fake Impulses to provide inputs
@@ -46,7 +46,7 @@ public class NativeTransforms {
* Returns true if and only if the Runner understands this transform and can handle it directly.
*/
public static boolean isNative(RunnerApi.PTransform pTransform) {
-// TODO(BEAM-10109) Use default (context) classloader.
+// TODO(https://github.com/apache/beam/issues/20192) Use default (context) classloader.
Iterator<IsNativeTransform> matchers =
ServiceLoader.load(IsNativeTransform.class, NativeTransforms.class.getClassLoader())
.iterator();
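
For context on the hunk above: isNative discovers matcher implementations through ServiceLoader with an explicitly chosen classloader, and the linked issue proposes moving to the default (context) classloader. A self-contained sketch of the pattern, with a hypothetical service interface (MyMatcher is not a Beam type):

import java.util.ServiceLoader;

interface MyMatcher {
  boolean matches(String urn);
}

class MatcherRegistry {
  static boolean anyMatch(String urn) {
    // Explicit classloader, mirroring the code above; the one-argument
    // ServiceLoader.load(MyMatcher.class) would use the thread's context
    // classloader instead, which is what the TODO suggests.
    ServiceLoader<MyMatcher> loader =
        ServiceLoader.load(MyMatcher.class, MatcherRegistry.class.getClassLoader());
    for (MyMatcher matcher : loader) {
      if (matcher.matches(urn)) {
        return true;
      }
    }
    return false;
  }
}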
@@ -442,7 +442,8 @@ public RunnerApi.PTransform translate(

// Required runner implemented transforms should not have an environment id.
if (!RUNNER_IMPLEMENTED_TRANSFORMS.contains(spec.getUrn())) {
-// TODO(BEAM-9309): Remove existing hacks around deprecated READ transform.
+// TODO(https://github.com/apache/beam/issues/20094): Remove existing hacks around
+// deprecated READ transform.
if (spec.getUrn().equals(READ_TRANSFORM_URN)) {
// Only assigning environment to Bounded reads. Not assigning an environment to
// Unbounded
@@ -599,7 +599,8 @@ public RunnerApi.StateSpec dispatchOrderedList(Coder<?> elementCoder) {
.setOrderedListSpec(
RunnerApi.OrderedListStateSpec.newBuilder()
.setElementCoderId(registerCoderOrThrow(components, elementCoder)))
-// TODO(BEAM-10650): Update with correct protocol once the protocol is defined and
+// TODO(https://github.com/apache/beam/issues/20486): Update with correct protocol
+// once the protocol is defined and
// the SDK harness uses it.
.build();
}
@@ -691,7 +691,8 @@ public void tearDown() {
* PrimitiveBoundedRead} and {@link PrimitiveUnboundedRead} if either the experiment {@code
use_deprecated_read} or {@code beam_fn_api_use_deprecated_read} is specified.
*
-* <p>TODO(BEAM-10670): Remove the primitive Read and make the splittable DoFn the only option.
+* <p>TODO(https://github.com/apache/beam/issues/20530): Remove the primitive Read and make the
+* splittable DoFn the only option.
*/
public static void convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(Pipeline pipeline) {
if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "use_sdf_read")
@@ -706,7 +707,8 @@ public static void convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(Pi
* Converts {@link Read} based Splittable DoFn expansions to primitive reads implemented by {@link
* PrimitiveBoundedRead} and {@link PrimitiveUnboundedRead}.
*
-* <p>TODO(BEAM-10670): Remove the primitive Read and make the splittable DoFn the only option.
+* <p>TODO(https://github.com/apache/beam/issues/20530): Remove the primitive Read and make the
+* splittable DoFn the only option.
*/
public static void convertReadBasedSplittableDoFnsToPrimitiveReads(Pipeline pipeline) {
pipeline.replaceAll(
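
A usage sketch of the experiment gate described in the javadocs above; the pipeline setup is illustrative, and the SplittableParDo import path is assumed from the runners-core-construction module:

import org.apache.beam.runners.core.construction.SplittableParDo;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class DeprecatedReadExample {
  public static void main(String[] args) {
    // With use_deprecated_read (or beam_fn_api_use_deprecated_read) set, the
    // call below swaps Read-based splittable DoFn expansions for the
    // primitive reads; without either flag it leaves the pipeline alone.
    PipelineOptions options =
        PipelineOptionsFactory.fromArgs("--experiments=use_deprecated_read").create();
    Pipeline pipeline = Pipeline.create(options);
    // ... apply the pipeline's transforms, including Read-based sources ...
    SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
  }
}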
@@ -57,7 +57,7 @@ class ProjectionProducerVisitor extends PipelineVisitor.Defaults {
public CompositeBehavior enterCompositeTransform(Node node) {
PTransform<?, ?> transform = node.getTransform();

-// TODO(BEAM-13658) Support inputs other than PBegin.
+// TODO(https://github.com/apache/beam/issues/21359) Support inputs other than PBegin.
if (!node.getInputs().isEmpty()) {
return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
}
@@ -95,7 +95,7 @@ public static void optimize(Pipeline pipeline) {
}
}

-// TODO(BEAM-13658) Support inputs other than PBegin.
+// TODO(https://github.com/apache/beam/issues/21359) Support inputs other than PBegin.
private static class PushdownOverrideFactory<
OutputT extends POutput, TransformT extends PTransform<PBegin, OutputT>>
implements PTransformOverrideFactory<PBegin, OutputT, TransformT> {
@@ -83,7 +83,9 @@
@RunWith(JUnit4.class)
@SuppressWarnings({
"rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
"unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
// errorprone is released (2.11.0)
"unused"
})
public class PTransformMatchersTest implements Serializable {
@Rule
@@ -104,8 +104,8 @@ public void testIntermediateProducer() {
FieldAccessDescriptor.withFieldNames("foo", "bar");
p.apply(source).apply(originalT).apply(new FieldAccessTransform(downstreamFieldAccess));

-// TODO(BEAM-13658) Support pushdown on intermediate transforms.
-// For now, test that the pushdown optimizer ignores immediate transforms.
+// TODO(https://github.com/apache/beam/issues/21359) Support pushdown on intermediate
+// transforms. For now, test that the pushdown optimizer ignores immediate transforms.
ProjectionPushdownOptimizer.optimize(p);
Assert.assertTrue(pipelineHasTransform(p, originalT));
}
@@ -73,7 +73,8 @@ StateTag<WatermarkHoldState> watermarkHoldTagForTimestampCombiner(
StateTags.makeSystemTagInternal(
StateTags.watermarkStateInternal("extra", TimestampCombiner.EARLIEST));

-// [BEAM-420] Seems likely these should all be transient or this class should not be Serializable
+// [https://github.com/apache/beam/issues/18014] Seems likely these should all be transient or
+// this class should not be Serializable
@SuppressFBWarnings("SE_BAD_FIELD")
private final TimerInternals timerInternals;

@@ -70,7 +70,8 @@ public void update(HistogramCell other) {
dirty.afterModification();
}

-// TODO(BEAM-12103): Update this function to allow incrementing the infinite buckets as well.
+// TODO(https://github.com/apache/beam/issues/20853): Update this function to allow incrementing
+// the infinite buckets as well.
// and remove the incTopBucketCount and incBotBucketCount methods.
// Using 0 and length -1 as the bucketIndex.
public void incBucketCount(int bucketIndex, long count) {
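
The TODO above wants incBucketCount to also address the two "infinite" buckets, using 0 and length - 1 as the index. A self-contained sketch of that layout (SimpleHistogram is a hypothetical stand-in, not Beam's HistogramCell):

class SimpleHistogram {
  // counts[0] is the underflow bucket, counts[counts.length - 1] the
  // overflow bucket, and everything between is a finite bucket.
  private final long[] counts;

  SimpleHistogram(int numFiniteBuckets) {
    this.counts = new long[numFiniteBuckets + 2];
  }

  // A single increment path covers finite and infinite buckets alike, which
  // would make separate incTopBucketCount/incBotBucketCount methods unnecessary.
  void incBucketCount(int bucketIndex, long count) {
    counts[bucketIndex] += count;
  }

  long underflowCount() {
    return counts[0];
  }

  long overflowCount() {
    return counts[counts.length - 1];
  }
}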
@@ -63,7 +63,8 @@ public MetricsContainerImpl getUnboundContainer() {
/** Returns the container for the given step name. */
public MetricsContainerImpl getContainer(String stepName) {
if (stepName == null) {
-// TODO(BEAM-6538): Disallow this in the future, some tests rely on an empty step name today.
+// TODO(https://github.com/apache/beam/issues/19275): Disallow this in the future, some tests
+// rely on an empty step name today.
return getUnboundContainer();
}
return metricsContainers.computeIfAbsent(
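
The lookup above is a create-or-fetch on a concurrent map, with a shared "unbound" container as the legacy fallback for null step names. A sketch of the same shape with a hypothetical container type:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class ContainerRegistry {
  static class Container {} // hypothetical stand-in for MetricsContainerImpl

  private final Container unbound = new Container();
  private final ConcurrentMap<String, Container> containers = new ConcurrentHashMap<>();

  Container getContainer(String stepName) {
    if (stepName == null) {
      // Legacy fallback, as in the diff above; the TODO plans to disallow it.
      return unbound;
    }
    // Atomically creates the per-step container on first use.
    return containers.computeIfAbsent(stepName, name -> new Container());
  }
}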
@@ -68,7 +68,9 @@
@RunWith(JUnit4.class)
@SuppressWarnings({
"rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
"unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
// errorprone is released (2.11.0)
"unused"
})
public class SimpleDoFnRunnerTest {
@Rule public ExpectedException thrown = ExpectedException.none();
2 changes: 1 addition & 1 deletion runners/direct-java/build.gradle
@@ -249,7 +249,7 @@ task examplesIntegrationTest(type: Test) {
testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
useJUnit {
filter{
-// TODO (BEAM-14019) Fix integration Tests to run with DirectRunner: Timeout error
+// TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with DirectRunner: Timeout error
excludeTestsMatching 'org.apache.beam.examples.complete.TfIdfIT'
excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding'
}
@@ -65,7 +65,8 @@ final class BoundedReadEvaluatorFactory implements TransformEvaluatorFactory {
private final EvaluationContext evaluationContext;
private final PipelineOptions options;

-// TODO: (BEAM-723) Create a shared ExecutorService for maintenance tasks in the DirectRunner.
+// TODO: (https://github.com/apache/beam/issues/18079) Create a shared ExecutorService for
+// maintenance tasks in the DirectRunner.
@VisibleForTesting
final ExecutorService executor =
Executors.newCachedThreadPool(
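
The TODO above asks for one shared ExecutorService for DirectRunner maintenance work rather than a pool per factory instance. A minimal sketch of that shape (class and thread names are hypothetical):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class MaintenanceExecutor {
  // A single JVM-wide pool shared by all factories, instead of each factory
  // building its own cached thread pool as above.
  private static final ExecutorService SHARED =
      Executors.newCachedThreadPool(
          runnable -> {
            Thread thread = new Thread(runnable, "direct-runner-maintenance");
            thread.setDaemon(true); // maintenance work should not block JVM shutdown
            return thread;
          });

  static ExecutorService get() {
    return SHARED;
  }

  private MaintenanceExecutor() {}
}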
@@ -253,7 +253,8 @@ void performRewrites(Pipeline pipeline) {
// The last set of overrides includes GBK overrides used in WriteView
pipeline.replaceAll(groupByKeyOverrides());

-// TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+// TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+// performance issue.
SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
}

@@ -147,7 +147,8 @@ private RemovalListener<StepAndKey, TransformExecutorService> shutdownExecutorSe
}

@Override
-// TODO: [BEAM-4563] Pass Future back to consumer to check for async errors
+// TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for
+// async errors
@SuppressWarnings("FutureReturnValueIgnored")
public void start(DirectGraph graph, RootProviderRegistry rootProviderRegistry) {
int numTargetSplits = Math.max(3, targetParallelism);
@@ -70,7 +70,8 @@ private ParallelTransformExecutor(ExecutorService executor) {
}

@Override
-// TODO: [BEAM-4563] Pass Future back to consumer to check for async errors
+// TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for
+// async errors
@SuppressWarnings("FutureReturnValueIgnored")
public void schedule(TransformExecutor work) {
if (active.get()) {
@@ -154,7 +155,8 @@ public void shutdown() {
workQueue.clear();
}

-// TODO: [BEAM-4563] Pass Future back to consumer to check for async errors
+// TODO: [https://github.com/apache/beam/issues/18968] Pass Future back to consumer to check for
+// async errors
@SuppressWarnings("FutureReturnValueIgnored")
private void updateCurrentlyEvaluating() {
if (currentlyEvaluating.get() == null) {
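
The TODO repeated in this file is about not discarding the Future that submit() returns, which is what the FutureReturnValueIgnored suppression hides. A self-contained sketch of the difference (the failing task is hypothetical):

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

class AsyncErrorDemo {
  public static void main(String[] args) throws InterruptedException {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    Runnable failing = () -> { throw new IllegalStateException("async failure"); };

    // Fire-and-forget, as the suppressed code above does: submit() captures
    // the exception in a Future that nobody reads, so the error vanishes.
    executor.submit(failing);

    // What the TODO proposes: hand the Future back so the consumer can
    // observe the async error.
    Future<?> result = executor.submit(failing);
    try {
      result.get();
    } catch (ExecutionException e) {
      System.err.println("observed: " + e.getCause()); // the IllegalStateException
    }
    executor.shutdown();
  }
}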
@@ -88,7 +88,9 @@
@RunWith(JUnit4.class)
@SuppressWarnings({
"rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
"unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
// errorprone is released (2.11.0)
"unused"
})
public class StatefulParDoEvaluatorFactoryTest implements Serializable {
@Mock private transient EvaluationContext mockEvaluationContext;
6 changes: 3 additions & 3 deletions runners/flink/flink_runner.gradle
@@ -225,7 +225,7 @@ class ValidatesRunnerConfig {
}

def sickbayTests = [
-// TODO(BEAM-13573)
+// TODO(https://github.com/apache/beam/issues/21306)
'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testOnWindowTimestampSkew',
]

@@ -358,9 +358,9 @@ tasks.register("examplesIntegrationTest", Test) {
testClassesDirs = files(project(":examples:java").sourceSets.test.output.classesDirs)
useJUnit {
filter{
-// TODO (BEAM-14019) Fix integration Tests to run with FlinkRunner: Assertion error
+// TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with FlinkRunner: Assertion error
excludeTestsMatching 'org.apache.beam.examples.WindowedWordCountIT.testWindowedWordCountInBatchDynamicSharding'
-// TODO (BEAM-14019) Fix integration Tests to run with FlinkRunner: Error deleting table, Not found: Dataset
+// TODO (https://github.com/apache/beam/issues/21344) Fix integration Tests to run with FlinkRunner: Error deleting table, Not found: Dataset
excludeTestsMatching 'org.apache.beam.examples.cookbook.BigQueryTornadoesIT.testE2eBigQueryTornadoesWithStorageApiUsingQuery'
}
}
8 changes: 4 additions & 4 deletions runners/flink/job-server/flink_job_server.gradle
@@ -193,17 +193,17 @@ def portableValidatesRunnerTask(String name, boolean streaming, boolean checkpoi
excludeCategories 'org.apache.beam.sdk.testing.UsesPerKeyOrderInBundle'
},
testFilter: {
-// TODO(BEAM-10016)
+// TODO(https://github.com/apache/beam/issues/20269)
excludeTestsMatching 'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2'
-// TODO(BEAM-12039)
+// TODO(https://github.com/apache/beam/issues/20843)
excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testDiscardingMode'
-// TODO(BEAM-12038)
+// TODO(https://github.com/apache/beam/issues/20844)
excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testLateDataAccumulating'
// TODO(BEAM-12710)
excludeTestsMatching 'org.apache.beam.sdk.testing.TestStreamTest.testFirstElementLate'
// TODO(BEAM-13498)
excludeTestsMatching 'org.apache.beam.sdk.transforms.ParDoTest$TimestampTests.testProcessElementSkew'
-// TODO(BEAM-13952)
+// TODO(https://github.com/apache/beam/issues/21472)
excludeTestsMatching 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState'
},
)
@@ -77,7 +77,8 @@ protected FlinkRunner(FlinkPipelineOptions options) {
@Override
public PipelineResult run(Pipeline pipeline) {
// Portable flink only support SDF as read.
-// TODO(BEAM-10670): Use SDF read as default when we address performance issue.
+// TODO(https://github.com/apache/beam/issues/20530): Use SDF read as default when we address
+// performance issue.
if (!ExperimentalOptions.hasExperiment(pipeline.getOptions(), "beam_fn_api")) {
SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
}
@@ -173,7 +173,8 @@ public void onCompleted(ProcessBundleResponse response) {
metricContainer.updateMetrics(stepName, response.getMonitoringInfosList());
}
};
-// TODO(BEAM-11021): Support bundle finalization in portable batch.
+// TODO(https://github.com/apache/beam/issues/19526): Support bundle finalization in portable
+// batch.
finalizationHandler =
bundleId -> {
throw new UnsupportedOperationException(
@@ -708,9 +708,9 @@ public void flushData() throws Exception {
// Manually drain processing time timers since Flink will ignore pending
// processing-time timers when upstream operators have shut down and will also
// shut down this operator with pending processing-time timers.
-// TODO(BEAM-11210, FLINK-18647): It doesn't work efficiently when the watermark of upstream
-// advances
-// to MAX_TIMESTAMP immediately.
+// TODO(https://github.com/apache/beam/issues/20600, FLINK-18647): It doesn't work
+// efficiently when the watermark of upstream advances to MAX_TIMESTAMP
+// immediately.
if (numProcessingTimeTimers() > 0) {
timerInternals.processPendingProcessingTimeTimers();
}
@@ -121,7 +121,7 @@ public static void afterClass() throws Exception {
* restore the savepoint to check if we produce idempotent results.
*/
@Test(timeout = 30_000)
@Ignore("BEAM-13575")
@Ignore("https://github.com/apache/beam/issues/21333")
public void testParDoRequiresStableInput() throws Exception {
FlinkPipelineOptions options = FlinkPipelineOptions.defaults();
options.setParallelism(1);
@@ -84,7 +84,9 @@
*/
@SuppressWarnings({
"rawtypes", // TODO(https://github.com/apache/beam/issues/20447)
"unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
// errorprone is released (2.11.0)
"unused"
})
public class FlinkSavepointTest implements Serializable {

@@ -55,7 +55,8 @@
import org.junit.Test;

/** Tests if overrides are properly applied. */
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
@SuppressWarnings("unused")
public class FlinkStreamingPipelineTranslatorTest {

@@ -58,7 +58,8 @@
* org.apache.beam.runners.flink.translation.wrappers.streaming.ExecutableStageDoFnOperator}.
*/
@RunWith(Parameterized.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
@SuppressWarnings("unused")
public class PortableStateExecutionTest implements Serializable {

@@ -71,7 +71,8 @@
* of a given timer is run.
*/
@RunWith(Parameterized.class)
-// TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
+// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of errorprone is
+// released (2.11.0)
@SuppressWarnings("unused")
public class PortableTimersExecutionTest implements Serializable {

@@ -117,7 +117,9 @@
@RunWith(JUnit4.class)
@SuppressWarnings({
"keyfor",
"unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
// TODO(https://github.com/apache/beam/issues/21230): Remove when new version of
// errorprone is released (2.11.0)
"unused"
})
public class DoFnOperatorTest {
