Update xlang kinesis to v2 #30926
4 fail, 34 pass in 2m 6s
Annotations
github-actions / Test Results
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) failed
sdks/python/pytest_postCommitIT-direct-py311.xml [took 38s]
Raw output
KeyError: '__pyx_vtable__'
self = <apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest testMethod=test_streaming_with_attributes>
    @pytest.mark.it_postcommit
    def test_streaming_with_attributes(self):
>     self._test_streaming(with_attributes=True)
apache_beam/io/gcp/pubsub_integration_test.py:221:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/io/gcp/pubsub_integration_test.py:209: in _test_streaming
pubsub_it_pipeline.run_pipeline(
apache_beam/io/gcp/pubsub_it_pipeline.py:93: in run_pipeline
result = p.run()
apache_beam/pipeline.py:594: in run
return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/test_direct_runner.py:42: in run_pipeline
self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/direct/direct_runner.py:159: in run_pipeline
from apache_beam.runners.portability import prism_runner
apache_beam/runners/portability/prism_runner.py:39: in <module>
from apache_beam.runners.portability import job_server
apache_beam/runners/portability/job_server.py:30: in <module>
from apache_beam.runners.portability import local_job_service
apache_beam/runners/portability/local_job_service.py:53: in <module>
from apache_beam.runners.portability import portable_runner
apache_beam/runners/portability/portable_runner.py:55: in <module>
from apache_beam.runners.portability.fn_api_runner.fn_runner import translations
apache_beam/runners/portability/fn_api_runner/__init__.py:18: in <module>
from apache_beam.runners.portability.fn_api_runner.fn_runner import FnApiRunner
apache_beam/runners/portability/fn_api_runner/fn_runner.py:69: in <module>
from apache_beam.runners.portability.fn_api_runner import execution
apache_beam/runners/portability/fn_api_runner/execution.py:64: in <module>
from apache_beam.runners.portability.fn_api_runner import translations
apache_beam/runners/portability/fn_api_runner/translations.py:55: in <module>
from apache_beam.runners.worker import bundle_processor
apache_beam/runners/worker/bundle_processor.py:77: in <module>
from apache_beam.runners.worker import operations
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> ???
E KeyError: '__pyx_vtable__'
apache_beam/runners/worker/operations.py:1: KeyError
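A KeyError: '__pyx_vtable__' raised while importing apache_beam.runners.worker.operations is usually a sign that a Cython-compiled extension in the test environment is out of step with the checked-out Python sources, rather than a defect in the test itself. A minimal diagnostic sketch under that assumption (check_worker_operations is an illustrative helper, not Beam code):

import importlib
import importlib.util

MODULE = "apache_beam.runners.worker.operations"

def check_worker_operations():
    # Report whether the module resolves to a compiled extension (.so/.pyd)
    # or to the plain .py source; a compiled artifact left over from an older
    # build is the usual trigger for the '__pyx_vtable__' KeyError.
    spec = importlib.util.find_spec(MODULE)
    print("origin:", spec.origin if spec else "module not found")
    try:
        importlib.import_module(MODULE)
        print("import succeeded")
    except KeyError as err:
        print("import failed, stale Cython build suspected:", err)

if __name__ == "__main__":
    check_worker_operations()

If the origin points at an outdated compiled artifact, rebuilding the SDK's extensions (or removing them so the pure-Python modules are used) typically clears this class of failure.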
github-actions / Test Results
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) failed
sdks/python/pytest_postCommitIT-direct-py311.xml [took 2s]
Raw output
KeyError: '__pyx_vtable__'
self = <apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT testMethod=test_big_query_legacy_sql>
    @pytest.mark.it_postcommit
    @retry(reraise=True, stop=stop_after_attempt(3))
    def test_big_query_legacy_sql(self):
      verify_query = DIALECT_OUTPUT_VERIFY_QUERY % self.output_table
      expected_checksum = test_utils.compute_hash(DIALECT_OUTPUT_EXPECTED)
      pipeline_verifiers = [
          PipelineStateMatcher(),
          BigqueryMatcher(
              project=self.project,
              query=verify_query,
              checksum=expected_checksum)
      ]
      extra_opts = {
          'query': LEGACY_QUERY,
          'output': self.output_table,
          'output_schema': DIALECT_OUTPUT_SCHEMA,
          'use_standard_sql': False,
          'wait_until_finish_duration': WAIT_UNTIL_FINISH_DURATION_MS,
          'on_success_matcher': all_of(*pipeline_verifiers),
      }
      options = self.test_pipeline.get_full_options_as_args(**extra_opts)
>     big_query_query_to_table_pipeline.run_bq_pipeline(options)
apache_beam/io/gcp/big_query_query_to_table_it_test.py:181:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/io/gcp/big_query_query_to_table_pipeline.py:103: in run_bq_pipeline
result = p.run()
apache_beam/testing/test_pipeline.py:115: in run
result = super().run(
apache_beam/pipeline.py:570: in run
self._options).run(False)
apache_beam/pipeline.py:594: in run
return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/test_direct_runner.py:42: in run_pipeline
self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/direct/direct_runner.py:149: in run_pipeline
from apache_beam.runners.portability.fn_api_runner import fn_runner
apache_beam/runners/portability/fn_api_runner/__init__.py:18: in <module>
from apache_beam.runners.portability.fn_api_runner.fn_runner import FnApiRunner
apache_beam/runners/portability/fn_api_runner/fn_runner.py:69: in <module>
from apache_beam.runners.portability.fn_api_runner import execution
apache_beam/runners/portability/fn_api_runner/execution.py:64: in <module>
from apache_beam.runners.portability.fn_api_runner import translations
apache_beam/runners/portability/fn_api_runner/translations.py:55: in <module>
from apache_beam.runners.worker import bundle_processor
apache_beam/runners/worker/bundle_processor.py:77: in <module>
from apache_beam.runners.worker import operations
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> ???
E KeyError: '__pyx_vtable__'
apache_beam/runners/worker/operations.py:1: KeyError
github-actions / Test Results
test_big_query_write_temp_table_append_schema_update_2 (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) failed
sdks/python/pytest_postCommitIT-direct-py311.xml [took 1s]
Raw output
apitools.base.py.exceptions.HttpConflictError: HttpError accessing <https://bigquery.googleapis.com/bigquery/v2/projects/apache-beam-testing/datasets/python_write_to_table_17347067372515b4/tables?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Fri, 20 Dec 2024 14:58:59 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '409', 'content-length': '424', '-content-encoding': 'gzip'}>, content <{
"error": {
"code": 409,
"message": "Already Exists: Table apache-beam-testing:python_write_to_table_17347067372515b4.python_append_schema_update",
"errors": [
{
"message": "Already Exists: Table apache-beam-testing:python_write_to_table_17347067372515b4.python_append_schema_update",
"domain": "global",
"reason": "duplicate"
}
],
"status": "ALREADY_EXISTS"
}
}
>
a = (<apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests testMethod=test_big_query_write_temp_table_append_schema_update_2>,)
kw = {}
    @wraps(func)
    def standalone_func(*a, **kw):
>       return func(*(a + p.args), **p.kwargs, **kw)
../../build/gradleenv/417525524/lib/python3.11/site-packages/parameterized/parameterized.py:620:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../build/gradleenv/417525524/lib/python3.11/site-packages/mock/mock.py:1452: in patched
return func(*newargs, **newkeywargs)
../../build/gradleenv/417525524/lib/python3.11/site-packages/parameterized/parameterized.py:93: in dummy_func
return orgfunc(*args, **kwargs)
../../build/gradleenv/417525524/lib/python3.11/site-packages/mock/mock.py:1452: in patched
return func(*newargs, **newkeywargs)
../../build/gradleenv/417525524/lib/python3.11/site-packages/tenacity/__init__.py:336: in wrapped_f
return copy(f, *args, **kw)
../../build/gradleenv/417525524/lib/python3.11/site-packages/tenacity/__init__.py:475: in __call__
do = self.iter(retry_state=retry_state)
../../build/gradleenv/417525524/lib/python3.11/site-packages/tenacity/__init__.py:376: in iter
result = action(retry_state)
../../build/gradleenv/417525524/lib/python3.11/site-packages/tenacity/__init__.py:418: in exc_check
raise retry_exc.reraise()
../../build/gradleenv/417525524/lib/python3.11/site-packages/tenacity/__init__.py:185: in reraise
raise self.last_attempt.result()
/opt/hostedtoolcache/Python/3.11.11/x64/lib/python3.11/concurrent/futures/_base.py:449: in result
return self.__get_result()
/opt/hostedtoolcache/Python/3.11.11/x64/lib/python3.11/concurrent/futures/_base.py:401: in __get_result
raise self._exception
../../build/gradleenv/417525524/lib/python3.11/site-packages/tenacity/__init__.py:478: in __call__
result = fn(*args, **kwargs)
apache_beam/io/gcp/bigquery_write_it_test.py:527: in test_big_query_write_temp_table_append_schema_update
self.create_table(table_name)
apache_beam/io/gcp/bigquery_write_it_test.py:117: in create_table
self.bigquery_client.client.tables.Insert(request)
apache_beam/io/gcp/internal/clients/bigquery/bigquery_v2_client.py:955: in Insert
return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/417525524/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/417525524/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <apache_beam.io.gcp.internal.clients.bigquery.bigquery_v2_client.BigqueryV2.TablesService object at 0x7b81059cfb50>
method_config = <ApiMethodInfo
relative_path: 'projects/{projectId}/datasets/{datasetId}/tables'
method_id: 'bigquery.tables.insert'...ype_name: 'BigqueryTablesInsertRequest'
response_type_name: 'Table'
request_field: 'table'
supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Fri, 2...ogleapis.com/bigquery/v2/projects/apache-beam-testing/datasets/python_write_to_table_17347067372515b4/tables?alt=json')
request = <BigqueryTablesInsertRequest
datasetId: 'python_write_to_table_17347067372515b4'
projectId: 'apache-beam-testing'
t...: 'python_write_to_table_17347067372515b4'
projectId: 'apache-beam-testing'
tableId: 'python_append_schema_update'>>>
    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E apitools.base.py.exceptions.HttpConflictError: HttpError accessing <https://bigquery.googleapis.com/bigquery/v2/projects/apache-beam-testing/datasets/python_write_to_table_17347067372515b4/tables?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Fri, 20 Dec 2024 14:58:59 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '409', 'content-length': '424', '-content-encoding': 'gzip'}>, content <{
E "error": {
E "code": 409,
E "message": "Already Exists: Table apache-beam-testing:python_write_to_table_17347067372515b4.python_append_schema_update",
E "errors": [
E {
E "message": "Already Exists: Table apache-beam-testing:python_write_to_table_17347067372515b4.python_append_schema_update",
E "domain": "global",
E "reason": "duplicate"
E }
E ],
E "status": "ALREADY_EXISTS"
E }
E }
E >
../../build/gradleenv/417525524/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpConflictError
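The 409 most likely comes from the tenacity retry visible in the stack: a second attempt of the test re-runs create_table after the first attempt already created python_append_schema_update. A minimal sketch of tolerating that conflict, reusing the apitools exception type from the trace (create_table_if_missing is an illustrative helper, not existing Beam test code):

from apitools.base.py import exceptions as apitools_exceptions

def create_table_if_missing(tables_service, request):
    # Insert the table, but treat an ALREADY_EXISTS conflict (HTTP 409) as
    # success so a retried attempt can reuse the table created earlier.
    try:
        return tables_service.Insert(request)
    except apitools_exceptions.HttpConflictError:
        return None

Deleting the table up front, or deriving a fresh table name per attempt, would achieve the same effect.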
github-actions / Test Results
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) failed
sdks/python/pytest_postCommitIT-direct-py311.xml [took 0s]
Raw output
KeyError: '__pyx_vtable__'
self = <apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT testMethod=test_datastore_write_limit>
    @pytest.mark.it_postcommit
    @unittest.skipIf(
        datastore_write_it_pipeline is None, 'GCP dependencies are not installed')
    def test_datastore_write_limit(self):
>     self.run_datastore_write(limit=self.LIMIT)
apache_beam/io/gcp/datastore/v1new/datastore_write_it_test.py:73:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/io/gcp/datastore/v1new/datastore_write_it_test.py:66: in run_datastore_write
datastore_write_it_pipeline.run(
apache_beam/io/gcp/datastore/v1new/datastore_write_it_pipeline.py:120: in run
p.run()
apache_beam/testing/test_pipeline.py:115: in run
result = super().run(
apache_beam/pipeline.py:570: in run
self._options).run(False)
apache_beam/pipeline.py:594: in run
return self.runner.run_pipeline(self, self._options)
apache_beam/runners/direct/test_direct_runner.py:42: in run_pipeline
self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/direct/direct_runner.py:149: in run_pipeline
from apache_beam.runners.portability.fn_api_runner import fn_runner
apache_beam/runners/portability/fn_api_runner/__init__.py:18: in <module>
from apache_beam.runners.portability.fn_api_runner.fn_runner import FnApiRunner
apache_beam/runners/portability/fn_api_runner/fn_runner.py:69: in <module>
from apache_beam.runners.portability.fn_api_runner import execution
apache_beam/runners/portability/fn_api_runner/execution.py:64: in <module>
from apache_beam.runners.portability.fn_api_runner import translations
apache_beam/runners/portability/fn_api_runner/translations.py:55: in <module>
from apache_beam.runners.worker import bundle_processor
apache_beam/runners/worker/bundle_processor.py:77: in <module>
from apache_beam.runners.worker import operations
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
> ???
E KeyError: '__pyx_vtable__'
apache_beam/runners/worker/operations.py:1: KeyError
github-actions / Test Results
38 tests found
There are 38 tests, see "Raw output" for the full list of tests.
Raw output
apache_beam.examples.wordcount_it_test.WordCountIT ‑ test_wordcount_it
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_legacy_sql
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_new_types
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_new_types_avro
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_standard_sql
apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT ‑ test_bigquery_read_1M_python
apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT ‑ test_bigquery_read_custom_1M_python
apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests ‑ test_read_queries
apache_beam.io.gcp.bigquery_read_it_test.ReadInteractiveRunnerTests ‑ test_read_in_interactive_runner
apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests ‑ test_iobase_source
apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests ‑ test_native_source
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_iobase_source
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_native_source
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_table_schema_retrieve
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_table_schema_retrieve_specifying_only_table
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_table_schema_retrieve_with_direct_read
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_column_selection
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_column_selection_and_row_restriction
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_column_selection_and_row_restriction_rows
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_native_datetime
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_query
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_query_and_filters
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_row_restriction
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_very_selective_filters
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_insert_errors_reporting
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_insert_non_transient_api_call_error
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_new_types
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_schema_autodetect
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_temp_table_append_schema_update
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_temp_table_append_schema_update_0
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_temp_table_append_schema_update_1
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_temp_table_append_schema_update_2
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_without_schema
apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT ‑ test_datastore_write_limit
apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest ‑ test_streaming_data_only
apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest ‑ test_streaming_with_attributes
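For local triage, the four failures above can be re-run through pytest's programmatic entry point, assuming a Python SDK checkout with the GCP test dependencies and project credentials configured; the node IDs below are reconstructed from the tracebacks above.

import sys

import pytest

# pytest node IDs for the four failing post-commit tests.
FAILED_TESTS = [
    "apache_beam/io/gcp/pubsub_integration_test.py"
    "::PubSubIntegrationTest::test_streaming_with_attributes",
    "apache_beam/io/gcp/big_query_query_to_table_it_test.py"
    "::BigQueryQueryToTableIT::test_big_query_legacy_sql",
    "apache_beam/io/gcp/bigquery_write_it_test.py"
    "::BigQueryWriteIntegrationTests::test_big_query_write_temp_table_append_schema_update_2",
    "apache_beam/io/gcp/datastore/v1new/datastore_write_it_test.py"
    "::DatastoreWriteIT::test_datastore_write_limit",
]

if __name__ == "__main__":
    sys.exit(pytest.main(["-v", *FAILED_TESTS]))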