See <https://ci-beam.apache.org/job/beam_PostCommit_Python39/2620/display/redirect?page=changes>
Changes: [noreply] Bump github.com/aws/aws-sdk-go-v2/credentials in /sdks (#29631)

------------------------------------------
[...truncated 12.00 MB...]
        'window_duration': 1,
        'on_success_matcher': all_of(state_verifier, bigquery_verifier)
    }

    # Register cleanup before pipeline execution.
    # Note that actual execution happens in reverse order.
    self.addCleanup(utils.delete_bq_dataset, self.project, self.dataset_ref)

    # Get pipeline options from command argument: --test-pipeline-options,
    # and start pipeline job by calling pipeline main function.
>   hourly_team_score.run(
        self.test_pipeline.get_full_options_as_args(**extra_opts),
        save_main_session=False)

apache_beam/examples/complete/game/hourly_team_score_it_test.py:89:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/examples/complete/game/hourly_team_score.py:299: in run
    (  # pylint: disable=expression-not-assigned
apache_beam/pipeline.py:612: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:559: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:586: in run
    return self.runner.run_pipeline(self, self._options)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <apache_beam.runners.dataflow.test_dataflow_runner.TestDataflowRunner object at 0x7fb655860910>
pipeline = <apache_beam.pipeline.Pipeline object at 0x7fb6557fd460>
options = <apache_beam.options.pipeline_options.PipelineOptions object at 0x7fb65587f1c0>

    def run_pipeline(self, pipeline, options):
      """Execute test pipeline and verify test matcher"""
      test_options = options.view_as(TestOptions)
      on_success_matcher = test_options.on_success_matcher
      wait_duration = test_options.wait_until_finish_duration
      is_streaming = options.view_as(StandardOptions).streaming

      # [BEAM-1889] Do not send this to remote workers; there is no need to
      # send this option to remote executors.
      test_options.on_success_matcher = None

      self.result = super().run_pipeline(pipeline, options)
      if self.result.has_job:
        # TODO(markflyhigh)(https://github.com/apache/beam/issues/18254): Use
        # print since Nose doesn't show logs in some cases.
        print('Worker logs: %s' % self.build_console_url(options))
        _LOGGER.info('Console log: ')
        _LOGGER.info(self.build_console_url(options))

      try:
        self.wait_until_in_state(PipelineState.RUNNING)

        if is_streaming and not wait_duration:
          _LOGGER.warning('Waiting indefinitely for streaming job.')
        self.result.wait_until_finish(duration=wait_duration)

        if on_success_matcher:
          from hamcrest import assert_that as hc_assert_that
>         hc_assert_that(self.result, pickler.loads(on_success_matcher))
E         AssertionError:
E         Expected: (Test pipeline expected terminated in state: DONE and Expected checksum is 4fa761fb5c3341ec573d5d12c6ab75e3b2957a25)
E              but: Expected checksum is 4fa761fb5c3341ec573d5d12c6ab75e3b2957a25 Actual checksum is 48bfa279d9728b298d79232610b09e5066fb9aa9

apache_beam/runners/dataflow/test_dataflow_runner.py:70: AssertionError
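For readers triaging this failure: the matcher asserted above is assembled in the test from two hamcrest verifiers (pipeline state plus a BigQuery checksum), pickled into --test-pipeline-options as on_success_matcher, and re-asserted by TestDataflowRunner after wait_until_finish. Below is a minimal sketch of that wiring; the query mirrors the one logged further down but the dataset name is generated per run, the expected checksum is copied from the assertion above, and illustrative_checksum only illustrates the order-insensitive SHA-1 idea behind the verifier, not Beam's exact implementation.

    import hashlib

    from hamcrest import all_of

    from apache_beam.io.gcp.tests.bigquery_matcher import BigqueryMatcher
    from apache_beam.runners.runner import PipelineState
    from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher

    # Placeholder values; the real test derives the dataset name from its
    # per-run fixture (e.g. hourly_team_score_it_dataset<suffix>).
    PROJECT = 'apache-beam-testing'
    QUERY = 'SELECT COUNT(*) FROM `apache-beam-testing.<dataset>.leader_board`'
    EXPECTED_CHECKSUM = '4fa761fb5c3341ec573d5d12c6ab75e3b2957a25'

    # Both sub-matchers must pass: the job must terminate in DONE *and* the
    # rows read back from BigQuery must hash to the recorded checksum.
    state_verifier = PipelineStateMatcher(PipelineState.DONE)
    bigquery_verifier = BigqueryMatcher(PROJECT, QUERY, EXPECTED_CHECKSUM)
    on_success_matcher = all_of(state_verifier, bigquery_verifier)

    def illustrative_checksum(rows):
        # Order-insensitive SHA-1 over stringified rows -- an illustration of
        # the checksum idea only, not Beam's exact implementation.
        digest = hashlib.sha1()
        for encoded in sorted(str(row).encode('utf-8') for row in rows):
            digest.update(encoded)
        return digest.hexdigest()

Read this way, the failure means the job itself succeeded (it reaches JOB_STATE_DONE below) but the data read back hashed to 48bfa279d9728b298d79232610b09e5066fb9aa9 instead of the recorded 4fa761fb5c3341ec573d5d12c6ab75e3b2957a25: the output rows, not the pipeline execution, diverged from the baseline.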
------------------------------ Captured log call -------------------------------
INFO     apache_beam.runners.portability.stager:stager.py:322 Copying Beam SDK "https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/build/apache_beam-2.53.0.dev0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" to staging location.
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:395 Pipeline has additional dependencies to be installed in SDK worker container, consider using the SDK container image pre-building workflow to avoid repetitive installations. Learn more on https://cloud.google.com/dataflow/docs/guides/using-custom-containers#prebuild
INFO     root:environments.py:314 Using provided Python SDK container image: gcr.io/cloud-dataflow/v1beta3/beam_python3.9_sdk:beam-master-20231205
INFO     root:environments.py:321 Python SDK container image set to "gcr.io/cloud-dataflow/v1beta3/beam_python3.9_sdk:beam-master-20231205" for Docker environment
INFO     apache_beam.runners.portability.fn_api_runner.translations:translations.py:712 ==================== <function pack_combiners at 0x7fb68584ff70> ====================
INFO     apache_beam.runners.portability.fn_api_runner.translations:translations.py:712 ==================== <function sort_stages at 0x7fb685850790> ====================
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:673 Starting GCS upload to gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1206185959-712801-z3cd5bib.1701889199.712985/apache_beam-2.53.0.dev0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl...
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:683 Completed GCS upload to gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1206185959-712801-z3cd5bib.1701889199.712985/apache_beam-2.53.0.dev0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl in 0 seconds.
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:673 Starting GCS upload to gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1206185959-712801-z3cd5bib.1701889199.712985/pipeline.pb...
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:683 Completed GCS upload to gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-1206185959-712801-z3cd5bib.1701889199.712985/pipeline.pb in 0 seconds.
WARNING  apache_beam.options.pipeline_options:pipeline_options.py:338 Unknown pipeline options received: --sleep_secs=20,--kms_key_name=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test. Ignore if flags are used for internal purposes.
WARNING  apache_beam.options.pipeline_options:pipeline_options.py:338 Unknown pipeline options received: --sleep_secs=20,--kms_key_name=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test. Ignore if flags are used for internal purposes.
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:854 Create job: <Job
         clientRequestId: '20231206185959713898-1250'
         createTime: '2023-12-06T19:00:02.048160Z'
         currentStateTime: '1970-01-01T00:00:00Z'
         id: '2023-12-06_11_00_00-14818865919210450247'
         location: 'us-central1'
         name: 'beamapp-jenkins-1206185959-712801-z3cd5bib'
         projectId: 'apache-beam-testing'
         stageStates: []
         startTime: '2023-12-06T19:00:02.048160Z'
         steps: []
         tempFiles: []
         type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:856 Created job with id: [2023-12-06_11_00_00-14818865919210450247]
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:857 Submitted job: 2023-12-06_11_00_00-14818865919210450247
INFO     apache_beam.runners.dataflow.internal.apiclient:apiclient.py:858 To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobs/us-central1/2023-12-06_11_00_00-14818865919210450247?project=apache-beam-testing
INFO     apache_beam.runners.dataflow.test_dataflow_runner:test_dataflow_runner.py:58 Console log:
INFO     apache_beam.runners.dataflow.test_dataflow_runner:test_dataflow_runner.py:59 https://console.cloud.google.com/dataflow/jobs/us-central1/2023-12-06_11_00_00-14818865919210450247?project=apache-beam-testing
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:151 Job 2023-12-06_11_00_00-14818865919210450247 is in state JOB_STATE_RUNNING
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:00:05.121Z: JOB_MESSAGE_BASIC: Worker configuration: e2-standard-2 in us-central1-b.
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:00:08.378Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/Impulse+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/FlatMap(<lambda at core.py:3774>)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/Map(decode)
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:00:08.401Z: JOB_MESSAGE_BASIC: Executing operation ReadInputText/Read/Impulse+ReadInputText/Read/EmitSource+ref_AppliedPTransform_ReadInputText-Read-SDFBoundedSourceReader-ParDo-SDFBoundedSourceDoFn-_7/PairWithRestriction+ref_AppliedPTransform_ReadInputText-Read-SDFBoundedSourceReader-ParDo-SDFBoundedSourceDoFn-_7/SplitWithSizing
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:00:08.429Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/Impulse+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/FlatMap(<lambda at core.py:3774>)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/Map(decode)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/CopyJobNamePrefix+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GenerateFilePrefix+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/LoadJobNamePrefix+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/SchemaModJobNamePrefix
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:00:08.441Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-b...
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:00:41.956Z: JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric descriptors, so new user metrics of the form custom.googleapis.com/* will not be created. However, all user metrics are also available in the metric dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics, you can delete old / unused metric descriptors. See https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:02:59.019Z: JOB_MESSAGE_BASIC: All workers have finished the startup processes and began to receive work requests.
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:02:59.258Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/Impulse+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/FlatMap(<lambda at core.py:3774>)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/Map(decode)
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:00.908Z: JOB_MESSAGE_BASIC: Finished operation ReadInputText/Read/Impulse+ReadInputText/Read/EmitSource+ref_AppliedPTransform_ReadInputText-Read-SDFBoundedSourceReader-ParDo-SDFBoundedSourceDoFn-_7/PairWithRestriction+ref_AppliedPTransform_ReadInputText-Read-SDFBoundedSourceReader-ParDo-SDFBoundedSourceDoFn-_7/SplitWithSizing
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.005Z: JOB_MESSAGE_BASIC: Executing operation HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.223Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/Impulse+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/FlatMap(<lambda at core.py:3774>)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/Map(decode)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/CopyJobNamePrefix+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GenerateFilePrefix+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/LoadJobNamePrefix+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/SchemaModJobNamePrefix
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.341Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.361Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.377Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.386Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.394Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.406Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.415Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.418Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/ParDo(TriggerLoadJobs)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.434Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(UpdateDestinationSchema)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.450Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/ParDo(TriggerLoadJobs)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.473Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.515Z: JOB_MESSAGE_BASIC: Finished operation HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.529Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(UpdateDestinationSchema)/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:03:01.605Z: JOB_MESSAGE_BASIC: Executing operation ref_AppliedPTransform_ReadInputText-Read-SDFBoundedSourceReader-ParDo-SDFBoundedSourceDoFn-_7/ProcessElementAndRestrictionWithSizing+HourlyTeamScore/ParseGameEventFn+HourlyTeamScore/FilterStartTime+HourlyTeamScore/FilterEndTime+HourlyTeamScore/AddEventTimestamps+HourlyTeamScore/FixedWindowsTeam+HourlyTeamScore/ExtractAndSumScore/Map(<lambda at hourly_team_score.py:142>)+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/Combine/Partial+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Reify+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Write
ERROR    apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:205 2023-12-06T19:09:14.603Z: JOB_MESSAGE_ERROR: Data channel closed, unable to receive additional data from SDK sdk-0-0
ERROR    apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:205 2023-12-06T19:09:26.964Z: JOB_MESSAGE_ERROR: SDK harness sdk-0-0 disconnected. This usually means that the process running the pipeline code has crashed. Inspect the Worker Logs and the Diagnostics tab to determine the cause of the crash.
ERROR    apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:205 2023-12-06T19:09:53.873Z: JOB_MESSAGE_ERROR: Data channel closed, unable to receive additional data from SDK sdk-0-0
ERROR    apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:205 2023-12-06T19:09:54.490Z: JOB_MESSAGE_ERROR: Data channel closed, unable to receive additional data from SDK sdk-0-0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:28.636Z: JOB_MESSAGE_BASIC: Finished operation ref_AppliedPTransform_ReadInputText-Read-SDFBoundedSourceReader-ParDo-SDFBoundedSourceDoFn-_7/ProcessElementAndRestrictionWithSizing+HourlyTeamScore/ParseGameEventFn+HourlyTeamScore/FilterStartTime+HourlyTeamScore/FilterEndTime+HourlyTeamScore/AddEventTimestamps+HourlyTeamScore/FixedWindowsTeam+HourlyTeamScore/ExtractAndSumScore/Map(<lambda at hourly_team_score.py:142>)+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/Combine/Partial+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Reify+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Write
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:28.682Z: JOB_MESSAGE_BASIC: Executing operation HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:29.610Z: JOB_MESSAGE_BASIC: Finished operation HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:29.650Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:29.790Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:29.867Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:30.002Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:30.081Z: JOB_MESSAGE_BASIC: Executing operation HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Read+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/GroupByWindow+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/Combine+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/Combine/Extract+TeamScoresDict+WriteTeamScoreSums/ConvertToRow+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RewindowIntoGlobal+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/AppendDestination+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Write+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(_ShardDestinations)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Write
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:35.191Z: JOB_MESSAGE_BASIC: Finished operation HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/Read+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/GroupByKey/GroupByWindow+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/Combine+HourlyTeamScore/ExtractAndSumScore/CombinePerKey(sum)/Combine/Extract+TeamScoresDict+WriteTeamScoreSums/ConvertToRow+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RewindowIntoGlobal+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/AppendDestination+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Write+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(_ShardDestinations)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Write
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:35.238Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:35.280Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:35.327Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Read+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/DropShardNumber+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/DestinationFilesUnion/InputIdentity+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Write
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:35.592Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Read+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/DropShardNumber+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/DestinationFilesUnion/InputIdentity+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Write
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:35.635Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:36.372Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:36.413Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Read+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/Map(<lambda at bigquery_file_loads.py:1120>)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(UpdateDestinationSchema)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/ParDo(TriggerLoadJobs)
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:50.235Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Read+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/Map(<lambda at bigquery_file_loads.py:1120>)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(UpdateDestinationSchema)+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/ParDo(TriggerLoadJobs)
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:50.375Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs)/View-python_side_input1
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:50.390Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/Flatten
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:50.422Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs)/View-python_side_input1
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:50.459Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/Flatten
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:50.496Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs)
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.159Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs)
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.239Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.283Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue/View-python_side_input0
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.357Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.528Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.602Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.918Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.956Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:52.998Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Close
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:53.039Z: JOB_MESSAGE_BASIC: Executing operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:53.230Z: JOB_MESSAGE_BASIC: Finished operation WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames/Keys+WriteTeamScoreSums/WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/Delete
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:11:53.371Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:201 2023-12-06T19:14:38.168Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO     apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:151 Job 2023-12-06_11_00_00-14818865919210450247 is in state JOB_STATE_DONE
INFO     apache_beam.io.gcp.tests.bigquery_matcher:bigquery_matcher.py:121 Attempting to perform query SELECT COUNT(*) FROM `apache-beam-testing.hourly_team_score_it_dataset1701889198da500b.leader_board` to BQ
INFO     apache_beam.io.gcp.tests.bigquery_matcher:bigquery_matcher.py:96 Read from given query (SELECT COUNT(*) FROM `apache-beam-testing.hourly_team_score_it_dataset1701889198da500b.leader_board`), total rows 1
INFO     apache_beam.io.gcp.tests.bigquery_matcher:bigquery_matcher.py:101 Generate checksum: 48bfa279d9728b298d79232610b09e5066fb9aa9
=============================== warnings summary ===============================
apache_beam/io/gcp/bigquery.py:2603
  https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py:2603: DeprecationWarning: invalid escape sequence \#
    """Read data from BigQuery.

apache_beam/io/gcp/bigquery_read_it_test.py::ReadTests::test_native_source
  https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py:170: BeamDeprecationWarning: BigQuerySource is deprecated since 2.25.0. Use ReadFromBigQuery instead.
    beam.io.BigQuerySource(query=self.query, use_standard_sql=True)))

apache_beam/io/gcp/bigquery_test.py::PubSubBigQueryIT::test_file_loads
apache_beam/io/gcp/bigquery_test.py::PubSubBigQueryIT::test_streaming_inserts
apache_beam/examples/complete/game/hourly_team_score_it_test.py::HourlyTeamScoreIT::test_hourly_team_score_it
apache_beam/examples/complete/game/game_stats_it_test.py::GameStatsIT::test_game_stats_it
apache_beam/examples/complete/game/leader_board_it_test.py::LeaderBoardIT::test_leader_board_it
  https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/apache_beam/io/gcp/tests/utils.py:63: PendingDeprecationWarning: Client.dataset is deprecated and will be removed in a future version. Use a string like 'my_project.my_dataset' or a cloud.google.bigquery.DatasetReference object, instead.
    dataset_ref = client.dataset(unique_dataset_name, project=project)

apache_beam/examples/dataframe/flight_delays_it_test.py::FlightDelaysTest::test_flight_delays
  https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/apache_beam/examples/dataframe/flight_delays.py:47: FutureWarning: The default value of numeric_only in DataFrame.mean is deprecated. In a future version, it will default to False. In addition, specifying 'numeric_only=None' is deprecated. Select only valid columns or specify the value of numeric_only to silence this warning.
    return airline_df[at_top_airports].mean()

apache_beam/examples/cookbook/bigquery_tornadoes_it_test.py::BigqueryTornadoesIT::test_bigquery_tornadoes_it
  https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/apache_beam/io/gcp/tests/utils.py:100: PendingDeprecationWarning: Client.dataset is deprecated and will be removed in a future version. Use a string like 'my_project.my_dataset' or a cloud.google.bigquery.DatasetReference object, instead.
    table_ref = client.dataset(dataset_id).table(table_id)

apache_beam/io/gcp/bigquery_read_it_test.py::ReadNewTypesTests::test_native_source
  https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py:706: BeamDeprecationWarning: BigQuerySource is deprecated since 2.25.0. Use ReadFromBigQuery instead.
    beam.io.BigQuerySource(query=self.query, use_standard_sql=True)))

-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
- generated xml file: https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/pytest_postCommitIT-df-py39.xml -
=========================== short test summary info ============================
FAILED apache_beam/examples/complete/game/hourly_team_score_it_test.py::HourlyTeamScoreIT::test_hourly_team_score_it - AssertionError: Expected: (Test pipeline expected terminated in state: DONE and Expected checksum is 4fa761fb5c3341ec573d5d12c6ab75e3b2957a25) but: Expected checksum is 4fa761fb5c3341ec573d5d12c6ab75e3b2957a25 Actual checksum is 48bfa279d9728b298d79232610b09e5066fb9aa9
====== 1 failed, 82 passed, 50 skipped, 10 warnings in 6073.68s (1:41:13) ======

> Task :sdks:python:test-suites:dataflow:py39:postCommitIT FAILED

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* Where:
Script 'https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/test-suites/direct/common.gradle' line: 52

* What went wrong:
Execution failed for task ':sdks:python:test-suites:direct:py39:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Get more help at https://help.gradle.org.
==============================================================================

2: Task failed with an exception.
-----------
* Where:
Script 'https://ci-beam.apache.org/job/beam_PostCommit_Python39/ws/src/sdks/python/test-suites/dataflow/common.gradle' line: 139

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py39:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Get more help at https://help.gradle.org.
==============================================================================

Deprecated Gradle features were used in this build, making it incompatible with Gradle 9.0.

You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.

For more on this, please refer to https://docs.gradle.org/8.4/userguide/command_line_interface.html#sec:command_line_warnings in the Gradle documentation.

BUILD FAILED in 1h 48m 37s

222 actionable tasks: 158 executed, 60 from cache, 4 up-to-date

Publishing build scan...
https://ge.apache.org/s/eqfvjsmqlncq6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org