beam-commits mailing list archives

From Apache Jenkins Server <jenk...@builds.apache.org>
Subject Build failed in Jenkins: beam_PostCommit_Python_ValidatesRunner_Dataflow #609
Date Tue, 09 Jan 2018 15:09:34 GMT
See <https://builds.apache.org/job/beam_PostCommit_Python_ValidatesRunner_Dataflow/609/display/redirect>

------------------------------------------
[...truncated 1.06 MB...]
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "_merge_tagged_vals_under_key"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "assert_that/Group/Map(_merge_tagged_vals_under_key).out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s11"
        }, 
        "serialized_fn": "<string of 1380 bytes>", 
        "user_name": "assert_that/Group/Map(_merge_tagged_vals_under_key)"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s13", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "<lambda>"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "assert_that/Unkey.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s12"
        }, 
        "serialized_fn": "<string of 980 bytes>", 
        "user_name": "assert_that/Unkey"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s14", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "_equal"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",

                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "assert_that/Match.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s13"
        }, 
        "serialized_fn": "<string of 1160 bytes>", 
        "user_name": "assert_that/Match"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
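
For context, the assert_that/* steps in the job description above come from the Beam Python
testing utilities (apache_beam.testing.util.assert_that with equal_to), and the start/Read,
side/Read and compute/compute steps suggest a side-input ValidatesRunner test. A minimal
sketch of a pipeline that would produce a similar graph (hypothetical names and values, not
the actual failing test):

import apache_beam as beam
from apache_beam.testing.util import assert_that, equal_to


def run(options=None):
  with beam.Pipeline(options=options) as p:
    side = p | 'side' >> beam.Create([10])         # analogous to side/Read above
    main = p | 'start' >> beam.Create([1, 2, 3])   # analogous to start/Read above
    result = main | 'compute' >> beam.Map(
        lambda x, offset: x + offset,
        offset=beam.pvalue.AsSingleton(side))      # side input, cf. _UnpickledSideInput
    # assert_that expands into the Group/Map(_merge_tagged_vals_under_key),
    # Unkey and Match steps visible in the job graph above.
    assert_that(result, equal_to([11, 12, 13]))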
root: INFO: Create job: <Job
 createTime: u'2018-01-09T15:07:15.482419Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2018-01-09_07_07_14-1700221786659886619'
 location: u'us-central1'
 name: u'beamapp-jenkins-0109150708-229635'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2018-01-09_07_07_14-1700221786659886619]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-01-09_07_07_14-1700221786659886619?project=apache-beam-testing
root: INFO: Job 2018-01-09_07_07_14-1700221786659886619 is in state JOB_STATE_PENDING
root: INFO: 2018-01-09T15:07:14.980Z: JOB_MESSAGE_WARNING: (179866b4f13e1bcf): Setting the
number of workers (1) disables autoscaling for this job. If you are trying to cap autoscaling,
consider only setting max_num_workers. If you want to disable autoscaling altogether, the
documented way is to explicitly use autoscalingAlgorithm=NONE.
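
For reference, these knobs correspond to the Beam Python SDK's standard worker options. A
minimal sketch (hypothetical values) of capping autoscaling rather than disabling it, per the
warning above:

from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=DataflowRunner',
    '--project=apache-beam-testing',    # project from the job above
    '--max_num_workers=5',              # caps autoscaling instead of disabling it
    # '--num_workers=1',                # fixed pool size, as this job used
    # '--autoscaling_algorithm=NONE',   # documented way to disable autoscaling
])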
root: INFO: 2018-01-09T15:07:17.253Z: JOB_MESSAGE_DETAILED: (e013a3886e0637e1): Checking required
Cloud APIs are enabled.
root: INFO: 2018-01-09T15:07:18.581Z: JOB_MESSAGE_DETAILED: (e013a3886e06303f): Expanding
CoGroupByKey operations into optimizable parts.
root: INFO: 2018-01-09T15:07:18.604Z: JOB_MESSAGE_DEBUG: (e013a3886e063506): Combiner lifting
skipped for step assert_that/Group/GroupByKey: GroupByKey not followed by a combiner.
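
That debug message is expected rather than a problem: combiner lifting applies only when the
GroupByKey is immediately followed by a combining function (i.e. a CombinePerKey), which lets
Dataflow pre-combine values before the shuffle; assert_that's GroupByKey has no combiner, so
the optimization is simply skipped. A minimal sketch of the two shapes (illustrative only):

import apache_beam as beam

with beam.Pipeline() as p:
  pairs = p | beam.Create([('a', 1), ('a', 2), ('b', 3)])

  # Bare GroupByKey: nothing to lift, so combiner lifting is skipped
  # (as reported above for assert_that's GroupByKey).
  grouped = pairs | 'Group' >> beam.GroupByKey()

  # GroupByKey followed by a combiner (CombinePerKey): eligible for lifting.
  summed = pairs | 'Sum' >> beam.CombinePerKey(sum)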
root: INFO: 2018-01-09T15:07:18.629Z: JOB_MESSAGE_DETAILED: (e013a3886e0632e0): Expanding
GroupByKey operations into optimizable parts.
root: INFO: 2018-01-09T15:07:18.659Z: JOB_MESSAGE_DETAILED: (e013a3886e0630ba): Lifting ValueCombiningMappingFns
into MergeBucketsMappingFns
root: INFO: 2018-01-09T15:07:18.689Z: JOB_MESSAGE_DEBUG: (e013a3886e063c6e): Annotating graph
with Autotuner information.
root: INFO: 2018-01-09T15:07:18.729Z: JOB_MESSAGE_DETAILED: (e013a3886e063822): Fusing adjacent
ParDo, Read, Write, and Flatten operations
root: INFO: 2018-01-09T15:07:18.760Z: JOB_MESSAGE_DETAILED: (e013a3886e0635fc): Unzipping
flatten s10 for input s8.out
root: INFO: 2018-01-09T15:07:18.777Z: JOB_MESSAGE_DETAILED: (e013a3886e0633d6): Fusing unzipped
copy of assert_that/Group/GroupByKey/Reify, through flatten assert_that/Group/Flatten, into
producer assert_that/Group/pair_with_0
root: INFO: 2018-01-09T15:07:18.808Z: JOB_MESSAGE_DETAILED: (e013a3886e0631b0): Fusing consumer
assert_that/Group/Map(_merge_tagged_vals_under_key) into assert_that/Group/GroupByKey/GroupByWindow
root: INFO: 2018-01-09T15:07:18.826Z: JOB_MESSAGE_DETAILED: (e013a3886e063f8a): Fusing consumer
assert_that/Match into assert_that/Unkey
root: INFO: 2018-01-09T15:07:18.857Z: JOB_MESSAGE_DETAILED: (e013a3886e063d64): Fusing consumer
assert_that/Unkey into assert_that/Group/Map(_merge_tagged_vals_under_key)
root: INFO: 2018-01-09T15:07:18.885Z: JOB_MESSAGE_DETAILED: (e013a3886e063b3e): Fusing consumer
assert_that/Group/GroupByKey/GroupByWindow into assert_that/Group/GroupByKey/Read
root: INFO: 2018-01-09T15:07:18.916Z: JOB_MESSAGE_DETAILED: (e013a3886e063918): Unzipping
flatten s10-u13 for input s11-reify-value0-c11
root: INFO: 2018-01-09T15:07:18.940Z: JOB_MESSAGE_DETAILED: (e013a3886e0636f2): Fusing unzipped
copy of assert_that/Group/GroupByKey/Write, through flatten s10-u13, into producer assert_that/Group/GroupByKey/Reify
root: INFO: 2018-01-09T15:07:18.961Z: JOB_MESSAGE_DETAILED: (e013a3886e0634cc): Fusing consumer
assert_that/Group/GroupByKey/Reify into assert_that/Group/pair_with_1
root: INFO: 2018-01-09T15:07:18.991Z: JOB_MESSAGE_DETAILED: (e013a3886e0632a6): Fusing consumer
assert_that/Group/GroupByKey/Write into assert_that/Group/GroupByKey/Reify
root: INFO: 2018-01-09T15:07:19.022Z: JOB_MESSAGE_DETAILED: (e013a3886e063080): Fusing consumer
assert_that/Group/pair_with_0 into assert_that/Create/Read
root: INFO: 2018-01-09T15:07:19.053Z: JOB_MESSAGE_DETAILED: (e013a3886e063e5a): Fusing consumer
assert_that/WindowInto(WindowIntoFn) into compute/compute
root: INFO: 2018-01-09T15:07:19.077Z: JOB_MESSAGE_DETAILED: (e013a3886e063c34): Fusing consumer
assert_that/Group/pair_with_1 into assert_that/ToVoidKey
root: INFO: 2018-01-09T15:07:19.101Z: JOB_MESSAGE_DETAILED: (e013a3886e063a0e): Fusing consumer
assert_that/ToVoidKey into assert_that/WindowInto(WindowIntoFn)
root: INFO: 2018-01-09T15:07:19.132Z: JOB_MESSAGE_DETAILED: (e013a3886e0637e8): Fusing consumer
compute/compute into start/Read
root: INFO: 2018-01-09T15:07:19.165Z: JOB_MESSAGE_DEBUG: (e013a3886e0635c2): Workflow config
is missing a default resource spec.
root: INFO: 2018-01-09T15:07:19.188Z: JOB_MESSAGE_DEBUG: (e013a3886e06339c): Adding StepResource
setup and teardown to workflow graph.
root: INFO: 2018-01-09T15:07:19.216Z: JOB_MESSAGE_DEBUG: (e013a3886e063176): Adding workflow
start and stop steps.
root: INFO: 2018-01-09T15:07:19.241Z: JOB_MESSAGE_DEBUG: (e013a3886e063f50): Assigning stage
ids.
root: INFO: 2018-01-09T15:07:19.380Z: JOB_MESSAGE_DEBUG: (2e3fdd27a082e49a): Executing wait
step start21
root: INFO: 2018-01-09T15:07:19.433Z: JOB_MESSAGE_BASIC: (8a2f5c4c260b15eb): Executing operation
side/Read
root: INFO: 2018-01-09T15:07:19.483Z: JOB_MESSAGE_DEBUG: (2e3fdd27a082e4c3): Value "side/Read.out"
materialized.
root: INFO: 2018-01-09T15:07:19.541Z: JOB_MESSAGE_BASIC: (8a2f5c4c260b1169): Executing operation
compute/_UnpickledSideInput(Read.out.0)
root: INFO: 2018-01-09T15:07:19.595Z: JOB_MESSAGE_DEBUG: (8a2f5c4c260b1ce7): Value "compute/_UnpickledSideInput(Read.out.0).output"
materialized.
root: INFO: 2018-01-09T15:07:19.653Z: JOB_MESSAGE_BASIC: (2e3fdd27a082e4ec): Executing operation
assert_that/Group/GroupByKey/Create
root: INFO: Job 2018-01-09_07_07_14-1700221786659886619 is in state JOB_STATE_RUNNING
root: INFO: 2018-01-09T15:07:19.685Z: JOB_MESSAGE_DEBUG: (e172506258715fcb): Starting worker
pool setup.
root: INFO: 2018-01-09T15:07:19.717Z: JOB_MESSAGE_BASIC: (e172506258715df9): Starting 1 workers
in us-central1-f...
root: INFO: 2018-01-09T15:07:19.772Z: JOB_MESSAGE_DEBUG: (2e3fdd27a082e53e): Value "assert_that/Group/GroupByKey/Session"
materialized.
root: INFO: 2018-01-09T15:07:19.823Z: JOB_MESSAGE_BASIC: (2e3fdd27a082e8bb): Executing operation
start/Read+compute/compute+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/pair_with_1+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
root: INFO: 2018-01-09T15:07:19.850Z: JOB_MESSAGE_BASIC: (d8ac6b5f304f96fe): Executing operation
assert_that/Create/Read+assert_that/Group/pair_with_0+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
root: INFO: 2018-01-09T15:07:26.226Z: JOB_MESSAGE_DETAILED: (26e8810822b82710): Autoscaling:
Raised the number of workers to 0 based on the rate of progress in the currently running step(s).
root: INFO: 2018-01-09T15:08:00.759Z: JOB_MESSAGE_ERROR: (26e8810822b82083): Startup of the
worker pool in zone us-central1-f failed to bring up any of the desired 1 workers. QUOTA_EXCEEDED:
Quota 'DISKS_TOTAL_GB' exceeded.  Limit: 21000.0 in region us-central1.
root: INFO: 2018-01-09T15:08:00.786Z: JOB_MESSAGE_ERROR: (26e8810822b820f9): Workflow failed.
root: INFO: 2018-01-09T15:08:00.911Z: JOB_MESSAGE_DETAILED: (e013a3886e063da5): Cleaning up.
root: INFO: 2018-01-09T15:08:00.963Z: JOB_MESSAGE_DEBUG: (e013a3886e063959): Starting worker
pool teardown.
root: INFO: 2018-01-09T15:08:00.989Z: JOB_MESSAGE_BASIC: (e013a3886e063733): Stopping worker
pool...
root: INFO: 2018-01-09T15:09:15.254Z: JOB_MESSAGE_BASIC: (e013a3886e063e9b): Worker pool stopped.
root: INFO: 2018-01-09T15:09:15.297Z: JOB_MESSAGE_DEBUG: (e013a3886e063a4f): Tearing down
pending resources...
root: INFO: Job 2018-01-09_07_07_14-1700221786659886619 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
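
The root cause is an environment issue rather than a code regression: the worker pool never
started because the Compute Engine DISKS_TOTAL_GB quota (limit 21000 GB in us-central1) was
already exhausted, presumably by other concurrently running Dataflow test jobs. Besides
retrying once disks are released or raising the project quota, each job's footprint can be
trimmed via the worker options. A minimal sketch (hypothetical values, not the suite's actual
configuration):

from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=DataflowRunner',
    '--project=apache-beam-testing',
    '--num_workers=1',
    '--disk_size_gb=50',   # smaller per-worker disks draw less DISKS_TOTAL_GB quota
])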

----------------------------------------------------------------------
Ran 15 tests in 510.367s

FAILED (errors=14)
Build step 'Execute shell' marked build as failure
Not sending mail to unregistered user ankurgoenka@gmail.com
Not sending mail to unregistered user jb@nanthrax.net
