[ https://issues.apache.org/jira/browse/BEAM-9523?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Andrew Pilloud updated BEAM-9523:
---------------------------------
    Status: Open  (was: Triage Needed)

> GROUP BY DOUBLE fails at runtime instead of planning time
> ---------------------------------------------------------
>
>                 Key: BEAM-9523
>                 URL: https://issues.apache.org/jira/browse/BEAM-9523
>             Project: Beam
>          Issue Type: Bug
>          Components: dsl-sql-zetasql
>            Reporter: Andrew Pilloud
>            Priority: Minor
>              Labels: zetasql-compliance
>
> This should fail earlier. The non-deterministic DOUBLE key coder is only caught when GroupByKey.expand verifies the key coder while the physical plan is being applied; the query should instead be rejected at planning time.
> {code:java}
> Mar 16, 2020 1:27:32 PM cloud.dataflow.sql.ExecuteQueryServiceServer$SqlComplianceServiceImpl executeQuery
> INFO: Processing Sql statement: SELECT COUNT(a) FROM (SELECT a FROM (SELECT 1.2 a UNION ALL SELECT 2.3 UNION ALL SELECT 3.4) LIMIT 1)
> Mar 16, 2020 1:27:33 PM com.google.zetasql.io.grpc.internal.SerializingExecutor run
> SEVERE: Exception while executing runnable com.google.zetasql.io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed@7946abe5
> java.lang.IllegalStateException: the keyCoder of a GroupByKey must be deterministic
> 	at org.apache.beam.sdk.transforms.GroupByKey.expand(GroupByKey.java:234)
> 	at org.apache.beam.sdk.transforms.GroupByKey.expand(GroupByKey.java:110)
> 	at org.apache.beam.sdk.Pipeline.applyInternal(Pipeline.java:542)
> 	at org.apache.beam.sdk.Pipeline.applyTransform(Pipeline.java:493)
> 	at org.apache.beam.sdk.values.PCollection.apply(PCollection.java:368)
> 	at org.apache.beam.sdk.transforms.join.CoGroupByKey.expand(CoGroupByKey.java:118)
> 	at org.apache.beam.sdk.transforms.join.CoGroupByKey.expand(CoGroupByKey.java:71)
> 	at org.apache.beam.sdk.Pipeline.applyInternal(Pipeline.java:542)
> 	at org.apache.beam.sdk.Pipeline.applyTransform(Pipeline.java:476)
> 	at org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple.apply(KeyedPCollectionTuple.java:108)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSetOperatorRelBase.expand(BeamSetOperatorRelBase.java:96)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSetOperatorRelBase.expand(BeamSetOperatorRelBase.java:41)
> 	at org.apache.beam.sdk.Pipeline.applyInternal(Pipeline.java:542)
> 	at org.apache.beam.sdk.Pipeline.applyTransform(Pipeline.java:493)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.toPCollection(BeamSqlRelUtils.java:69)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.lambda$buildPCollectionList$0(BeamSqlRelUtils.java:50)
> 	at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
> 	at java.util.Iterator.forEachRemaining(Iterator.java:116)
> 	at java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
> 	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
> 	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
> 	at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
> 	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
> 	at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.buildPCollectionList(BeamSqlRelUtils.java:51)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.toPCollection(BeamSqlRelUtils.java:67)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.lambda$buildPCollectionList$0(BeamSqlRelUtils.java:50)
> 	at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
> 	at java.util.Iterator.forEachRemaining(Iterator.java:116)
> 	at java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
> 	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
> 	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
> 	at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
> 	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
> 	at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.buildPCollectionList(BeamSqlRelUtils.java:51)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.toPCollection(BeamSqlRelUtils.java:67)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.lambda$buildPCollectionList$0(BeamSqlRelUtils.java:50)
> 	at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
> 	at java.util.Iterator.forEachRemaining(Iterator.java:116)
> 	at java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
> 	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
> 	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
> 	at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
> 	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
> 	at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.buildPCollectionList(BeamSqlRelUtils.java:51)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.toPCollection(BeamSqlRelUtils.java:67)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.lambda$buildPCollectionList$0(BeamSqlRelUtils.java:50)
> 	at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
> 	at java.util.Iterator.forEachRemaining(Iterator.java:116)
> 	at java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
> 	at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:481)
> 	at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
> 	at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
> 	at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
> 	at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.buildPCollectionList(BeamSqlRelUtils.java:51)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.toPCollection(BeamSqlRelUtils.java:67)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils.toPCollection(BeamSqlRelUtils.java:39)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamEnumerableConverter.runCollector(BeamEnumerableConverter.java:199)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamEnumerableConverter.collectRows(BeamEnumerableConverter.java:218)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamEnumerableConverter.toRowList(BeamEnumerableConverter.java:150)
> 	at org.apache.beam.sdk.extensions.sql.impl.rel.BeamEnumerableConverter.toRowList(BeamEnumerableConverter.java:127)
> 	at cloud.dataflow.sql.ExecuteQueryServiceServer$SqlComplianceServiceImpl.executeQuery(ExecuteQueryServiceServer.java:244)
> 	at com.google.zetasql.testing.SqlComplianceServiceGrpc$MethodHandlers.invoke(SqlComplianceServiceGrpc.java:423)
> 	at com.google.zetasql.io.grpc.stub.ServerCalls$UnaryServerCallHandler$UnaryServerCallListener.onHalfClose(ServerCalls.java:171)
> 	at com.google.zetasql.io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.halfClosed(ServerCallImpl.java:283)
> 	at com.google.zetasql.io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed.runInContext(ServerImpl.java:711)
> 	at com.google.zetasql.io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
> 	at com.google.zetasql.io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> 	at java.lang.Thread.run(Thread.java:748)
> Caused by: org.apache.beam.sdk.coders.Coder$NonDeterministicException: SchemaCoder<Schema: Fields:
> Field{name=a, description=, type=FieldType{typeName=DOUBLE, nullable=false, logicalType=null, collectionElementType=null, mapKeyType=null, mapValueType=null, rowSchema=null, metadata={}}}
> UUID: 7cca0c59-4f8d-4218-8ec8-57afc3eba1ac delegateCoder: org.apache.beam.sdk.coders.Coder$ByteBuddy$5jomCBlC@7869652f is not deterministic because:
>   All fields must have deterministic encoding
> 	at org.apache.beam.sdk.coders.Coder.verifyDeterministic(Coder.java:196)
> 	at org.apache.beam.sdk.schemas.SchemaCoder.verifyDeterministic(SchemaCoder.java:192)
> 	at org.apache.beam.sdk.schemas.SchemaCoder.verifyDeterministic(SchemaCoder.java:180)
> 	at org.apache.beam.sdk.transforms.GroupByKey.expand(GroupByKey.java:232)
> 	... 73 more
> Caused by: org.apache.beam.sdk.coders.Coder$NonDeterministicException: DoubleCoder is not deterministic because:
>   Floating point encodings are not guaranteed to be deterministic.
> 	at org.apache.beam.sdk.coders.DoubleCoder.verifyDeterministic(DoubleCoder.java:71)
> 	at org.apache.beam.sdk.coders.Coder.verifyDeterministic(Coder.java:194)
> 	... 76 more
> {code}


--
This message was sent by Atlassian Jira
(v8.3.4#803005)
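For anyone reproducing this outside the compliance service, here is a minimal stand-alone sketch (not taken from the ticket), assuming the Beam Java SDK plus the beam-sdks-java-extensions-sql module; the class name and the single DOUBLE field "a" are illustrative. Applying the SqlTransform should be enough to hit the same check, since GroupByKey verifies its key coder while the transform expands, not while the SQL statement is analyzed or planned:

{code:java}
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.extensions.sql.SqlTransform;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.Row;

// Illustrative reproduction sketch; not part of the ticket.
public class GroupByDoubleRepro {
  public static void main(String[] args) {
    // Schema with a single DOUBLE column, mirroring the failing key schema above.
    Schema schema = Schema.builder().addDoubleField("a").build();

    Pipeline p = Pipeline.create();
    PCollection<Row> input =
        p.apply(
            Create.of(
                    Row.withSchema(schema).addValue(1.2).build(),
                    Row.withSchema(schema).addValue(2.3).build(),
                    Row.withSchema(schema).addValue(3.4).build())
                .withRowSchema(schema));

    // Expected to fail here while the transform expands (GroupByKey.expand verifies
    // that the key coder is deterministic), i.e. during pipeline construction rather
    // than during SQL analysis/planning, which is the behavior this ticket questions.
    input.apply(SqlTransform.query("SELECT a, COUNT(*) FROM PCOLLECTION GROUP BY a"));
  }
}
{code}

One possible shape of the earlier check, purely as a sketch: walk the grouping key's schema during planning and reject floating-point fields before any transform is applied. The helper below and the place it would be called from are invented for illustration, not existing Beam APIs:

{code:java}
import org.apache.beam.sdk.schemas.Schema;

// Hypothetical planning-time validation; the class name and its hook point are invented.
final class GroupingKeyValidator {

  static void rejectFloatingPointKeys(Schema keySchema) {
    for (Schema.Field field : keySchema.getFields()) {
      Schema.TypeName type = field.getType().getTypeName();
      if (type == Schema.TypeName.DOUBLE || type == Schema.TypeName.FLOAT) {
        // Fail while the query is being planned instead of letting
        // GroupByKey.expand throw NonDeterministicException later.
        throw new UnsupportedOperationException(
            "Cannot group on floating point column '"
                + field.getName()
                + "': floating point encodings are not deterministic");
      }
    }
  }

  private GroupingKeyValidator() {}
}
{code}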