yangjiandan commented on PR #5262:
URL: https://github.com/apache/hive/pull/5262#issuecomment-2792147744
@deniskuzZ the current master branch may have a problem.
When I run the following command on the master branch, even without any code changes, it throws an exception.

My command:
```
mvn test -Dtest=TestMiniLlapLocalCliDriver -Dqfile=stddev_variance_with_double_decimal_test.q -Dtest.output.overwrite=true
```
Exception:
```
[ERROR] Failures:
[ERROR] TestMiniLlapLocalCliDriver.testCliDriver:62 Client execution failed with error code = 40000
running
SELECT account_id,
STDDEV(interest_paid) AS sdev
FROM (
SELECT 10 AS account_id, CAST(23.79 AS DOUBLE) interest_paid
UNION ALL SELECT 10, 23.79
UNION ALL SELECT 10, 23.79
UNION ALL SELECT 11, 64.34
UNION ALL SELECT 11, 64.34
UNION ALL SELECT 11, 64.34
) tempDataset
GROUP BY account_id
fname=stddev_variance_with_double_decimal_test.q
See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs.
org.apache.hadoop.hive.ql.parse.SemanticException: Line 0:-1 Not yet supported place for UDAF 'sum'
    at org.apache.hadoop.hive.ql.parse.type.TypeCheckProcFactory$DefaultExprProcessor.validateUDF(TypeCheckProcFactory.java:821)
    at org.apache.hadoop.hive.ql.parse.type.TypeCheckProcFactory$DefaultExprProcessor.getXpathOrFuncExprNodeDesc(TypeCheckProcFactory.java:968)
    at org.apache.hadoop.hive.ql.parse.type.TypeCheckProcFactory$DefaultExprProcessor.process(TypeCheckProcFactory.java:1481)
    at org.apache.hadoop.hive.ql.lib.CostLessRuleDispatcher.dispatch(CostLessRuleDispatcher.java:66)
    at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatchAndReturn(DefaultGraphWalker.java:105)
    at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:89)
    at org.apache.hadoop.hive.ql.lib.ExpressionWalker.walk(ExpressionWalker.java:101)
    at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:120)
    at org.apache.hadoop.hive.ql.parse.type.TypeCheckProcFactory.genExprNode(TypeCheckProcFactory.java:231)
    at org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck.genExprNode(ExprNodeTypeCheck.java:49)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genAllExprNodeDesc(SemanticAnalyzer.java:13609)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genExprNodeDesc(SemanticAnalyzer.java:13564)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genExprNodeDesc(SemanticAnalyzer.java:13532)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genExprNodeDesc(SemanticAnalyzer.java:13526)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genGroupByPlanMapGroupByOperator(SemanticAnalyzer.java:5863)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genGroupByPlanMapAggrNoSkew(SemanticAnalyzer.java:6805)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genBodyPlan(SemanticAnalyzer.java:11485)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:12441)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:12307)
    at org.apache.hadoop.hive.ql.parse.CalcitePlanner.genOPTree(CalcitePlanner.java:653)
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:13180)
    at org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:479)
    at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:336)
    at org.apache.hadoop.hive.ql.Compiler.analyze(Compiler.java:224)
    at org.apache.hadoop.hive.ql.Compiler.compile(Compiler.java:109)
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:498)
    at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:450)
    at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:414)
    at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:408)
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:126)
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:234)
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:257)
    at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:201)
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:127)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:425)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:356)
    at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:746)
    at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:716)
    at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:116)
    at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157)
    at org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:135)
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
    at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
    at org.junit.runners.Suite.runChild(Suite.java:128)
    at org.junit.runners.Suite.runChild(Suite.java:27)
    at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
    at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
    at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:95)
    at org.junit.rules.RunRules.evaluate(RunRules.java:20)
    at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:316)
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:240)
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:214)
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:155)
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:385)
    at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:162)
    at org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:507)
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:495)
[INFO]
[ERROR] Tests run: 1, Failures: 1, Errors: 0, Skipped: 0
```
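For reference, the failing statement from stddev_variance_with_double_decimal_test.q (as echoed in the error above) can also be pasted directly into a beeline/Hive CLI session. This is only a convenience sketch for reproducing the SemanticException; it assumes no special session settings from the .q file are needed, since the failure happens at compile time:

```
-- Statement copied from the error output above; it reads no tables,
-- so it should hit the same compile-time SemanticException on its own.
SELECT account_id,
       STDDEV(interest_paid) AS sdev
FROM (
  SELECT 10 AS account_id, CAST(23.79 AS DOUBLE) interest_paid
  UNION ALL SELECT 10, 23.79
  UNION ALL SELECT 10, 23.79
  UNION ALL SELECT 11, 64.34
  UNION ALL SELECT 11, 64.34
  UNION ALL SELECT 11, 64.34
) tempDataset
GROUP BY account_id;
```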