svn commit: r1083919 - in /websites/production/hive/content/javadocs/r4.0.0-beta-1: ./ api/ api/org/ api/org/apache/ api/org/apache/hadoop/ api/org/apache/hadoop/fs/ api/org/apache/hadoop/fs/class-use
Author: zabetak Date: Mon Aug 14 18:20:29 2023 New Revision: 1083919 Log: Hive 4.0.0-beta-1 release [This commit notification would consist of 7832 parts, which exceeds the limit of 50 ones, so it was shortened to the summary.]
[hive] branch master updated: HIVE-27554: added control to JDBCBrowser client URL (#4537) (Henri Biestro)
This is an automated email from the ASF dual-hosted git repository. ngangam pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 7abeb1df463 HIVE-27554: added control to JDBCBrowser client URL (#4537) (Henri Biestro) 7abeb1df463 is described below commit 7abeb1df463cc389f668172e7cf3bb772799858a Author: Henrib AuthorDate: Mon Aug 14 16:48:46 2023 +0200 HIVE-27554: added control to JDBCBrowser client URL (#4537) (Henri Biestro) * HIVE-27554: added control to JDBCBrowser client URL * HIVE-27554: moved control in redirect strategy; - simplified check using uri properties (scheme, absolute); - cleaned up imports; Simplify code; * Update TestSSOControl.java * Update HiveJdbcSamlRedirectStrategy.java --- .../hive/jdbc/saml/HiveJdbcBrowserClient.java | 8 ++-- .../jdbc/saml/HiveJdbcSamlRedirectStrategy.java| 26 +++ .../apache/hive/jdbc/saml/IJdbcBrowserClient.java | 5 --- .../org/apache/hive/jdbc/saml/TestSSOControl.java | 51 ++ 4 files changed, 81 insertions(+), 9 deletions(-) diff --git a/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcBrowserClient.java b/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcBrowserClient.java index 146bc63a5ec..fd8ee895509 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcBrowserClient.java +++ b/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcBrowserClient.java @@ -203,7 +203,6 @@ public class HiveJdbcBrowserClient implements IJdbcBrowserClient { @VisibleForTesting protected void openBrowserWindow() throws HiveJdbcBrowserException { URI ssoUri = clientContext.getSsoUri(); -Preconditions.checkNotNull(ssoUri, "SSO Url is null"); try { if (Desktop.isDesktopSupported() && Desktop.getDesktop() .isSupported(Action.BROWSE)) { @@ -212,18 +211,19 @@ public class HiveJdbcBrowserClient implements IJdbcBrowserClient { LOG.info( "Desktop mode is not supported. 
Attempting to use OS " + "commands to open the default browser"); +String ssoUriStr = ssoUri.toString(); //Desktop is not supported, lets try to open the browser process OsType os = getOperatingSystem(); switch (os) { case WINDOWS: Runtime.getRuntime() -.exec("rundll32 url.dll,FileProtocolHandler " + ssoUri.toString()); +.exec("rundll32 url.dll,FileProtocolHandler " + ssoUriStr); break; case MAC: -Runtime.getRuntime().exec("open " + ssoUri.toString()); +Runtime.getRuntime().exec("open " + ssoUriStr); break; case LINUX: -Runtime.getRuntime().exec("xdg-open " + ssoUri.toString()); +Runtime.getRuntime().exec("xdg-open " + ssoUriStr); break; case UNKNOWN: throw new HiveJdbcBrowserException( diff --git a/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcSamlRedirectStrategy.java b/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcSamlRedirectStrategy.java index 40e057570e3..c5ce4eeea5e 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcSamlRedirectStrategy.java +++ b/jdbc/src/java/org/apache/hive/jdbc/saml/HiveJdbcSamlRedirectStrategy.java @@ -62,4 +62,30 @@ public class HiveJdbcSamlRedirectStrategy extends DefaultRedirectStrategy { } return super.isRedirected(request, response, context); } + + @Override + public URI getLocationURI(HttpRequest request, HttpResponse response, HttpContext context) throws ProtocolException { +// add our own check to super-call +return checkSsoUri(super.getLocationURI(request, response, context)); + } + + /** + * Checks that the URI used to redirect SSO is valid. 
+ * @param uri the uri to validate + * @return the uri + * @throws ProtocolException if uri is null or not http(s) or not absolute + */ + static URI checkSsoUri(URI uri) throws ProtocolException { +if (uri == null) { + throw new ProtocolException("SSO Url is null"); +} +final String scheme = uri.getScheme(); +// require http or https and absolute +final boolean valid = ("http".equalsIgnoreCase(scheme) || "https".equalsIgnoreCase(scheme)) + && uri.isAbsolute(); +if (!valid) { + throw new ProtocolException("SSO Url "+uri.toString()+ " is invalid"); +} +return uri; + } } diff --git a/jdbc/src/java/org/apache/hive/jdbc/saml/IJdbcBrowserClient.java b/jdbc/src/java/org/apache/hive/jdbc/saml/IJdbcBrowserClient.java index a6125838300..c1d7b9f9ba1 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/saml/IJdbcBrowserClient.java +++ b/jdbc/src/java/org/apache/hive/jdbc/saml/IJdbcBrowserClient.java @@ -21,12 +21,7 @@ package org.apache.hive.jdbc.saml; import com.google.common.base.Preconditions; import
svn commit: r63446 - /dev/hive/hive-4.0.0-beta-1/ /release/hive/hive-4.0.0-beta-1/
Author: zabetak Date: Mon Aug 14 13:16:25 2023 New Revision: 63446 Log: Move hive-4.0.0-beta-1 release from dev to release Added: release/hive/hive-4.0.0-beta-1/ - copied from r63445, dev/hive/hive-4.0.0-beta-1/ Removed: dev/hive/hive-4.0.0-beta-1/
svn commit: r63445 - /dev/hive/hive-4.0.0-beta-1/
Author: zabetak Date: Mon Aug 14 12:50:51 2023 New Revision: 63445 Log: Hive 4.0.0-beta-1 release Added: dev/hive/hive-4.0.0-beta-1/ dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz (with props) dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.asc dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.sha256 dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz (with props) dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.asc dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.sha256 Added: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz == Binary file - no diff available. Propchange: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz -- svn:mime-type = application/octet-stream Added: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.asc == --- dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.asc (added) +++ dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.asc Mon Aug 14 12:50:51 2023 @@ -0,0 +1,16 @@ +-BEGIN PGP SIGNATURE- + +iQIzBAABCgAdFiEEBHSVd/2TRnS5zUXF13wzg/GSdXAFAmTQ17MACgkQ13wzg/GS +dXDRIg//dCY71SrvDNWHNjYhJPb8AaUfYKwgB0Z7T/Eubbz22KLKzg80ArjPTndE +xAA13r/mkqT+4wsmvh2RAD+pMfNvmDcg9DytyeXiJw0aaGhx2afwon2UNYNBpRCN +YYbVbKoyrjuHAoPixgizc5Jq6+QqLrlIHNdQEe9LBWtCZiKEUO37yU/z/1r1Nd4W +Tjc55aSsXv5ICf0aVFdX4fqhjMho9hsuxM3J9ElNe/LOwvNJHqx7S5R9grUkNP39 +0P0kJvCPdxcra8o5Cpa853OtPQUDihHGXw4Zh9IAoYfpOVF1o1pKAIH6lXvzRcHc +D07TVNyaGKuTbicoThMiPNpQzvd8wnifaPLafb6d+9Rd+QUByRVCne1IesHSLAS/ +QlBbPRzOm0l2UeQnF3MX3GuOTFXWQIy1QqBfji0D/quw7qpvNJFdqdDFiBidwdIx +Yaw0I1t3F6P6Rd9VwlPR1UiVM3kYDzTKbtRQvuDP7t/c4I+3HbYvHdfWGH5mqMnx +BObx/zpjU3Af3fkiaGaM0UxM9i5TtgrdOba73jh8Iio6VX3uFPUgOtNIYBPwMP4l +H4ZoqDBtxbkVMysR89tzPcQEgI4r0onaojfaVGOnuXwyBntHaCG6ezQcFMvkqbBW +H2ZgxjCMQt6rpf9zlpxBZGbUmvvInyciuEnzi05WFms2R4QRM78= +=ixAN +-END PGP SIGNATURE- Added: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.sha256 == --- 
dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.sha256 (added) +++ dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-bin.tar.gz.sha256 Mon Aug 14 12:50:51 2023 @@ -0,0 +1 @@ +4114d8e9a523562c77237a8751dec9ed1bcbf6ccbe2e178d72f356ca4e65d466 apache-hive-4.0.0-beta-1-bin.tar.gz Added: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz == Binary file - no diff available. Propchange: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz -- svn:mime-type = application/octet-stream Added: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.asc == --- dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.asc (added) +++ dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.asc Mon Aug 14 12:50:51 2023 @@ -0,0 +1,16 @@ +-BEGIN PGP SIGNATURE- + +iQIzBAABCgAdFiEEBHSVd/2TRnS5zUXF13wzg/GSdXAFAmTQ16gACgkQ13wzg/GS +dXAigxAAtRJZecmPQJcbJ0jfm/Ct+I2Rr+boRCBeMfZlpHvHqCNAUoXT4TPw6jdf +/i2Hrogy+W7ekwv8i41hhdorTMSLn57ON5eEgdHMJ6eK/hh5sAUOcx3/FIRj1/tg +C97bvnw2oPu4uyHX0e5iahHON/g/351lOHH4qG7AB0XcPjn1MegVx6414i2VG89y +MiQhdk9lRdRm4pu5C4nF/fzT3hffmmK/dXiDN+Zio2HY/o9ZPSC6s9lG6b6f4eHu ++5JNJUm9eoLEyuA8FJIqS/xCzkmjKHbbvFO0+HUaJ1Qn/WepkAKLYNFa3mOF3AId +Vd9ERR+YrpUT5LGti+UTkGB58CcLjmA/eFOeLFd/BmL3mt8D2awmFphxWafav5mj +zBm8dO28RtCNzNCG3GPpMHb/B3+4cBwARsHDD3bTZgs1sET0//ewI2bqkN/Zps+R +Q3C+Md2dJ5ihN5yf9ZjUfYnVpO//RpS4FuAt8RCz0WCms464tZgDAOA9Tk8m2pFZ +arRsPwYAzr3kno2Rvd9ZJ8LgjlgJufbVgaHNMypey95cGGbbLquk62s8H5jsj1LZ +AkqlGC85bhDEmvR9jzsqjUgzGg0xb9Bwyg2PKn/feDfowBmgM8KaABKaMIrBLl9D +W1PcISliqSXvV5LZsIyKsescy4iiX9wbyHD94Af+dNAuoGewUlo= +=sPFM +-END PGP SIGNATURE- Added: dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.sha256 == --- dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.sha256 (added) +++ dev/hive/hive-4.0.0-beta-1/apache-hive-4.0.0-beta-1-src.tar.gz.sha256 Mon Aug 14 12:50:51 2023 @@ -0,0 +1 @@ +8d157f4dcb9af5e48e51206a4046d1c11414fbc39583c84be31d609606136209 apache-hive-4.0.0-beta-1-src.tar.gz
[hive] annotated tag release-4.0.0-beta-1-rc0 deleted (was 429d5b0e6dc)
This is an automated email from the ASF dual-hosted git repository. zabetak pushed a change to annotated tag release-4.0.0-beta-1-rc0 in repository https://gitbox.apache.org/repos/asf/hive.git *** WARNING: tag release-4.0.0-beta-1-rc0 was deleted! *** tag was 429d5b0e6dc The revisions that were on this annotated tag are still contained in other references; therefore, this change does not discard any commits from the repository.
[hive] annotated tag rel/release-4.0.0-beta-1 updated (d2310944e41 -> 72977c9c78b)
This is an automated email from the ASF dual-hosted git repository. zabetak pushed a change to annotated tag rel/release-4.0.0-beta-1 in repository https://gitbox.apache.org/repos/asf/hive.git *** WARNING: tag rel/release-4.0.0-beta-1 was modified! *** from d2310944e41 (commit) to 72977c9c78b (tag) tagging d2310944e412b577a39687c7968b2e93eede8433 (commit) by Stamatis Zampetakis on Mon Aug 14 15:06:16 2023 +0300 - Log - Hive 4.0.0-beta-1 release --- No new revisions were added by this update. Summary of changes:
[hive] branch branch-3 updated: HIVE-27551: Backport of HIVE-22208: Column name with reserved keyword is unescaped when query including join on table with mask column is re-written
This is an automated email from the ASF dual-hosted git repository. sankarh pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new 9be0397e84b HIVE-27551: Backport of HIVE-22208: Column name with reserved keyword is unescaped when query including join on table with mask column is re-written 9be0397e84b is described below commit 9be0397e84b06bd4480c341373bb2c5b0738ce6a Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com> AuthorDate: Mon Aug 14 13:02:13 2023 +0530 HIVE-27551: Backport of HIVE-22208: Column name with reserved keyword is unescaped when query including join on table with mask column is re-written Signed-off-by: Sankar Hariappan Closes (#4534) --- .../hadoop/hive/ql/parse/SemanticAnalyzer.java | 22 ++- .../test/queries/clientpositive/masking_reserved.q | 12 ++ .../results/clientpositive/masking_reserved.q.out | 198 + 3 files changed, 230 insertions(+), 2 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 8abe8407aa5..0f1577353b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -12082,8 +12082,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { // the table needs to be masked or filtered. // For the replacement, we leverage the methods that are used for // unparseTranslator. - protected static ASTNode rewriteASTWithMaskAndFilter(TableMask tableMask, ASTNode ast, TokenRewriteStream tokenRewriteStream, - Context ctx, Hive db, Map tabNameToTabObject, Set ignoredTokens) + protected ASTNode rewriteASTWithMaskAndFilter(TableMask tableMask, ASTNode ast, TokenRewriteStream tokenRewriteStream, +Context ctx, Hive db, Map tabNameToTabObject, Set ignoredTokens) throws SemanticException { // 1. 
collect information about CTE if there is any. // The base table of CTE should be masked. @@ -12124,6 +12124,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { } // 2. rewrite the AST, replace TABREF with masking/filtering if (tableMask.needsRewrite()) { + quoteIdentifierTokens(tokenRewriteStream); tableMask.applyTranslations(tokenRewriteStream); String rewrittenQuery = tokenRewriteStream.toString( ast.getTokenStartIndex(), ast.getTokenStopIndex()); @@ -14874,6 +14875,23 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { return sb.toString(); } + private void quoteIdentifierTokens(TokenRewriteStream tokenRewriteStream) { +if (conf.getVar(ConfVars.HIVE_QUOTEDID_SUPPORT).equals("none")) { + return; +} + +for (int idx = tokenRewriteStream.MIN_TOKEN_INDEX; idx <= tokenRewriteStream.size()-1; idx++) { + Token curTok = tokenRewriteStream.get(idx); + if (curTok.getType() == HiveLexer.Identifier) { +// The Tokens have no distinction between Identifiers and QuotedIdentifiers. +// Ugly solution is just to surround all identifiers with quotes. +// Re-escape any backtick (`) characters in the identifier. +String escapedTokenText = curTok.getText().replaceAll("`", "``"); +tokenRewriteStream.replace(curTok, "`" + escapedTokenText + "`"); + } +} + } + /** * Generate the query string for this query (with fully resolved table references). * @return The query string with resolved references. NULL if an error occurred. 
diff --git a/ql/src/test/queries/clientpositive/masking_reserved.q b/ql/src/test/queries/clientpositive/masking_reserved.q new file mode 100644 index 000..7fe94fa7e3a --- /dev/null +++ b/ql/src/test/queries/clientpositive/masking_reserved.q @@ -0,0 +1,12 @@ +set hive.mapred.mode=nonstrict; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest; + +create table keyword_test_off (id int, `etad` string, key int); +create table keyword_test_on (id int, `date` string, key int); +create table masking_test_n_masking_reserved (id int, value string, key int); + +explain select a.`etad`, b.value from keyword_test_off a join masking_test_n_masking_reserved b on b.id = a.id; +select a.`etad`, b.value from keyword_test_off a join masking_test_n_masking_reserved b on b.id = a.id; + +explain select a.`date`, b.value from keyword_test_on a join masking_test_n_masking_reserved b on b.id = a.id; +select a.`date`, b.value from keyword_test_on a join masking_test_n_masking_reserved b on b.id = a.id; diff --git a/ql/src/test/results/clientpositive/masking_reserved.q.out
[hive] branch branch-3 updated: HIVE-27550: Backport of HIVE-22113: Prevent LLAP shutdown on AMReporter related RuntimeException
This is an automated email from the ASF dual-hosted git repository. sankarh pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new 008b5792ec8 HIVE-27550: Backport of HIVE-22113: Prevent LLAP shutdown on AMReporter related RuntimeException 008b5792ec8 is described below commit 008b5792ec8ba129b5a87fe71f523230c721b373 Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com> AuthorDate: Mon Aug 14 12:58:25 2023 +0530 HIVE-27550: Backport of HIVE-22113: Prevent LLAP shutdown on AMReporter related RuntimeException Signed-off-by: Sankar Hariappan Closes (#4533) --- .../hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java| 13 ++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java index 7f436e23264..0fbaede7294 100644 --- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java +++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java @@ -380,9 +380,16 @@ public class TaskRunnerCallable extends CallableWithNdc { // If the task hasn't started - inform about fragment completion immediately. It's possible for // the callable to never run. fragmentCompletionHanler.fragmentComplete(fragmentInfo); -this.amReporter -.unregisterTask(request.getAmHost(), request.getAmPort(), -fragmentInfo.getQueryInfo().getQueryIdentifier(), ta); + +try { + this.amReporter + .unregisterTask(request.getAmHost(), request.getAmPort(), + fragmentInfo.getQueryInfo().getQueryIdentifier(), ta); +} catch (Throwable thr) { + // unregisterTask can throw a RuntimeException (i.e. 
if task attempt not found) + // this brings down LLAP daemon if exception is not caught here + LOG.error("Unregistering task from AMReporter failed", thr); +} } } } else {
[hive] branch branch-3 updated: HIVE-27548: Backport HIVE-22275: OperationManager.queryIdOperation does not properly clean up multiple queryIds
This is an automated email from the ASF dual-hosted git repository. sankarh pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new fe844e8950b HIVE-27548: Backport HIVE-22275: OperationManager.queryIdOperation does not properly clean up multiple queryIds fe844e8950b is described below commit fe844e8950b8b6c493c551b69af790d68e01 Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com> AuthorDate: Mon Aug 14 12:56:02 2023 +0530 HIVE-27548: Backport HIVE-22275: OperationManager.queryIdOperation does not properly clean up multiple queryIds Signed-off-by: Sankar Hariappan Closes (#4531) --- .../service/cli/session/TestSessionCleanup.java| 36 +++--- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java b/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java index 487a5d492d5..51ce2c2426d 100644 --- a/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java +++ b/service/src/test/org/apache/hive/service/cli/session/TestSessionCleanup.java @@ -25,22 +25,38 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; -import junit.framework.TestCase; + import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hive.service.cli.CLIService; +import org.apache.hive.service.cli.OperationHandle; import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient; import org.junit.Assert; import org.junit.Test; -public class TestSessionCleanup extends TestCase { +/** + * TestSessionCleanup. + */ +public class TestSessionCleanup { + // Create subclass of EmbeddedThriftBinaryCLIService, just so we can get an accessor to the CLIService. 
+ // Needed for access to the OperationManager. + private class MyEmbeddedThriftBinaryCLIService extends EmbeddedThriftBinaryCLIService { +public MyEmbeddedThriftBinaryCLIService() { + super(); +} + +public CLIService getCliService() { + return cliService; +} + } @Test // This is to test session temporary files are cleaned up after HIVE-11768 public void testTempSessionFileCleanup() throws Exception { -EmbeddedThriftBinaryCLIService service = new EmbeddedThriftBinaryCLIService(); +MyEmbeddedThriftBinaryCLIService service = new MyEmbeddedThriftBinaryCLIService(); HiveConf hiveConf = new HiveConf(); hiveConf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, @@ -51,7 +67,12 @@ public class TestSessionCleanup extends TestCase { Set existingPipeoutFiles = new HashSet(Arrays.asList(getPipeoutFiles())); SessionHandle sessionHandle = client.openSession("user1", "foobar", Collections.emptyMap()); -client.executeStatement(sessionHandle, "set a=b", null); +OperationHandle opHandle1 = client.executeStatement(sessionHandle, "set a=b", null); +String queryId1 = service.getCliService().getQueryId(opHandle1.toTOperationHandle()); +Assert.assertNotNull(queryId1); +OperationHandle opHandle2 = client.executeStatement(sessionHandle, "set b=c", null); +String queryId2 = service.getCliService().getQueryId(opHandle2.toTOperationHandle()); +Assert.assertNotNull(queryId2); File operationLogRootDir = new File( new HiveConf().getVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION)); Assert.assertNotEquals(operationLogRootDir.list().length, 0); @@ -64,6 +85,13 @@ public class TestSessionCleanup extends TestCase { Set finalPipeoutFiles = new HashSet(Arrays.asList(getPipeoutFiles())); finalPipeoutFiles.removeAll(existingPipeoutFiles); Assert.assertTrue(finalPipeoutFiles.isEmpty()); + +// Verify both operationHandles are no longer held by the OperationManager +Assert.assertEquals(0, service.getCliService().getSessionManager().getOperations().size()); + +// Verify both queryIds are no 
longer held by the OperationManager + Assert.assertNull(service.getCliService().getSessionManager().getOperationManager().getOperationByQueryId(queryId2)); + Assert.assertNull(service.getCliService().getSessionManager().getOperationManager().getOperationByQueryId(queryId1)); } private String[] getPipeoutFiles() {