Repository: hive
Updated Branches:
  refs/heads/master 6548cec28 -> 52e1ba15d


HIVE-17793 : Parameterize Logging Messages (Beluga Behr via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <hashut...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/52e1ba15
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/52e1ba15
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/52e1ba15

Branch: refs/heads/master
Commit: 52e1ba15d11e06f5236c7fb814500e6be38f3ac2
Parents: 6548cec
Author: BELUGA BEHR <dam6...@gmail.com>
Authored: Thu Oct 19 16:40:33 2017 -0700
Committer: Ashutosh Chauhan <hashut...@apache.org>
Committed: Thu Oct 19 16:40:33 2017 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/ColumnStatsTask.java    |   2 +-
 .../hive/ql/exec/ColumnStatsUpdateTask.java     |   2 +-
 .../apache/hadoop/hive/ql/exec/CopyTask.java    |   7 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 102 ++++++++++---------
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |   6 +-
 .../apache/hadoop/hive/ql/exec/FetchTask.java   |   2 +-
 .../hadoop/hive/ql/exec/FunctionTask.java       |  16 +--
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |  14 +--
 .../hadoop/hive/ql/exec/ReplCopyTask.java       |  16 +--
 .../hadoop/hive/ql/exec/StatsNoJobTask.java     |  11 +-
 .../apache/hadoop/hive/ql/exec/StatsTask.java   |  11 +-
 11 files changed, 100 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
index e3b0d1e..1f28688 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
@@ -365,7 +365,7 @@ public class ColumnStatsTask extends Task<ColumnStatsWork> implements Serializab
         statsObjs.add(statsObj);
       } catch (UnsupportedDoubleException e) {
         // due to infinity or nan.
- LOG.info("Because " + colName.get(i) + " is infinite or NaN, we skip stats."); + LOG.info("Because {} is infinite or NaN, we skip stats.", colName.get(i)); } } http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java index 48a9c9a..2c15ba7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java @@ -326,7 +326,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> { return new Date(writableVal.getDays()); } catch (IllegalArgumentException err) { // Fallback to integer parsing - LOG.debug("Reading date value as days since epoch: " + dateStr); + LOG.debug("Reading date value as days since epoch: {}", dateStr); return new Date(Long.parseLong(dateStr)); } } http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java index 664a11d..0732610 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java @@ -83,13 +83,14 @@ public class CopyTask extends Task<CopyWork> implements Serializable { } for (FileStatus oneSrc : srcs) { - console.printInfo("Copying file: " + oneSrc.getPath().toString()); - LOG.debug("Copying file: " + oneSrc.getPath().toString()); + String oneSrcPathStr = oneSrc.getPath().toString(); + console.printInfo("Copying file: " + oneSrcPathStr); + LOG.debug("Copying file: {}", oneSrcPathStr); if (!FileUtils.copy(srcFs, oneSrc.getPath(), dstFs, toPath, false, // delete source true, // overwrite destination conf)) { - console.printError("Failed to copy: '" + oneSrc.getPath().toString() + console.printError("Failed to copy: '" + oneSrcPathStr + "to: '" + toPath.toString() + "'"); return 1; } http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 335ea63..3b2454d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -662,7 +662,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { e = e.getCause(); } setException(e); - LOG.error(stringifyException(e)); + LOG.error("Failed", e); } private int showConf(Hive db, ShowConfDesc showConf) throws Exception { @@ -1125,7 +1125,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { db.alterIndex(baseTableName, indexName, idx); } catch (InvalidOperationException e) { console.printError("Invalid alter operation: " + e.getMessage()); - LOG.info("alter index: " + stringifyException(e)); + LOG.info("alter index: ", e); return 1; } catch (HiveException e) { console.printError("Invalid alter operation: " + e.getMessage()); @@ -1169,9 +1169,11 @@ public class DDLTask extends Task<DDLWork> implements Serializable { if 
(!allowOperationInReplicationScope(db, tableName, oldPartSpec, renamePartitionDesc.getReplicationSpec())) { // no rename, the table is missing either due to drop/rename which follows the current rename. // or the existing table is newer than our update. + if (LOG.isDebugEnabled()) { LOG.debug("DDLTask: Rename Partition is skipped as table {} / partition {} is newer than update", tableName, - FileUtils.makePartName(new ArrayList(oldPartSpec.keySet()), new ArrayList(oldPartSpec.values()))); + FileUtils.makePartName(new ArrayList<>(oldPartSpec.keySet()), new ArrayList<>(oldPartSpec.values()))); + } return 0; } @@ -2472,7 +2474,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { outStream.write(createTab_stmt.render().getBytes(StandardCharsets.UTF_8)); } catch (IOException e) { - LOG.info("show create table: " + stringifyException(e)); + LOG.info("show create table: ", e); return 1; } @@ -2545,10 +2547,10 @@ public class DDLTask extends Task<DDLWork> implements Serializable { outStream.write(MetaDataFormatUtils.getIndexInformation(index, isOutputPadded).getBytes(StandardCharsets.UTF_8)); } } catch (FileNotFoundException e) { - LOG.info("show indexes: " + stringifyException(e)); + LOG.info("show indexes: ", e); throw new HiveException(e.toString()); } catch (IOException e) { - LOG.info("show indexes: " + stringifyException(e)); + LOG.info("show indexes: ", e); throw new HiveException(e.toString()); } catch (Exception e) { throw new HiveException(e.toString()); @@ -2572,12 +2574,12 @@ public class DDLTask extends Task<DDLWork> implements Serializable { // get the databases for the desired pattern - populate the output stream List<String> databases = null; if (showDatabasesDesc.getPattern() != null) { - LOG.info("pattern: " + showDatabasesDesc.getPattern()); + LOG.info("pattern: {}", showDatabasesDesc.getPattern()); databases = db.getDatabasesByPattern(showDatabasesDesc.getPattern()); } else { databases = db.getAllDatabases(); } - LOG.info("results : " + databases.size()); + LOG.info("results : {}", databases.size()); // write the results in the file DataOutputStream outStream = getOutputStream(showDatabasesDesc.getResFile()); @@ -2615,9 +2617,9 @@ public class DDLTask extends Task<DDLWork> implements Serializable { throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName); } - LOG.debug("pattern: " + pattern); + LOG.debug("pattern: {}", pattern); tablesOrViews = db.getTablesByType(dbName, pattern, type); - LOG.debug("results : " + tablesOrViews.size()); + LOG.debug("results : {}", tablesOrViews.size()); // write the results in the file DataOutputStream outStream = null; @@ -2678,14 +2680,14 @@ public class DDLTask extends Task<DDLWork> implements Serializable { // get the tables for the desired patten - populate the output stream Set<String> funcs = null; if (showFuncs.getPattern() != null) { - LOG.info("pattern: " + showFuncs.getPattern()); + LOG.info("pattern: {}", showFuncs.getPattern()); if (showFuncs.getIsLikePattern()) { funcs = FunctionRegistry.getFunctionNamesByLikePattern(showFuncs.getPattern()); } else { console.printInfo("SHOW FUNCTIONS is deprecated, please use SHOW FUNCTIONS LIKE instead."); funcs = FunctionRegistry.getFunctionNames(showFuncs.getPattern()); } - LOG.info("results : " + funcs.size()); + LOG.info("results : {}", funcs.size()); } else { funcs = FunctionRegistry.getFunctionNames(); } @@ -2704,10 +2706,10 @@ public class DDLTask extends Task<DDLWork> implements Serializable { outStream.write(terminator); } } catch (FileNotFoundException e) { - 
LOG.warn("show function: " + stringifyException(e)); + LOG.warn("show function: ", e); return 1; } catch (IOException e) { - LOG.warn("show function: " + stringifyException(e)); + LOG.warn("show function: ", e); return 1; } catch (Exception e) { throw new HiveException(e); @@ -2800,10 +2802,10 @@ public class DDLTask extends Task<DDLWork> implements Serializable { outStream.write(terminator); } } catch (FileNotFoundException e) { - LOG.warn("show function: " + stringifyException(e)); + LOG.warn("show function: ", e); return 1; } catch (IOException e) { - LOG.warn("show function: " + stringifyException(e)); + LOG.warn("show function: ", e); return 1; } catch (Exception e) { throw new HiveException(e.toString(), e); @@ -2922,10 +2924,10 @@ public class DDLTask extends Task<DDLWork> implements Serializable { try { dumpLockInfo(os, rsp); } catch (FileNotFoundException e) { - LOG.warn("show function: " + stringifyException(e)); + LOG.warn("show function: ", e); return 1; } catch (IOException e) { - LOG.warn("show function: " + stringifyException(e)); + LOG.warn("show function: ", e); return 1; } catch (Exception e) { throw new HiveException(e.toString()); @@ -2993,7 +2995,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { } } } catch (IOException e) { - LOG.warn("show compactions: " + stringifyException(e)); + LOG.warn("show compactions: ", e); return 1; } finally { IOUtils.closeStream(os); @@ -3037,7 +3039,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { os.write(terminator); } } catch (IOException e) { - LOG.warn("show transactions: " + stringifyException(e)); + LOG.warn("show transactions: ", e); return 1; } finally { IOUtils.closeStream(os); @@ -3055,7 +3057,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { for (String queryId : desc.getQueryIds()) { sessionState.getKillQuery().killQuery(queryId); } - LOG.info("kill query called (" + desc.getQueryIds().toString() + ")"); + LOG.info("kill query called ({})", desc.getQueryIds()); return 0; } @@ -3182,10 +3184,10 @@ public class DDLTask extends Task<DDLWork> implements Serializable { } } } catch (FileNotFoundException e) { - LOG.warn("describe function: " + stringifyException(e)); + LOG.warn("describe function: ", e); return 1; } catch (IOException e) { - LOG.warn("describe function: " + stringifyException(e)); + LOG.warn("describe function: ", e); return 1; } catch (Exception e) { throw new HiveException(e); @@ -3255,7 +3257,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { } tbls.add(tbl); } else { - LOG.info("pattern: " + showTblStatus.getPattern()); + LOG.info("pattern: {}", showTblStatus.getPattern()); List<String> tblStr = db.getTablesForDb(showTblStatus.getDbName(), showTblStatus.getPattern()); SortedSet<String> sortedTbls = new TreeSet<String>(tblStr); @@ -3266,7 +3268,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { Table tbl = db.getTable(showTblStatus.getDbName(), tblName); tbls.add(tbl); } - LOG.info("results : " + tblStr.size()); + LOG.info("results : {}", tblStr.size()); } // write the results in the file @@ -3304,7 +3306,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { return 0; } - LOG.info("DDLTask: show properties for " + tbl.getTableName()); + LOG.info("DDLTask: show properties for {}", tableName); StringBuilder builder = new StringBuilder(); String propertyName = showTblPrpt.getPropertyName(); @@ -3326,14 +3328,14 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable { } } - LOG.info("DDLTask: written data for showing properties of " + tbl.getTableName()); + LOG.info("DDLTask: written data for showing properties of {}", tableName); writeToFile(builder.toString(), showTblPrpt.getResFile()); } catch (FileNotFoundException e) { - LOG.info("show table properties: " + stringifyException(e)); + LOG.info("show table properties: ", e); return 1; } catch (IOException e) { - LOG.info("show table properties: " + stringifyException(e)); + LOG.info("show table properties: ", e); return 1; } catch (Exception e) { throw new HiveException(e); @@ -3391,7 +3393,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { DataOutputStream outStream = getOutputStream(descTbl.getResFile()); try { - LOG.debug("DDLTask: got data for " + tbl.getTableName()); + LOG.debug("DDLTask: got data for {}", tableName); List<FieldSchema> cols = null; List<ColumnStatisticsObj> colStats = null; @@ -3511,7 +3513,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { isOutputPadded, colStats, pkInfo, fkInfo, ukInfo, nnInfo); - LOG.debug("DDLTask: written data for " + tbl.getTableName()); + LOG.debug("DDLTask: written data for {}", tableName); } catch (SQLException e) { throw new HiveException(e, ErrorMsg.GENERIC_ERROR, tableName); @@ -3701,7 +3703,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { // Add constraints if necessary addConstraints(db, alterTbl); } catch (InvalidOperationException e) { - LOG.error("alter table: " + stringifyException(e)); + LOG.error("alter table: ", e); throw new HiveException(e, ErrorMsg.GENERIC_ERROR); } @@ -3743,8 +3745,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable { static boolean addIfAbsentByName(WriteEntity newWriteEntity, Set<WriteEntity> outputs) { for(WriteEntity writeEntity : outputs) { if(writeEntity.getName().equalsIgnoreCase(newWriteEntity.getName())) { - LOG.debug("Ignoring request to add " + newWriteEntity.toStringDetail() + " because " + - writeEntity.toStringDetail() + " is present"); + LOG.debug("Ignoring request to add {} because {} is present", + newWriteEntity.toStringDetail(), writeEntity.toStringDetail()); return false; } } @@ -4280,7 +4282,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { && alterTbl.getReplicationSpec()!= null && alterTbl.getReplicationSpec().isInReplicationScope()) { // During repl load, NoSuchObjectException in foreign key shall // ignore as the foreign table may not be part of the replication - LOG.debug(e.getMessage()); + LOG.debug("InvalidObjectException: ", e); } else { throw e; } @@ -4500,7 +4502,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable { Deserializer d = ReflectionUtil.newInstance(conf.getClassByName(serdeName). 
          asSubclass(Deserializer.class), conf);
      if (d != null) {
-        LOG.debug("Found class for " + serdeName);
+        LOG.debug("Found class for {}", serdeName);
      }
    } catch (Exception e) {
      throw new HiveException("Cannot validate serde: " + serdeName, e);
    }
@@ -4580,8 +4582,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
      for (HiveConf.ConfVars var: HiveConf.dbVars) {
        String newValue = dbParams.get(var.varname);
        if (newValue != null) {
-          LOG.info("Changing " + var.varname +
-              " from " + conf.getVar(var) + " to " + newValue);
+          LOG.info("Changing {} from {} to {}", var.varname, conf.getVar(var),
+              newValue);
          conf.setVar(var, newValue);
        }
      }
@@ -4608,8 +4610,8 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
    List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys();
    List<SQLUniqueConstraint> uniqueConstraints = crtTbl.getUniqueConstraints();
    List<SQLNotNullConstraint> notNullConstraints = crtTbl.getNotNullConstraints();
-    LOG.info("creating table " + tbl.getDbName() + "." + tbl.getTableName() + " on " +
-        tbl.getDataLocation());
+    LOG.info("creating table {}.{} on {}", tbl.getDbName(), tbl.getTableName(),
+        tbl.getDataLocation());

    if (crtTbl.getReplicationSpec().isInReplicationScope() && (!crtTbl.getReplaceMode())){
      // if this is a replication spec, then replace-mode semantics might apply.
@@ -4698,7 +4700,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
      tbl.setPartCols(oldtbl.getPartCols());

      if (crtTbl.getDefaultSerName() == null) {
-        LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName());
+        LOG.info("Default to LazySimpleSerDe for table {}", targetTableName);
        tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      } else {
        // let's validate that the serde exists
@@ -4775,7 +4777,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
      tbl.getTTable().getSd().setOutputFormat(
          tbl.getOutputFormatClass().getName());
      if (crtTbl.getDefaultSerName() == null) {
-        LOG.info("Default to LazySimpleSerDe for like table " + crtTbl.getTableName());
+        LOG.info("Default to LazySimpleSerDe for like table {}", targetTableName);
        tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      } else {
        // let's validate that the serde exists
@@ -4907,12 +4909,12 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
    if (crtView.getSerde() == null) {
      if (storageHandler == null) {
        serDeClassName = PlanUtils.getDefaultSerDe().getName();
-        LOG.info("Default to " + serDeClassName
-            + " for materialized view " + crtView.getViewName());
+        LOG.info("Default to {} for materialized view {}", serDeClassName,
+            crtView.getViewName());
      } else {
        serDeClassName = storageHandler.getSerDeClass().getName();
-        LOG.info("Use StorageHandler-supplied " + serDeClassName
-            + " for materialized view " + crtView.getViewName());
+        LOG.info("Use StorageHandler-supplied {} for materialized view {}",
+            serDeClassName, crtView.getViewName());
      }
    } else {
      // let's validate that the serde exists
@@ -4974,9 +4976,11 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
    if (!allowOperationInReplicationScope(db, tableName, partSpec, truncateTableDesc.getReplicationSpec())) {
      // no truncate, the table is missing either due to drop/rename which follows the truncate.
      // or the existing table is newer than our update.
-      LOG.debug("DDLTask: Truncate Table/Partition is skipped as table {} / partition {} is newer than update",
-          tableName,
-          (partSpec == null) ? "null" : FileUtils.makePartName(new ArrayList(partSpec.keySet()), new ArrayList(partSpec.values())));
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("DDLTask: Truncate Table/Partition is skipped as table {} / partition {} is newer than update",
+            tableName,
+            (partSpec == null) ? "null" : FileUtils.makePartName(new ArrayList<>(partSpec.keySet()), new ArrayList<>(partSpec.values())));
+      }
      return 0;
    }

http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index b9ba10b..d7d33c6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -358,8 +358,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
        jsonParser.print(jsonPlan, out);
      } catch (Exception e) {
        // if there is anything wrong happen, we bail out.
-        LOG.error("Running explain user level has problem: " + e.toString()
-            + ". Falling back to normal explain");
+        LOG.error("Running explain user level has problem."
+            + " Falling back to normal explain.", e);
        work.getConfig().setFormatted(false);
        work.getConfig().setUserLevelExplain(false);
        jsonPlan = getJSONPlan(out, work);
@@ -371,7 +371,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
        JsonParser jsonParser = JsonParserFactory.getParser(conf);
        if (jsonParser != null) {
          jsonParser.print(jsonPlan, null);
-          LOG.info("JsonPlan is augmented to " + jsonPlan.toString());
+          LOG.info("JsonPlan is augmented to {}", jsonPlan);
        }
        out.print(jsonPlan);
      }

http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
index 81f4103..6589bb2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
@@ -93,7 +93,7 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
    } catch (Exception e) {
      // Bail out ungracefully - we should never hit
      // this here - but would have hit it in SemanticAnalyzer
-      LOG.error(StringUtils.stringifyException(e));
+      LOG.error("Initialize failed", e);
      throw new RuntimeException(e);
    }
  }

http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
index bb0fff4..cd9a60b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
@@ -93,7 +93,7 @@ public class FunctionTask extends Task<FunctionWork> {
        return createPermanentFunction(Hive.get(conf), createFunctionDesc);
      } catch (Exception e) {
        setException(e);
-        LOG.error(stringifyException(e));
+        LOG.error("Failed to create function", e);
        return 1;
      }
    }
@@ -121,7 +121,7 @@ public class FunctionTask extends Task<FunctionWork> {
        return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
      } catch (Exception e) {
        setException(e);
-        LOG.error(stringifyException(e));
+        LOG.error("Failed to drop function", e);
        return 1;
      }
    }
@@ -132,7 +132,7 @@ public class FunctionTask extends Task<FunctionWork> {
        Hive.get().reloadFunctions();
      } catch (Exception e) {
        setException(e);
-        LOG.error(stringifyException(e));
+        LOG.error("Failed to reload functions", e);
        return 1;
      }
    }
@@ -212,12 +212,12 @@ public class FunctionTask extends Task<FunctionWork> {
      return 1;
    } catch (HiveException e) {
      console.printError("FAILED: " + e.toString());
-      LOG.info("create function: " + StringUtils.stringifyException(e));
+      LOG.info("create function: ", e);
      return 1;
    } catch (ClassNotFoundException e) {
      console.printError("FAILED: Class " + createFunctionDesc.getClassName()
          + " not found");
-      LOG.info("create function: " + StringUtils.stringifyException(e));
+      LOG.info("create function: ", e);
      return 1;
    }
  }
@@ -237,7 +237,7 @@ public class FunctionTask extends Task<FunctionWork> {
      FunctionRegistry.unregisterTemporaryUDF(dropMacroDesc.getMacroName());
      return 0;
    } catch (HiveException e) {
-      LOG.info("drop macro: " + StringUtils.stringifyException(e));
+      LOG.info("drop macro: ", e);
      return 1;
    }
  }
@@ -256,7 +256,7 @@ public class FunctionTask extends Task<FunctionWork> {

      return 0;
    } catch (Exception e) {
-      LOG.info("drop function: " + StringUtils.stringifyException(e));
+      LOG.info("drop function: ", e);
      console.printError("FAILED: error during drop function: " + StringUtils.stringifyException(e));
      return 1;
    }
@@ -267,7 +267,7 @@ public class FunctionTask extends Task<FunctionWork> {
      FunctionRegistry.unregisterTemporaryUDF(dropFunctionDesc.getFunctionName());
      return 0;
    } catch (HiveException e) {
-      LOG.info("drop function: " + StringUtils.stringifyException(e));
+      LOG.info("drop function: ", e);
      return 1;
    }
  }

http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index bef7433..31a920b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -138,8 +138,8 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
          tgtFs.delete(deletePath, true);
        }
      } catch (IOException e) {
-        LOG.info("Unable to delete the path created for facilitating rename"
-            + deletePath);
+        LOG.info("Unable to delete the path created for facilitating rename: {}",
+            deletePath);
      }
      throw new HiveException("Unable to rename: " + sourcePath
          + " to: " + targetPath);
@@ -230,14 +230,14 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
        for (HiveLock lock : locks) {
          if (lock.getHiveLockMode() == lockObj.getMode()) {
            if (ctx.getHiveLocks().remove(lock)) {
-              LOG.info("about to release lock for output: " + output.toString() +
-                  " lock: " + lock.getHiveLockObject().getName());
+              LOG.info("about to release lock for output: {} lock: {}", output,
+                  lock.getHiveLockObject().getName());
              try {
                lockMgr.unlock(lock);
              } catch (LockException le) {
                // should be OK since the lock is ephemeral and will eventually be deleted
                // when the query finishes and zookeeper session is closed.
-                LOG.warn("Could not release lock " + lock.getHiveLockObject().getName());
+                LOG.warn("Could not release lock {}", lock.getHiveLockObject().getName());
              }
            }
          }
@@ -385,7 +385,7 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
                getWriteType(tbd, work.getLoadTableWork().getWriteType())),
                work.getOutputs());
          }
        } else {
-          LOG.info("Partition is: " + tbd.getPartitionSpec().toString());
+          LOG.info("Partition is: {}", tbd.getPartitionSpec());

          // Check if the bucketing and/or sorting columns were inferred
          TaskInformation ti = new TaskInformation(this, tbd.getSourcePath().toUri().toString());
@@ -558,7 +558,7 @@ public class MoveTask extends Task<MoveWork> implements Serializable {
              work.getLineagState().setLineage(tbd.getSourcePath(), dc, table.getCols());
            }
-            LOG.info("\tLoading partition " + entry.getKey());
+            LOG.info("Loading partition " + entry.getKey());
          }
          console.printInfo("\t Time taken for adding to write entity : " +
              (System.currentTimeMillis() - startTime)/1000.0 + " seconds");

http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
index 80905d5..33a6c04 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
@@ -94,7 +94,9 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {
        // This flow is usually taken for REPL LOAD
        // Our input is the result of a _files listing, we should expand out _files.
        srcFiles = filesInFileListing(srcFs, fromPath);
-        LOG.debug("ReplCopyTask _files contains:" + (srcFiles == null ? "null" : srcFiles.size()));
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("ReplCopyTask _files contains: {}", (srcFiles == null ? "null" : srcFiles.size()));
+        }
        if ((srcFiles == null) || (srcFiles.isEmpty())) {
          if (work.isErrorOnSrcEmpty()) {
            console.printError("No _files entry found on source: " + fromPath.toString());
@@ -106,7 +108,9 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {
      } else {
        // This flow is usually taken for IMPORT command
        FileStatus[] srcs = LoadSemanticAnalyzer.matchFilesOrDir(srcFs, fromPath);
-        LOG.debug("ReplCopyTasks srcs= {}", (srcs == null ? "null" : srcs.length));
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("ReplCopyTasks srcs= {}", (srcs == null ? "null" : srcs.length));
+        }
        if (srcs == null || srcs.length == 0) {
          if (work.isErrorOnSrcEmpty()) {
            console.printError("No files matching path: " + fromPath.toString());
@@ -162,7 +166,7 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {
  private List<ReplChangeManager.FileInfo> filesInFileListing(FileSystem fs, Path dataPath)
      throws IOException {
    Path fileListing = new Path(dataPath, EximUtil.FILES_NAME);
-    LOG.debug("ReplCopyTask filesInFileListing() reading " + fileListing.toUri());
+    LOG.debug("ReplCopyTask filesInFileListing() reading {}", fileListing.toUri());
    if (! fs.exists(fileListing)){
      LOG.debug("ReplCopyTask : _files does not exist");
      return null; // Returning null from this fn can serve as an err condition.
@@ -175,7 +179,7 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {

    String line = null;
    while ((line = br.readLine()) != null) {
-      LOG.debug("ReplCopyTask :_filesReadLine:" + line);
+      LOG.debug("ReplCopyTask :_filesReadLine: {}", line);

      String[] fileWithChksum = ReplChangeManager.getFileWithChksumFromURI(line);
      try {
@@ -184,7 +188,7 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {
        filePaths.add(f);
      } catch (MetaException e) {
        // issue warning for missing file and throw exception
-        LOG.warn("Cannot find " + fileWithChksum[0] + " in source repo or cmroot");
+        LOG.warn("Cannot find {} in source repo or cmroot", fileWithChksum[0]);
        throw new IOException(e.getMessage());
      }
      // Note - we need srcFs rather than fs, because it is possible that the _files lists files
@@ -213,7 +217,7 @@ public class ReplCopyTask extends Task<ReplCopyWork> implements Serializable {
  public static Task<?> getLoadCopyTask(ReplicationSpec replicationSpec, Path srcPath,
      Path dstPath, HiveConf conf) {
    Task<?> copyTask = null;
-    LOG.debug("ReplCopyTask:getLoadCopyTask: "+srcPath + "=>" + dstPath);
+    LOG.debug("ReplCopyTask:getLoadCopyTask: {}=>{}", srcPath, dstPath);
    if ((replicationSpec != null) && replicationSpec.isInReplicationScope()){
      ReplCopyWork rcwork = new ReplCopyWork(srcPath, dstPath, false);
      LOG.debug("ReplCopyTask:\trcwork");

http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
index df96b92..e5d4978 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
@@ -108,9 +108,9 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
          new ThreadFactoryBuilder().setDaemon(true).setNameFormat("StatsNoJobTask-Thread-%d")
              .build());
      partUpdates = new MapMaker().concurrencyLevel(numThreads).makeMap();
-      LOG.info("Initialized threadpool for stats computation with " + numThreads + " threads");
+      LOG.info("Initialized threadpool for stats computation with {} threads", numThreads);
    } catch (HiveException e) {
-      LOG.error("Cannot get table " + tableName, e);
+      LOG.error("Cannot get table {}", tableName, e);
      console.printError("Cannot get table " + tableName, e.toString());
    }
@@ -185,12 +185,12 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
            String threadName = Thread.currentThread().getName();
            String msg = "Partition " + tableFullName + partn.getSpec() +
                " stats: [" + toString(parameters) + ']';
-            LOG.debug(threadName + ": " + msg);
+            LOG.debug("{}: {}", threadName, msg);
            console.printInfo(msg);
          } else {
            String threadName = Thread.currentThread().getName();
            String msg = "Partition " + tableFullName + partn.getSpec() + " does not provide stats.";
-            LOG.debug(threadName + ": " + msg);
+            LOG.debug("{}: {}", threadName, msg);
          }
        } catch (Exception e) {
          console.printInfo("[Warning] could not update stats for " + tableFullName + partn.getSpec()
@@ -290,6 +290,7 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
            Utilities.FILE_OP_LOGGER.trace(msg);
          }
          console.printInfo(msg);
+          LOG.debug("Table {} does not provide stats.", tableFullName);
        }
      } catch (Exception e) {
        console.printInfo("[Warning] could not update stats for " + tableFullName + ".",
@@ -333,7 +334,7 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
        environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK);
        db.alterPartitions(tableFullName, Lists.newArrayList(partUpdates.values()),
            environmentContext);
-        LOG.debug("Bulk updated " + partUpdates.values().size() + " partitions.");
+        LOG.debug("Bulk updated {} partitions.", partUpdates.values().size());
      }
    }
    return 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/52e1ba15/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
index f11f163..ff46d3a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
@@ -128,7 +128,7 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
      table = hive.getTable(tableName);
    } catch (HiveException e) {
-      LOG.error("Cannot get table " + tableName, e);
+      LOG.error("Cannot get table {}", tableName, e);
      console.printError("Cannot get table " + tableName, e.toString());
    }
@@ -219,6 +219,7 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
        if (conf.getBoolVar(ConfVars.TEZ_EXEC_SUMMARY)) {
          console.printInfo("Table " + tableFullName + " stats: [" + toString(parameters) + ']');
        }
+        LOG.info("Table {} stats: [{}]", tableFullName, toString(parameters));
        if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
          Utilities.FILE_OP_LOGGER.trace(
              "Table " + tableFullName + " stats: [" + toString(parameters) + ']');
@@ -239,7 +240,7 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
              .setNameFormat("stats-updater-thread-%d")
              .build());
      final List<Future<Void>> futures = Lists.newLinkedList();
-      LOG.debug("Getting file stats of all partitions. threadpool size:" + poolSize);
+      LOG.debug("Getting file stats of all partitions. threadpool size: {}", poolSize);
      try {
        for(final Partition partn : partitions) {
          final String partitionName = partn.getName();
@@ -263,7 +264,7 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
          future.get();
        }
      } catch (InterruptedException e) {
-        LOG.debug("Cancelling " + futures.size() + " file stats lookup tasks");
+        LOG.debug("Cancelling {} file stats lookup tasks", futures.size());
        //cancel other futures
        for (Future future : futures) {
          future.cancel(true);
        }
@@ -324,8 +325,8 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
            console.printInfo("Partition " + tableFullName + partn.getSpec() +
                " stats: [" + toString(parameters) + ']');
          }
-          LOG.info("Partition " + tableFullName + partn.getSpec() +
-              " stats: [" + toString(parameters) + ']');
+          LOG.info("Partition {}{} stats: [{}]", tableFullName, partn.getSpec(),
+              toString(parameters));
        }
        if (!updates.isEmpty()) {
          db.alterPartitions(tableFullName, updates, environmentContext);