[BEAM-1178] Make naming of logger objects consistent
Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/4cf59170 Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/4cf59170 Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/4cf59170 Branch: refs/heads/gearpump-runner Commit: 4cf59170546da3689fca4352ccde259cee408331 Parents: fbfea59 Author: Ismaël Mejía <ieme...@gmail.com> Authored: Sun Dec 18 21:01:13 2016 +0100 Committer: Ismaël Mejía <ieme...@gmail.com> Committed: Mon Dec 19 09:42:22 2016 +0100 ---------------------------------------------------------------------- .../apache/beam/sdk/io/range/ByteKeyRange.java | 4 +- .../beam/sdk/io/range/ByteKeyRangeTracker.java | 8 +-- .../beam/sdk/metrics/MetricsEnvironment.java | 6 +-- .../org/apache/beam/sdk/util/ApiSurface.java | 52 ++++++++++---------- .../beam/sdk/io/gcp/bigtable/BigtableIO.java | 24 ++++----- .../io/gcp/bigtable/BigtableServiceImpl.java | 6 +-- .../org/apache/beam/sdk/io/jdbc/JdbcIOTest.java | 4 +- .../apache/beam/sdk/io/mongodb/MongoDbIO.java | 12 ++--- .../sdk/io/mongodb/MongoDBGridFSIOTest.java | 8 +-- .../beam/sdk/io/mongodb/MongoDbIOTest.java | 8 +-- 10 files changed, 66 insertions(+), 66 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java index e30f8af..0212e8a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java @@ -69,7 +69,7 @@ import org.slf4j.LoggerFactory; * @see ByteKey */ public final class ByteKeyRange 
implements Serializable { - private static final Logger logger = LoggerFactory.getLogger(ByteKeyRange.class); + private static final Logger LOG = LoggerFactory.getLogger(ByteKeyRange.class); /** The range of all keys, with empty start and end keys. */ public static final ByteKeyRange ALL_KEYS = ByteKeyRange.of(ByteKey.EMPTY, ByteKey.EMPTY); @@ -191,7 +191,7 @@ public final class ByteKeyRange implements Serializable { // Keys are equal subject to padding by 0. BigInteger range = rangeEndInt.subtract(rangeStartInt); if (range.equals(BigInteger.ZERO)) { - logger.warn( + LOG.warn( "Using 0.0 as the default fraction for this near-empty range {} where start and end keys" + " differ only by trailing zeros.", this); http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java index 7c0f1c0..99717a4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java @@ -32,7 +32,7 @@ import org.slf4j.LoggerFactory; * @see ByteKeyRange */ public final class ByteKeyRangeTracker implements RangeTracker<ByteKey> { - private static final Logger logger = LoggerFactory.getLogger(ByteKeyRangeTracker.class); + private static final Logger LOG = LoggerFactory.getLogger(ByteKeyRangeTracker.class); /** Instantiates a new {@link ByteKeyRangeTracker} with the specified range. */ public static ByteKeyRangeTracker of(ByteKeyRange range) { @@ -89,7 +89,7 @@ public final class ByteKeyRangeTracker implements RangeTracker<ByteKey> { public synchronized boolean trySplitAtPosition(ByteKey splitPosition) { // Unstarted. 
if (position == null) { - logger.warn( + LOG.warn( "{}: Rejecting split request at {} because no records have been returned.", this, splitPosition); @@ -98,7 +98,7 @@ public final class ByteKeyRangeTracker implements RangeTracker<ByteKey> { // Started, but not after current position. if (splitPosition.compareTo(position) <= 0) { - logger.warn( + LOG.warn( "{}: Rejecting split request at {} because it is not after current position {}.", this, splitPosition, @@ -108,7 +108,7 @@ public final class ByteKeyRangeTracker implements RangeTracker<ByteKey> { // Sanity check. if (!range.containsKey(splitPosition)) { - logger.warn( + LOG.warn( "{}: Rejecting split request at {} because it is not within the range.", this, splitPosition); http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java index 5d7cb0b..2942578 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/metrics/MetricsEnvironment.java @@ -41,7 +41,7 @@ import org.slf4j.LoggerFactory; */ public class MetricsEnvironment { - private static final Logger LOGGER = LoggerFactory.getLogger(MetricsContainer.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricsContainer.class); private static final AtomicBoolean METRICS_SUPPORTED = new AtomicBoolean(false); private static final AtomicBoolean REPORTED_MISSING_CONTAINER = new AtomicBoolean(false); @@ -107,11 +107,11 @@ public class MetricsEnvironment { MetricsContainer container = CONTAINER_FOR_THREAD.get(); if (container == null && REPORTED_MISSING_CONTAINER.compareAndSet(false, true)) { if (METRICS_SUPPORTED.get()) { - 
LOGGER.error( + LOG.error( "Unable to update metrics on the current thread. " + "Most likely caused by using metrics outside the managed work-execution thread."); } else { - LOGGER.warn("Reporting metrics are not supported in the current execution environment."); + LOG.warn("Reporting metrics are not supported in the current execution environment."); } } return container; http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java ---------------------------------------------------------------------- diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java index 3914bb0..2040161 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java @@ -75,13 +75,13 @@ import org.slf4j.LoggerFactory; */ @SuppressWarnings("rawtypes") public class ApiSurface { - private static Logger logger = LoggerFactory.getLogger(ApiSurface.class); + private static final Logger LOG = LoggerFactory.getLogger(ApiSurface.class); /** * Returns an empty {@link ApiSurface}. 
*/ public static ApiSurface empty() { - logger.debug("Returning an empty ApiSurface"); + LOG.debug("Returning an empty ApiSurface"); return new ApiSurface(Collections.<Class<?>>emptySet(), Collections.<Pattern>emptySet()); } @@ -113,7 +113,7 @@ public class ApiSurface { newRootClasses.add(clazz); } } - logger.debug("Including package {} and subpackages: {}", packageName, newRootClasses); + LOG.debug("Including package {} and subpackages: {}", packageName, newRootClasses); newRootClasses.addAll(rootClasses); return new ApiSurface(newRootClasses, patternsToPrune); @@ -124,7 +124,7 @@ public class ApiSurface { */ public ApiSurface includingClass(Class<?> clazz) { Set<Class<?>> newRootClasses = Sets.newHashSet(); - logger.debug("Including class {}", clazz); + LOG.debug("Including class {}", clazz); newRootClasses.add(clazz); newRootClasses.addAll(rootClasses); return new ApiSurface(newRootClasses, patternsToPrune); @@ -360,7 +360,7 @@ public class ApiSurface { * See {@link #addExposedTypes(Type, Class)}. 
*/ private void addExposedTypes(TypeToken type, Class<?> cause) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is the type in type token {}", type.getType(), type); addExposedTypes(type.getType(), cause); } @@ -372,19 +372,19 @@ public class ApiSurface { */ private void addExposedTypes(Type type, Class<?> cause) { if (type instanceof TypeVariable) { - logger.debug("Adding exposed types from {}, which is a type variable", type); + LOG.debug("Adding exposed types from {}, which is a type variable", type); addExposedTypes((TypeVariable) type, cause); } else if (type instanceof WildcardType) { - logger.debug("Adding exposed types from {}, which is a wildcard type", type); + LOG.debug("Adding exposed types from {}, which is a wildcard type", type); addExposedTypes((WildcardType) type, cause); } else if (type instanceof GenericArrayType) { - logger.debug("Adding exposed types from {}, which is a generic array type", type); + LOG.debug("Adding exposed types from {}, which is a generic array type", type); addExposedTypes((GenericArrayType) type, cause); } else if (type instanceof ParameterizedType) { - logger.debug("Adding exposed types from {}, which is a parameterized type", type); + LOG.debug("Adding exposed types from {}, which is a parameterized type", type); addExposedTypes((ParameterizedType) type, cause); } else if (type instanceof Class) { - logger.debug("Adding exposed types from {}, which is a class", type); + LOG.debug("Adding exposed types from {}, which is a class", type); addExposedTypes((Class) type, cause); } else { throw new IllegalArgumentException("Unknown implementation of Type"); @@ -402,7 +402,7 @@ public class ApiSurface { } visit(type); for (Type bound : type.getBounds()) { - logger.debug("Adding exposed types from {}, which is a type bound on {}", bound, type); + LOG.debug("Adding exposed types from {}, which is a type bound on {}", bound, type); addExposedTypes(bound, cause); } } @@ -414,14 +414,14 @@ public class ApiSurface 
{ private void addExposedTypes(WildcardType type, Class<?> cause) { visit(type); for (Type lowerBound : type.getLowerBounds()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is a type lower bound on wildcard type {}", lowerBound, type); addExposedTypes(lowerBound, cause); } for (Type upperBound : type.getUpperBounds()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is a type upper bound on wildcard type {}", upperBound, type); @@ -439,7 +439,7 @@ public class ApiSurface { return; } visit(type); - logger.debug( + LOG.debug( "Adding exposed types from {}, which is the component type on generic array type {}", type.getGenericComponentType(), type); @@ -467,13 +467,13 @@ public class ApiSurface { // The type parameters themselves may not be pruned, // for example with List<MyApiType> probably the // standard List is pruned, but MyApiType is not. - logger.debug( + LOG.debug( "Adding exposed types from {}, which is the raw type on parameterized type {}", type.getRawType(), type); addExposedTypes(type.getRawType(), cause); for (Type typeArg : type.getActualTypeArguments()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is a type argument on parameterized type {}", typeArg, type); @@ -501,14 +501,14 @@ public class ApiSurface { TypeToken<?> token = TypeToken.of(clazz); for (TypeToken<?> superType : token.getTypes()) { if (!superType.equals(token)) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is a super type token on {}", superType, clazz); addExposedTypes(superType, clazz); } } for (Class innerClass : clazz.getDeclaredClasses()) { if (exposed(innerClass.getModifiers())) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is an exposed inner class of {}", innerClass, clazz); @@ -517,12 +517,12 @@ public class ApiSurface { } for (Field field : clazz.getDeclaredFields()) { if (exposed(field.getModifiers())) { - logger.debug("Adding exposed types from {}, which is an exposed 
field on {}", field, clazz); + LOG.debug("Adding exposed types from {}, which is an exposed field on {}", field, clazz); addExposedTypes(field, clazz); } } for (Invokable invokable : getExposedInvokables(token)) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is an exposed invokable on {}", invokable, clazz); addExposedTypes(invokable, clazz); } @@ -531,21 +531,21 @@ public class ApiSurface { private void addExposedTypes(Invokable<?, ?> invokable, Class<?> cause) { addExposedTypes(invokable.getReturnType(), cause); for (Annotation annotation : invokable.getAnnotations()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is an annotation on invokable {}", annotation, invokable); addExposedTypes(annotation.annotationType(), cause); } for (Parameter parameter : invokable.getParameters()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is a parameter on invokable {}", parameter, invokable); addExposedTypes(parameter, cause); } for (TypeToken<?> exceptionType : invokable.getExceptionTypes()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is an exception type on invokable {}", exceptionType, invokable); @@ -554,13 +554,13 @@ public class ApiSurface { } private void addExposedTypes(Parameter parameter, Class<?> cause) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is the type of parameter {}", parameter.getType(), parameter); addExposedTypes(parameter.getType(), cause); for (Annotation annotation : parameter.getAnnotations()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is an annotation on parameter {}", annotation, parameter); @@ -571,7 +571,7 @@ public class ApiSurface { private void addExposedTypes(Field field, Class<?> cause) { addExposedTypes(field.getGenericType(), cause); for (Annotation annotation : field.getDeclaredAnnotations()) { - logger.debug( + LOG.debug( "Adding exposed types from {}, which is an annotation on field {}", annotation, 
field); addExposedTypes(annotation.annotationType(), cause); } http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java index a83784b..c27389d 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java @@ -154,7 +154,7 @@ import org.slf4j.LoggerFactory; */ @Experimental public class BigtableIO { - private static final Logger logger = LoggerFactory.getLogger(BigtableIO.class); + private static final Logger LOG = LoggerFactory.getLogger(BigtableIO.class); /** * Creates an uninitialized {@link BigtableIO.Read}. 
Before use, the {@code Read} must be @@ -307,7 +307,7 @@ public class BigtableIO { "Table %s does not exist", tableId); } catch (IOException e) { - logger.warn("Error checking whether table {} exists; proceeding.", tableId, e); + LOG.warn("Error checking whether table {} exists; proceeding.", tableId, e); } } @@ -521,7 +521,7 @@ public class BigtableIO { "Table %s does not exist", tableId); } catch (IOException e) { - logger.warn("Error checking whether table {} exists; proceeding.", tableId, e); + LOG.warn("Error checking whether table {} exists; proceeding.", tableId, e); } } @@ -612,7 +612,7 @@ public class BigtableIO { public void finishBundle(Context c) throws Exception { bigtableWriter.flush(); checkForFailures(); - logger.info("Wrote {} records", recordsWritten); + LOG.info("Wrote {} records", recordsWritten); } @Teardown @@ -658,7 +658,7 @@ public class BigtableIO { i + failures.size(), i, logEntry.toString()); - logger.error(message); + LOG.error(message); throw new IOException(message); } @@ -762,11 +762,11 @@ public class BigtableIO { long desiredBundleSizeBytes, List<SampleRowKeysResponse> sampleRowKeys) { // There are no regions, or no samples available. Just scan the entire range. if (sampleRowKeys.isEmpty()) { - logger.info("Not splitting source {} because no sample row keys are available.", this); + LOG.info("Not splitting source {} because no sample row keys are available.", this); return Collections.singletonList(this); } - logger.info( + LOG.info( "About to split into bundles of size {} with sampleRowKeys length {} first element {}", desiredBundleSizeBytes, sampleRowKeys.size(), @@ -832,7 +832,7 @@ public class BigtableIO { } List<BigtableSource> ret = splits.build(); - logger.info("Generated {} splits. First split: {}", ret.size(), ret.get(0)); + LOG.info("Generated {} splits. 
First split: {}", ret.size(), ret.get(0)); return ret; } @@ -912,7 +912,7 @@ public class BigtableIO { private List<BigtableSource> splitKeyRangeIntoBundleSizedSubranges( long sampleSizeBytes, long desiredBundleSizeBytes, ByteKeyRange range) { // Catch the trivial cases. Split is small enough already, or this is the last region. - logger.debug( + LOG.debug( "Subsplit for sampleSizeBytes {} and desiredBundleSizeBytes {}", sampleSizeBytes, desiredBundleSizeBytes); @@ -1010,7 +1010,7 @@ public class BigtableIO { @Override public void close() throws IOException { - logger.info("Closing reader after reading {} records.", recordsReturned); + LOG.info("Closing reader after reading {} records.", recordsReturned); if (reader != null) { reader.close(); reader = null; @@ -1033,11 +1033,11 @@ public class BigtableIO { try { splitKey = rangeTracker.getRange().interpolateKey(fraction); } catch (IllegalArgumentException e) { - logger.info( + LOG.info( "%s: Failed to interpolate key for fraction %s.", rangeTracker.getRange(), fraction); return null; } - logger.debug( + LOG.debug( "Proposing to split {} at fraction {} (key {})", rangeTracker, fraction, splitKey); BigtableSource primary = source.withEndKey(splitKey); BigtableSource residual = source.withStartKey(splitKey); http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java index 7ce4b4a..1a4937c 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java +++ 
b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java @@ -52,7 +52,7 @@ import org.slf4j.LoggerFactory; * service. */ class BigtableServiceImpl implements BigtableService { - private static final Logger logger = LoggerFactory.getLogger(BigtableService.class); + private static final Logger LOG = LoggerFactory.getLogger(BigtableService.class); public BigtableServiceImpl(BigtableOptions options) { this.options = options; @@ -75,7 +75,7 @@ class BigtableServiceImpl implements BigtableService { @Override public boolean tableExists(String tableId) throws IOException { if (!BigtableSession.isAlpnProviderEnabled()) { - logger.info( + LOG.info( "Skipping existence check for table {} (BigtableOptions {}) because ALPN is not" + " configured.", tableId, @@ -97,7 +97,7 @@ class BigtableServiceImpl implements BigtableService { String message = String.format( "Error checking whether table %s (BigtableOptions %s) exists", tableId, options); - logger.error(message, e); + LOG.error(message, e); throw new IOException(message, e); } } http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java index d09929d..aa93a22 100644 --- a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java +++ b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java @@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory; * Test on the JdbcIO. 
*/ public class JdbcIOTest implements Serializable { - private static final Logger LOGGER = LoggerFactory.getLogger(JdbcIOTest.class); + private static final Logger LOG = LoggerFactory.getLogger(JdbcIOTest.class); private static NetworkServerControl derbyServer; private static ClientDataSource dataSource; @@ -70,7 +70,7 @@ public class JdbcIOTest implements Serializable { port = socket.getLocalPort(); socket.close(); - LOGGER.info("Starting Derby database on {}", port); + LOG.info("Starting Derby database on {}", port); System.setProperty("derby.stream.error.file", "target/derby.log"); http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java index f539431..de2eb16 100644 --- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java +++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java @@ -93,7 +93,7 @@ import org.slf4j.LoggerFactory; */ public class MongoDbIO { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbIO.class); + private static final Logger LOG = LoggerFactory.getLogger(MongoDbIO.class); /** Read data from MongoDB. 
*/ public static Read read() { @@ -253,19 +253,19 @@ public class MongoDbIO { splitVectorCommand.append("keyPattern", new BasicDBObject().append("_id", 1)); splitVectorCommand.append("force", false); // maxChunkSize is the Mongo partition size in MB - LOGGER.debug("Splitting in chunk of {} MB", desiredBundleSizeBytes / 1024 / 1024); + LOG.debug("Splitting in chunk of {} MB", desiredBundleSizeBytes / 1024 / 1024); splitVectorCommand.append("maxChunkSize", desiredBundleSizeBytes / 1024 / 1024); Document splitVectorCommandResult = mongoDatabase.runCommand(splitVectorCommand); splitKeys = (List<Document>) splitVectorCommandResult.get("splitKeys"); List<BoundedSource<Document>> sources = new ArrayList<>(); if (splitKeys.size() < 1) { - LOGGER.debug("Split keys is low, using an unique source"); + LOG.debug("Split keys is low, using an unique source"); sources.add(this); return sources; } - LOGGER.debug("Number of splits is {}", splitKeys.size()); + LOG.debug("Number of splits is {}", splitKeys.size()); for (String shardFilter : splitKeysToFilters(splitKeys, spec.filter())) { sources.add(new BoundedMongoDbSource(spec.withFilter(shardFilter))); } @@ -392,12 +392,12 @@ public class MongoDbIO { cursor.close(); } } catch (Exception e) { - LOGGER.warn("Error closing MongoDB cursor", e); + LOG.warn("Error closing MongoDB cursor", e); } try { client.close(); } catch (Exception e) { - LOGGER.warn("Error closing MongoDB client", e); + LOG.warn("Error closing MongoDB client", e); } } http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBGridFSIOTest.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBGridFSIOTest.java b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBGridFSIOTest.java index 5061487..df05c93 100644 --- 
a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBGridFSIOTest.java +++ b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBGridFSIOTest.java @@ -88,7 +88,7 @@ import org.slf4j.LoggerFactory; * Test on the MongoDbGridFSIO. */ public class MongoDBGridFSIOTest implements Serializable { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDBGridFSIOTest.class); + private static final Logger LOG = LoggerFactory.getLogger(MongoDBGridFSIOTest.class); private static final String MONGODB_LOCATION = "target/mongodb"; private static final String DATABASE = "gridfs"; @@ -105,7 +105,7 @@ public class MongoDBGridFSIOTest implements Serializable { try (ServerSocket serverSocket = new ServerSocket(0)) { port = serverSocket.getLocalPort(); } - LOGGER.info("Starting MongoDB embedded instance on {}", port); + LOG.info("Starting MongoDB embedded instance on {}", port); try { Files.forceDelete(new File(MONGODB_LOCATION)); } catch (Exception e) { @@ -127,7 +127,7 @@ public class MongoDBGridFSIOTest implements Serializable { mongodExecutable = mongodStarter.prepare(mongodConfig); mongodProcess = mongodExecutable.start(); - LOGGER.info("Insert test data"); + LOG.info("Insert test data"); Mongo client = new Mongo("localhost", port); DB database = client.getDB(DATABASE); @@ -174,7 +174,7 @@ public class MongoDBGridFSIOTest implements Serializable { @AfterClass public static void stop() throws Exception { - LOGGER.info("Stopping MongoDB instance"); + LOG.info("Stopping MongoDB instance"); mongodProcess.stop(); mongodExecutable.stop(); } http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/4cf59170/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDbIOTest.java ---------------------------------------------------------------------- diff --git a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDbIOTest.java 
b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDbIOTest.java index 129e81c..5faa618 100644 --- a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDbIOTest.java +++ b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDbIOTest.java @@ -67,7 +67,7 @@ import org.slf4j.LoggerFactory; */ public class MongoDbIOTest implements Serializable { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbIOTest.class); + private static final Logger LOG = LoggerFactory.getLogger(MongoDbIOTest.class); private static final String MONGODB_LOCATION = "target/mongodb"; private static final String DATABASE = "beam"; @@ -92,7 +92,7 @@ public class MongoDbIOTest implements Serializable { @Before public void setup() throws Exception { - LOGGER.info("Starting MongoDB embedded instance on {}", port); + LOG.info("Starting MongoDB embedded instance on {}", port); try { Files.forceDelete(new File(MONGODB_LOCATION)); } catch (Exception e) { @@ -114,7 +114,7 @@ public class MongoDbIOTest implements Serializable { mongodExecutable = mongodStarter.prepare(mongodConfig); mongodProcess = mongodExecutable.start(); - LOGGER.info("Insert test data"); + LOG.info("Insert test data"); MongoClient client = new MongoClient("localhost", port); MongoDatabase database = client.getDatabase(DATABASE); @@ -135,7 +135,7 @@ public class MongoDbIOTest implements Serializable { @After public void stop() throws Exception { - LOGGER.info("Stopping MongoDB instance"); + LOG.info("Stopping MongoDB instance"); mongodProcess.stop(); mongodExecutable.stop(); }