[ https://issues.apache.org/jira/browse/KAFKA-4423?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16483551#comment-16483551 ]

ASF GitHub Bot commented on KAFKA-4423:
---------------------------------------

ijuma closed pull request #5046: KAFKA-4423: Drop support for Java 7 (KIP-118) and update deps
URL: https://github.com/apache/kafka/pull/5046
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

diff --git a/README.md b/README.md
index 9371e482169..18cb03e9880 100644
--- a/README.md
+++ b/README.md
@@ -4,9 +4,9 @@ See our [web site](http://kafka.apache.org) for details on the project.
 
 You need to have [Gradle](http://www.gradle.org/installation) and [Java](http://www.oracle.com/technetwork/java/javase/downloads/index.html) installed.
 
-Kafka requires Gradle 3.0 or higher.
+Kafka requires Gradle 4.5 or higher.
 
-Java 7 should be used for building in order to support both Java 7 and Java 8 at runtime.
+Java 8 should be used for building in order to support both Java 8 and Java 10 at runtime.
 
 ### First bootstrap and download the wrapper ###
     cd kafka_source_dir
@@ -85,8 +85,6 @@ You can pass either the major version (eg 2.11) or the full version (eg 2.11.12)
     ./gradlew -PscalaVersion=2.11 test
     ./gradlew -PscalaVersion=2.11 releaseTarGz
 
-Scala 2.12.x requires Java 8.
-
 ### Running a task for a specific project ###
 This is for `core`, `examples` and `clients`
 
diff --git a/build.gradle b/build.gradle
index 31026d91e1e..3e4388a288c 100644
--- a/build.gradle
+++ b/build.gradle
@@ -26,9 +26,9 @@ buildscript {
     // For Apache Rat plugin to ignore non-Git files
     classpath "org.ajoberstar:grgit:1.9.3"
     classpath 'com.github.ben-manes:gradle-versions-plugin:0.17.0'
-    classpath 'org.scoverage:gradle-scoverage:2.1.0'
-    classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2'
-    classpath 'org.owasp:dependency-check-gradle:3.1.1'
+    classpath 'org.scoverage:gradle-scoverage:2.3.0'
+    classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
+    classpath 'org.owasp:dependency-check-gradle:3.1.2'
   }
 }
 
@@ -67,24 +67,19 @@ allprojects {
     }
   }
 
-  if (JavaVersion.current().isJava8Compatible()) {
-    tasks.withType(Javadoc) {
-      // disable the crazy super-strict doclint tool in Java 8
-      // noinspection SpellCheckingInspection
-      options.addStringOption('Xdoclint:none', '-quiet')
-    }
+  tasks.withType(Javadoc) {
+    // disable the crazy super-strict doclint tool in Java 8
+    // noinspection SpellCheckingInspection
+    options.addStringOption('Xdoclint:none', '-quiet')
   }
 
 }
 
 ext {
-  gradleVersion = "4.5.1"
+  gradleVersion = "4.7"
+  minJavaVersion = "8"
   buildVersionFileName = "kafka-version.properties"
 
-  maxPermSizeArgs = []
-  if (!JavaVersion.current().isJava8Compatible())
-    maxPermSizeArgs += '-XX:MaxPermSize=512m'
-
  userMaxForks = project.hasProperty('maxParallelForks') ? maxParallelForks.toInteger() : null
 
   skipSigning = project.hasProperty('skipSigning') && skipSigning.toBoolean()
@@ -145,20 +140,17 @@ subprojects {
   if (!JavaVersion.current().isJava9Compatible())
     apply plugin: 'findbugs'
 
-  sourceCompatibility = 1.7
-  targetCompatibility = 1.7
+  sourceCompatibility = minJavaVersion
+  targetCompatibility = minJavaVersion
 
   compileJava {
     options.encoding = 'UTF-8'
-    options.compilerArgs << "-Xlint:deprecation"
-    // -Xlint:unchecked is too buggy in Java 7, so we only enable for Java 8 or higher
-    if (JavaVersion.current().isJava8Compatible())
-      options.compilerArgs << "-Xlint:unchecked"
+    options.compilerArgs << "-Xlint:deprecation,unchecked"
    // --release is the recommended way to select the target release, but it's only supported in Java 9 so we also
    // set --source and --target via `sourceCompatibility` and `targetCompatibility`. If/when Gradle supports `--release`
    // natively (https://github.com/gradle/gradle/issues/2510), we should switch to that.
     if (JavaVersion.current().isJava9Compatible())
-      options.compilerArgs << "--release" << "7"
+      options.compilerArgs << "--release" << minJavaVersion
   }
 
   uploadArchives {
@@ -202,7 +194,6 @@ subprojects {
 
     minHeapSize = "256m"
     maxHeapSize = "2048m"
-    jvmArgs = maxPermSizeArgs
 
     testLogging {
       events = userTestLoggingEvents ?: testLoggingEvents
@@ -217,7 +208,6 @@ subprojects {
 
     minHeapSize = "256m"
     maxHeapSize = "2048m"
-    jvmArgs = maxPermSizeArgs
 
     testLogging {
       events = userTestLoggingEvents ?: testLoggingEvents
@@ -236,7 +226,6 @@ subprojects {
 
     minHeapSize = "256m"
     maxHeapSize = "2048m"
-    jvmArgs = maxPermSizeArgs
 
     testLogging {
       events = userTestLoggingEvents ?: testLoggingEvents
@@ -343,7 +332,7 @@ subprojects {
       "-Xlint:private-shadow",
       "-Xlint:stars-align",
       "-Xlint:type-parameter-shadow",
-      "-Xlint:unsound-match",
+      "-Xlint:unsound-match"
     ]
 
     if (versions.baseScala != '2.11') {
@@ -355,15 +344,14 @@ subprojects {
 
     configure(scalaCompileOptions.forkOptions) {
       memoryMaximumSize = '1g'
-      jvmArgs = ['-Xss2m'] + maxPermSizeArgs
+      jvmArgs = ['-Xss2m']
     }
   }
 
   checkstyle {
     configFile = new File(rootDir, "checkstyle/checkstyle.xml")
    configProperties = [importControlFile: "$rootDir/checkstyle/import-control.xml"]
-    // version 7.x requires Java 8
-    toolVersion = '6.19'
+    toolVersion = '8.10'
   }
   test.dependsOn('checkstyleMain', 'checkstyleTest')
 
@@ -885,6 +873,7 @@ project(':tools') {
 
     compile libs.jacksonJaxrsJsonProvider
     compile libs.jerseyContainerServlet
+    compile libs.jerseyHk2
     compile libs.jaxbApi // Jersey dependency that was available in the JDK before Java 9
     compile libs.activation // Jersey dependency that was available in the JDK before Java 9
     compile libs.jettyServer
@@ -1335,6 +1324,7 @@ project(':connect:runtime') {
 
     compile libs.jacksonJaxrsJsonProvider
     compile libs.jerseyContainerServlet
+    compile libs.jerseyHk2
     compile libs.jaxbApi // Jersey dependency that was available in the JDK before Java 9
     compile libs.activation // Jersey dependency that was available in the JDK before Java 9
     compile libs.jettyServer
diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml
index 65f294f208a..1afb83ac636 100644
--- a/checkstyle/import-control.xml
+++ b/checkstyle/import-control.xml
@@ -207,6 +207,8 @@
     <allow pkg="org.apache.kafka.clients"/>
     <allow pkg="org.apache.kafka.clients.producer" exact-match="true"/>
     <allow pkg="org.apache.kafka.clients.consumer" exact-match="true"/>
+    <!-- Temporary until EosTestDriver migrates to the Java AdminClient -->
+    <allow pkg="kafka.admin" exact-match="true"/>
 
     <allow pkg="org.apache.kafka.streams"/>
 
diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml
index 64258bf7b07..ba48c38cb28 100644
--- a/checkstyle/suppressions.xml
+++ b/checkstyle/suppressions.xml
@@ -57,7 +57,7 @@
               files="AbstractRequest.java|KerberosLogin.java|WorkerSinkTaskTest.java|TransactionManagerTest.java"/>
 
     <suppress checks="NPathComplexity"
-              files="(BufferPool|MetricName|Node|ConfigDef|SslTransportLayer|MetadataResponse|KerberosLogin|Selector|Sender|Serdes|Agent|Values|PluginUtils|MiniTrogdorCluster).java"/>
+              files="(BufferPool|Fetcher|MetricName|Node|ConfigDef|RecordBatch|SslFactory|SslTransportLayer|MetadataResponse|KerberosLogin|Selector|Sender|Serdes|TokenInformation|Agent|Values|PluginUtils|MiniTrogdorCluster).java"/>
 
     <!-- clients tests -->
     <suppress checks="ClassDataAbstractionCoupling"
@@ -72,6 +72,9 @@
     <suppress checks="JavaNCSS"
               files="RequestResponseTest.java"/>
 
+    <suppress checks="NPathComplexity"
+              files="MemoryRecordsTest.java"/>
+
     <!-- Connect -->
     <suppress checks="ClassFanOutComplexity"
               files="DistributedHerder(|Test).java"/>
@@ -111,17 +114,8 @@
               files="Values.java"/>
 
     <suppress checks="NPathComplexity"
-              files="ConnectRecord.java"/>
-    <suppress checks="NPathComplexity"
-              files="ConnectSchema.java"/>
-    <suppress checks="NPathComplexity"
-              files="FileStreamSourceTask.java"/>
-    <suppress checks="NPathComplexity"
-              files="JsonConverter.java"/>
-    <suppress checks="NPathComplexity"
-              files="DistributedHerder.java"/>
-    <suppress checks="NPathComplexity"
-              files="ConnectHeaders.java"/>
+              files="(AbstractStatus|ConnectHeaders|ConnectRecord|ConnectSchema|DistributedHerder|FileStreamSourceTask|JsonConverter|KafkaConfigBackingStore).java"/>
+
     <suppress checks="MethodLength"
               files="Values.java"/>
 
@@ -155,11 +149,7 @@
               files="StreamsPartitionAssignor.java"/>
 
     <suppress checks="NPathComplexity"
-              files="ProcessorStateManager.java"/>
-    <suppress checks="NPathComplexity"
-              files="StreamsPartitionAssignor.java"/>
-    <suppress checks="NPathComplexity"
-              files="StreamThread.java"/>
+              files="(ProcessorStateManager|InternalTopologyBuilder|StreamsPartitionAssignor|StreamThread).java"/>
 
     <!-- Streams tests -->
     <suppress checks="ClassFanOutComplexity"
@@ -189,7 +179,7 @@
               files="SmokeTestDriver.java"/>
 
     <suppress checks="NPathComplexity"
-              files="KStreamKStreamJoinTest.java|SmokeTestDriver.java"/>
+              files="EosTestDriver|KStreamKStreamJoinTest.java|SmokeTestDriver.java"/>
     <suppress checks="NPathComplexity"
               files="KStreamKStreamLeftJoinTest.java"/>
 
diff --git a/clients/src/main/java/org/apache/kafka/common/metrics/Metrics.java b/clients/src/main/java/org/apache/kafka/common/metrics/Metrics.java
index d456fedcc80..a6da9f90397 100644
--- a/clients/src/main/java/org/apache/kafka/common/metrics/Metrics.java
+++ b/clients/src/main/java/org/apache/kafka/common/metrics/Metrics.java
@@ -96,7 +96,7 @@ public Metrics(Time time) {
      * Expiration of Sensors is disabled.
      */
     public Metrics(MetricConfig defaultConfig, Time time) {
-      this(defaultConfig, new ArrayList<MetricsReporter>(0), time);
+        this(defaultConfig, new ArrayList<MetricsReporter>(0), time);
     }
 
 
diff --git a/clients/src/main/java/org/apache/kafka/common/record/MemoryRecords.java b/clients/src/main/java/org/apache/kafka/common/record/MemoryRecords.java
index eb4e31b6e58..be7ea6214b2 100644
--- a/clients/src/main/java/org/apache/kafka/common/record/MemoryRecords.java
+++ b/clients/src/main/java/org/apache/kafka/common/record/MemoryRecords.java
@@ -584,7 +584,7 @@ public static MemoryRecords withRecords(byte magic, long initialOffset, Compress
     public static MemoryRecords withRecords(byte magic, long initialOffset, CompressionType compressionType,
                                             TimestampType timestampType, long producerId, short producerEpoch,
                                             int baseSequence, int partitionLeaderEpoch, boolean isTransactional,
-                                            SimpleRecord ... records) {
+                                            SimpleRecord... records) {
         if (records.length == 0)
             return MemoryRecords.EMPTY;
         int sizeEstimate = AbstractRecords.estimateSizeInBytes(magic, compressionType, Arrays.asList(records));
diff --git a/clients/src/main/java/org/apache/kafka/common/utils/Shell.java b/clients/src/main/java/org/apache/kafka/common/utils/Shell.java
index ebfd0bacc5c..a9b93ec8aa1 100644
--- a/clients/src/main/java/org/apache/kafka/common/utils/Shell.java
+++ b/clients/src/main/java/org/apache/kafka/common/utils/Shell.java
@@ -251,7 +251,7 @@ public String toString() {
      * @param cmd shell command to execute.
      * @return the output of the executed command.
      */
-    public static String execCommand(String ... cmd) throws IOException {
+    public static String execCommand(String... cmd) throws IOException {
         return execCommand(cmd, -1);
     }
 
diff --git a/clients/src/main/java/org/apache/kafka/common/utils/Utils.java b/clients/src/main/java/org/apache/kafka/common/utils/Utils.java
index f02630831a6..ebe87ba3e27 100755
--- a/clients/src/main/java/org/apache/kafka/common/utils/Utils.java
+++ b/clients/src/main/java/org/apache/kafka/common/utils/Utils.java
@@ -151,7 +151,7 @@ public static int abs(int n) {
      * @param rest The remaining values to compare
      * @return The minimum of all passed values
      */
-    public static long min(long first, long ... rest) {
+    public static long min(long first, long... rest) {
         long min = first;
         for (long r : rest) {
             if (r < min)
@@ -166,7 +166,7 @@ public static long min(long first, long ... rest) {
      * @param rest The remaining values to compare
      * @return The maximum of all passed values
      */
-    public static long max(long first, long ... rest) {
+    public static long max(long first, long... rest) {
         long max = first;
         for (long r : rest) {
             if (r > max)
diff --git a/clients/src/test/java/org/apache/kafka/common/record/FileLogInputStreamTest.java b/clients/src/test/java/org/apache/kafka/common/record/FileLogInputStreamTest.java
index 77aaae86f5f..8a955972b5c 100644
--- a/clients/src/test/java/org/apache/kafka/common/record/FileLogInputStreamTest.java
+++ b/clients/src/test/java/org/apache/kafka/common/record/FileLogInputStreamTest.java
@@ -221,7 +221,7 @@ public void testNextBatchSelectionWithZeroedParams() throws IOException {
     }
 
     private void assertProducerData(RecordBatch batch, long producerId, short producerEpoch, int baseSequence,
-                                    boolean isTransactional, SimpleRecord ... records) {
+                                    boolean isTransactional, SimpleRecord... records) {
         assertEquals(producerId, batch.producerId());
         assertEquals(producerEpoch, batch.producerEpoch());
         assertEquals(baseSequence, batch.baseSequence());
@@ -237,7 +237,7 @@ private void assertNoProducerData(RecordBatch batch) {
         assertFalse(batch.isTransactional());
     }
 
-    private void assertGenericRecordBatchData(RecordBatch batch, long baseOffset, long maxTimestamp, SimpleRecord ... records) {
+    private void assertGenericRecordBatchData(RecordBatch batch, long baseOffset, long maxTimestamp, SimpleRecord... records) {
         assertEquals(magic, batch.magic());
         assertEquals(compression, batch.compressionType());
 
diff --git a/connect/api/src/test/java/org/apache/kafka/connect/data/ValuesTest.java b/connect/api/src/test/java/org/apache/kafka/connect/data/ValuesTest.java
index c2caf08d5f9..dcfa3cf5c7e 100644
--- a/connect/api/src/test/java/org/apache/kafka/connect/data/ValuesTest.java
+++ b/connect/api/src/test/java/org/apache/kafka/connect/data/ValuesTest.java
@@ -367,7 +367,7 @@ protected void assertParsed(String input, String... expectedTokens) {
         assertConsumable(parser, expectedTokens);
     }
 
-    protected void assertConsumable(Parser parser, String ... expectedTokens) {
+    protected void assertConsumable(Parser parser, String... expectedTokens) {
         for (String expectedToken : expectedTokens) {
             if (!expectedToken.trim().isEmpty()) {
                 int position = parser.mark();
@@ -459,4 +459,4 @@ protected void assertRoundTrip(Schema schema, Schema currentSchema, Object value
         }
     }
 
-}
\ No newline at end of file
+}
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java
index 51222e5dd70..f9bac0da75b 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java
@@ -114,7 +114,7 @@ protected static void configureSslContextFactoryAlgorithms(SslContextFactory ssl
         if (sslCipherSuites != null)
             ssl.setIncludeCipherSuites(sslCipherSuites.toArray(new String[sslCipherSuites.size()]));
 
-        ssl.setSslKeyManagerFactoryAlgorithm((String) getOrDefault(sslConfigValues, SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG, SslConfigs.DEFAULT_SSL_KEYMANGER_ALGORITHM));
+        ssl.setKeyManagerFactoryAlgorithm((String) getOrDefault(sslConfigValues, SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG, SslConfigs.DEFAULT_SSL_KEYMANGER_ALGORITHM));
 
         String sslSecureRandomImpl = (String) sslConfigValues.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG);
         if (sslSecureRandomImpl != null)
diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/util/SSLUtilsTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/util/SSLUtilsTest.java
index 422b2b0cb68..b8b7114cf23 100644
--- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/util/SSLUtilsTest.java
+++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/util/SSLUtilsTest.java
@@ -80,8 +80,8 @@ public void testCreateSslContextFactory() {
         DistributedConfig config = new DistributedConfig(configMap);
         SslContextFactory ssl = SSLUtils.createSslContextFactory(config);
 
-        Assert.assertEquals("/path/to/keystore", ssl.getKeyStorePath());
-        Assert.assertEquals("/path/to/truststore", ssl.getTrustStore());
+        Assert.assertEquals("file:///path/to/keystore", ssl.getKeyStorePath());
+        Assert.assertEquals("file:///path/to/truststore", ssl.getTrustStorePath());
         Assert.assertEquals("SunJSSE", ssl.getProvider());
         Assert.assertArrayEquals(new String[] {"SSL_RSA_WITH_RC4_128_SHA", "SSL_RSA_WITH_RC4_128_MD5"}, ssl.getIncludeCipherSuites());
         Assert.assertEquals("SHA1PRNG", ssl.getSecureRandomAlgorithm());
@@ -90,7 +90,7 @@ public void testCreateSslContextFactory() {
         Assert.assertEquals("JKS", ssl.getTrustStoreType());
         Assert.assertEquals("TLS", ssl.getProtocol());
         Assert.assertArrayEquals(new String[] {"TLSv1.2", "TLSv1.1", "TLSv1"}, ssl.getIncludeProtocols());
-        Assert.assertEquals("SunX509", ssl.getSslKeyManagerFactoryAlgorithm());
+        Assert.assertEquals("SunX509", ssl.getKeyManagerFactoryAlgorithm());
         Assert.assertEquals("PKIX", ssl.getTrustManagerFactoryAlgorithm());
     }
 
@@ -118,7 +118,7 @@ public void testCreateSslContextFactoryDefaultValues() {
         Assert.assertEquals(SslConfigs.DEFAULT_SSL_TRUSTSTORE_TYPE, ssl.getTrustStoreType());
         Assert.assertEquals(SslConfigs.DEFAULT_SSL_PROTOCOL, ssl.getProtocol());
         Assert.assertArrayEquals(Arrays.asList(SslConfigs.DEFAULT_SSL_ENABLED_PROTOCOLS.split("\\s*,\\s*")).toArray(), ssl.getIncludeProtocols());
-        Assert.assertEquals(SslConfigs.DEFAULT_SSL_KEYMANGER_ALGORITHM, ssl.getSslKeyManagerFactoryAlgorithm());
+        Assert.assertEquals(SslConfigs.DEFAULT_SSL_KEYMANGER_ALGORITHM, ssl.getKeyManagerFactoryAlgorithm());
         Assert.assertEquals(SslConfigs.DEFAULT_SSL_TRUSTMANAGER_ALGORITHM, ssl.getTrustManagerFactoryAlgorithm());
         Assert.assertFalse(ssl.getNeedClientAuth());
         Assert.assertFalse(ssl.getWantClientAuth());
diff --git a/docs/ops.html b/docs/ops.html
index 450a268a2a1..95b9a960172 100644
--- a/docs/ops.html
+++ b/docs/ops.html
@@ -639,9 +639,7 @@ <h3><a id="java" href="#java">6.4 Java Version</a></h3>
 
   From a security perspective, we recommend you use the latest released version of JDK 1.8 as older freely available versions have disclosed security vulnerabilities.
 
-  LinkedIn is currently running JDK 1.8 u5 (looking to upgrade to a newer version) with the G1 collector. If you decide to use the G1 collector (the current default) and you are still on JDK 1.7, make sure you are on u51 or newer. LinkedIn tried out u21 in testing, but they had a number of problems with the GC implementation in that version.
-
-  LinkedIn's tuning looks like this:
+  LinkedIn is currently running JDK 1.8 u5 (looking to upgrade to a newer version) with the G1 collector. LinkedIn's tuning looks like this:
   <pre class="brush: text;">
   -Xmx6g -Xms6g -XX:MetaspaceSize=96m -XX:+UseG1GC
   -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:G1HeapRegionSize=16M
diff --git a/docs/upgrade.html b/docs/upgrade.html
index 00f7ffe758a..7ae69ce5d1c 100644
--- a/docs/upgrade.html
+++ b/docs/upgrade.html
@@ -66,6 +66,7 @@ <h4><a id="upgrade_2_0_0" href="#upgrade_2_0_0">Upgrading from 0.8.x, 0.9.x, 0.1
 <h5><a id="upgrade_200_notable" href="#upgrade_200_notable">Notable changes in 2.0.0</a></h5>
 <ul>
     <li><a href="https://cwiki.apache.org/confluence/x/oYtjB">KIP-186</a> increases the default offset retention time from 1 day to 7 days. This makes it less likely to "lose" offsets in an application that commits infrequently. It also increases the active set of offsets and therefore can increase memory usage on the broker. Note that the console consumer currently enables offset commit by default and can be the source of a large number of offsets which this change will now preserve for 7 days instead of 1. You can preserve the existing behavior by setting the broker config <code>offsets.retention.minutes</code> to 1440.</li>
+    <li>Support for Java 7 has been dropped, Java 8 is now the minimum version required.</li>
    <li><a href="https://issues.apache.org/jira/browse/KAFKA-5674">KAFKA-5674</a> extends the lower interval of <code>max.connections.per.ip minimum</code> to zero and therefore allows IP-based filtering of inbound connections.</li>
    <li><a href="https://cwiki.apache.org/confluence/display/KAFKA/KIP-272%3A+Add+API+version+tag+to+broker%27s+RequestsPerSec+metric">KIP-272</a>
        added API version tag to the metric <code>kafka.network:type=RequestMetrics,name=RequestsPerSec,request={Produce|FetchConsumer|FetchFollower|...}</code>.
diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle
index db056b76757..ce2db721025 100644
--- a/gradle/dependencies.gradle
+++ b/gradle/dependencies.gradle
@@ -22,9 +22,9 @@ ext {
   libs = [:]
   
   // Enabled by default when commands like `testAll` are invoked
-  defaultScalaVersions = [ '2.11' ]
-  // Available if -PscalaVersion is used, this is necessary because Scala 2.12 requires Java 8 while Kafka is usually
-  // built with Java 7
+  defaultScalaVersions = [ '2.11', '2.12' ]
+  // Available if -PscalaVersion is used. This is useful when we want to support a Scala version that has
+  // a higher minimum Java requirement than Kafka. This was previously the case for Scala 2.12 and Java 7.
   availableScalaVersions = [ '2.11', '2.12' ]
 }
 
@@ -48,17 +48,17 @@ versions["baseScala"] = versions.scala.substring(0, versions.scala.lastIndexOf("
 
 versions += [
   activation: "1.1.1",
-  apacheda: "1.0.0",
+  apacheda: "1.0.1",
   apacheds: "2.0.0-M24",
   argparse4j: "0.7.0",
   bcpkix: "1.59",
   easymock: "3.6",
   jackson: "2.9.5",
-  jetty: "9.2.24.v20180105",
-  jersey: "2.25.1",
-  jmh: "1.20",
+  jetty: "9.4.10.v20180503",
+  jersey: "2.27",
+  jmh: "1.21",
   log4j: "1.2.17",
-  scalaLogging: "3.8.0",
+  scalaLogging: "3.9.0",
   jaxb: "2.3.0",
   jfreechart: "1.0.0",
   jopt: "5.0.4",
@@ -106,6 +106,7 @@ libs += [
   jettyServlet: "org.eclipse.jetty:jetty-servlet:$versions.jetty",
   jettyServlets: "org.eclipse.jetty:jetty-servlets:$versions.jetty",
  jerseyContainerServlet: "org.glassfish.jersey.containers:jersey-container-servlet:$versions.jersey",
+  jerseyHk2: "org.glassfish.jersey.inject:jersey-hk2:$versions.jersey",
   jfreechart: "1.0.0",
   jmhCore: "org.openjdk.jmh:jmh-core:$versions.jmh",
   jmhCoreBenchmarks: "org.openjdk.jmh:jmh-core-benchmarks:$versions.jmh",
diff --git a/release.py b/release.py
index 0917184a44b..7eff6cc0d1b 100755
--- a/release.py
+++ b/release.py
@@ -272,7 +272,7 @@ def command_stage_docs():
 
 if not user_ok("""Requirements:
 1. Updated docs to reference the new release version where appropriate.
-2. JDK7 and JDK8 compilers and libraries
+2. JDK8 compilers and libraries
3. Your Apache ID, already configured with SSH keys on id.apache.org and SSH keys available in this shell session
4. All issues in the target release resolved with valid resolutions (if not, this script will report the problematic JIRAs)
5. A GPG key used for signing the release. This key should have been added to public Apache servers and the KEYS file on the Kafka site
@@ -363,7 +363,6 @@ def command_stage_docs():
 # Prereq checks
apache_id = get_pref(prefs, 'apache_id', lambda: raw_input("Enter your apache username: "))
 
-jdk7_env = get_jdk(prefs, 7)
 jdk8_env = get_jdk(prefs, 8)
 
 
@@ -448,10 +447,8 @@ def select_gpg_key():
            }
cmd("Creating source archive", "git archive --format tar.gz --prefix kafka-%(release_version)s-src/ -o %(artifacts_dir)s/kafka-%(release_version)s-src.tgz %(rc_tag)s" % params)
 
-cmd("Building artifacts", "gradle", cwd=kafka_dir, env=jdk7_env)
-cmd("Building artifacts", "./gradlew clean releaseTarGzAll aggregatedJavadoc", cwd=kafka_dir, env=jdk7_env)
-# we need extra cmd to build 2.12 with jdk8 specifically
-cmd("Building artifacts for Scala 2.12", "./gradlew releaseTarGz -PscalaVersion=2.12", cwd=kafka_dir, env=jdk8_env)
+cmd("Building artifacts", "gradle", cwd=kafka_dir, env=jdk8_env)
+cmd("Building artifacts", "./gradlew clean releaseTarGzAll aggregatedJavadoc", cwd=kafka_dir, env=jdk8_env)
 cmd("Copying artifacts", "cp %s/core/build/distributions/* %s" % (kafka_dir, artifacts_dir), shell=True)
 cmd("Copying artifacts", "cp -R %s/build/docs/javadoc %s" % (kafka_dir, artifacts_dir))
 
@@ -497,9 +494,9 @@ def select_gpg_key():
     contents = f.read()
if not user_ok("Going to build and upload mvn artifacts based on these settings:\n" + contents + '\nOK (y/n)?: '):
    fail("Retry again later")
-cmd("Building and uploading archives", "./gradlew uploadArchivesAll", cwd=kafka_dir, env=jdk7_env)
+cmd("Building and uploading archives", "./gradlew uploadArchivesAll", cwd=kafka_dir, env=jdk8_env)
 cmd("Building and uploading archives", "./gradlew uploadCoreArchives_2_12 -PscalaVersion=2.12", cwd=kafka_dir, env=jdk8_env)
-cmd("Building and uploading archives", "mvn deploy -Pgpg-signing", cwd=streams_quickstart_dir, env=jdk7_env)
+cmd("Building and uploading archives", "mvn deploy -Pgpg-signing", cwd=streams_quickstart_dir, env=jdk8_env)
 
 release_notification_props = { 'release_version': release_version,
                                'rc': rc,
@@ -593,7 +590,7 @@ def select_gpg_key():
 http://kafka.apache.org/%(docs_version)s/protocol.html
 
 * Successful Jenkins builds for the %(dev_branch)s branch:
-Unit/integration tests: https://builds.apache.org/job/kafka-%(dev_branch)s-jdk7/<BUILD NUMBER>/
+Unit/integration tests: https://builds.apache.org/job/kafka-%(dev_branch)s-jdk8/<BUILD NUMBER>/
System tests: https://jenkins.confluent.io/job/system-test-kafka-%(dev_branch)s/<BUILD_NUMBER>/
 
 /**************************************
diff --git a/vagrant/base.sh b/vagrant/base.sh
index 33ee056cf43..c16225d0568 100755
--- a/vagrant/base.sh
+++ b/vagrant/base.sh
@@ -100,8 +100,9 @@ popd
 popd
 
 # Test multiple Kafka versions
-# we want to use the latest Scala version per Kafka version
-# however, we cannot pull in Scala 2.12 builds atm, because Scala 2.12 requires Java 8, but we use Java 7 to run the system tests
+# We want to use the latest Scala version per Kafka version
+# Previously we could not pull in Scala 2.12 builds, because Scala 2.12 requires Java 8 and we were running the system
+# tests with Java 7. We have since switched to Java 8, so 2.0.0 and later use Scala 2.12.
 get_kafka 0.8.2.2 2.11
 chmod a+rw /opt/kafka-0.8.2.2
 get_kafka 0.9.0.1 2.11
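
For readers skimming the build.gradle hunks above, the Java 8 baseline they introduce boils down to a small pattern: declare the minimum Java version once, apply it as source/target compatibility, and pass `--release` only when the compiler itself is Java 9+. The snippet below is a minimal standalone sketch of that pattern, not the actual Kafka build script; the `minJavaVersion` property simply mirrors the name used in the diff:

    // Minimal sketch of the Java 8 baseline pattern from the hunks above
    // (hypothetical standalone Gradle build, not Kafka's real build.gradle).
    ext {
      minJavaVersion = "8"
    }

    allprojects {
      apply plugin: 'java'

      sourceCompatibility = minJavaVersion
      targetCompatibility = minJavaVersion

      tasks.withType(JavaCompile) {
        options.encoding = 'UTF-8'
        options.compilerArgs << "-Xlint:deprecation,unchecked"
        // `--release` is only understood by javac 9+; on Java 8 the -source/-target
        // flags derived from the compatibility settings above are sufficient.
        if (JavaVersion.current().isJava9Compatible())
          options.compilerArgs << "--release" << minJavaVersion
      }
    }
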


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


> Drop support for Java 7
> -----------------------
>
>                 Key: KAFKA-4423
>                 URL: https://issues.apache.org/jira/browse/KAFKA-4423
>             Project: Kafka
>          Issue Type: Task
>            Reporter: Ismael Juma
>            Assignee: Ismael Juma
>            Priority: Major
>              Labels: kip
>             Fix For: 2.0.0
>
>
> Java 7 was released in July 2011 and has not received public updates since 
> April 2015. Java 8 was released in March 2014, and Java 9 is scheduled to be 
> released in July 2017.
> The last public release of JDK 7 by Oracle contains a large number of known 
> security vulnerabilities, Java 8 introduces a number of compelling features, 
> and we will soon have to support Java 9, so it would be good to drop support 
> for Java 7 in 2017. The actual timing would depend on when we release the 
> next major release of Kafka.
> More details can be found in the KIP:
> https://cwiki.apache.org/confluence/display/KAFKA/KIP-118%3A+Drop+Support+for+Java+7+in+Kafka+0.11



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
