[flink] branch release-1.3 updated: [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to ExpressionParser.

2018-08-21 Thread fhueske
This is an automated email from the ASF dual-hosted git repository.

fhueske pushed a commit to branch release-1.3
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.3 by this push:
 new d3c206e  [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc 
& desc to ExpressionParser.
d3c206e is described below

commit d3c206efc7b0b0612cb3d4c9d8272ffed8841f52
Author: Rong Rong 
AuthorDate: Mon Aug 20 09:13:56 2018 -0700

[FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to 
ExpressionParser.

* Add tests to ensure asc and desc won't be dropped in the future.

This closes #6585.
---
 .../flink/table/api/scala/expressionDsl.scala  | 11 
 .../flink/table/expressions/ExpressionParser.scala | 10 
 .../stringexpr/SortStringExpressionTest.scala  | 60 ++
 3 files changed, 81 insertions(+)

diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
index b87bb6d..06bc866 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
@@ -230,7 +230,18 @@ trait ImplicitExpressionOperations {
 */
   def as(name: Symbol, extraNames: Symbol*) = Alias(expr, name.name, 
extraNames.map(_.name))
 
+  /**
+    * Specifies ascending order of an expression i.e. a field for orderBy call.
+    *
+    * @return ascend expression
+    */
   def asc = Asc(expr)
+
+  /**
+    * Specifies descending order of an expression i.e. a field for orderBy call.
+    *
+    * @return descend expression
+    */
   def desc = Desc(expr)
 
   /**
diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
index 201679b..687e514 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
@@ -48,6 +48,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
   // Keyword
   lazy val AS: Keyword = Keyword("as")
   lazy val CAST: Keyword = Keyword("cast")
+  lazy val ASC: Keyword = Keyword("asc")
+  lazy val DESC: Keyword = Keyword("desc")
   lazy val NULL: Keyword = Keyword("Null")
   lazy val IF: Keyword = Keyword("?")
   lazy val TO_DATE: Keyword = Keyword("toDate")
@@ -209,6 +211,12 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 
   // suffix operators
 
+  lazy val suffixAsc : PackratParser[Expression] =
+    composite <~ "." ~ ASC ~ opt("()") ^^ { e => Asc(e) }
+
+  lazy val suffixDesc : PackratParser[Expression] =
+    composite <~ "." ~ DESC ~ opt("()") ^^ { e => Desc(e) }
+
   lazy val suffixCast: PackratParser[Expression] =
 composite ~ "." ~ CAST ~ "(" ~ dataType ~ ")" ^^ {
 case e ~ _ ~ _ ~ _ ~ dt ~ _ => Cast(e, dt)
@@ -305,6 +313,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 // expressions that need special expression conversion
 suffixAs | suffixTimeInterval | suffixRowInterval | suffixToTimestamp | 
suffixToTime |
 suffixToDate |
+// expression for ordering
+suffixAsc | suffixDesc |
 // expressions that take enumerations
 suffixCast | suffixTrim | suffixTrimWithoutArgs | suffixExtract | 
suffixFloor | suffixCeil |
 // expressions that take literals
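
With the two suffix rules above wired into the parser dispatch, ordering expressions written as strings are accepted wherever the Scala expression DSL already was. A minimal sketch of the now-working call, assuming a batch environment and a table with illustrative fields 'a and 'b (none of these names come from the commit itself):

    import org.apache.flink.api.scala._
    import org.apache.flink.table.api.TableEnvironment
    import org.apache.flink.table.api.scala._

    object OrderByStringExample {
      def main(args: Array[String]): Unit = {
        val env = ExecutionEnvironment.getExecutionEnvironment
        val tEnv = TableEnvironment.getTableEnvironment(env)

        // Hypothetical input data; field names are illustrative.
        val t = env.fromElements((3, "x"), (1, "y"), (2, "z")).toTable(tEnv, 'a, 'b)

        // Before this fix the string variant failed to parse "asc"/"desc";
        // both calls should now translate to the same sorted plan.
        val viaStrings = t.orderBy("a.asc, b.desc")
        val viaExprs   = t.orderBy('a.asc, 'b.desc)
      }
    }

The new SortStringExpressionTest (added below) verifies exactly this equivalence between the string-based and the expression-based plans.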
diff --git 
a/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
new file mode 100644
index 000..204ec77
--- /dev/null
+++ 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language

[flink] branch release-1.4 updated: [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to ExpressionParser.

2018-08-21 Thread fhueske
This is an automated email from the ASF dual-hosted git repository.

fhueske pushed a commit to branch release-1.4
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.4 by this push:
 new 7b34a03  [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc 
& desc to ExpressionParser.
7b34a03 is described below

commit 7b34a03e678a565fe7c028b669b0417b06d23456
Author: Rong Rong 
AuthorDate: Mon Aug 20 09:13:56 2018 -0700

[FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to 
ExpressionParser.

* Add tests to ensure asc and desc won't be dropped in the future.

This closes #6585.
---
 .../flink/table/api/scala/expressionDsl.scala  | 11 
 .../flink/table/expressions/ExpressionParser.scala | 10 
 .../stringexpr/SortStringExpressionTest.scala  | 60 ++
 3 files changed, 81 insertions(+)

diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
index b62e142..991caff 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
@@ -231,7 +231,18 @@ trait ImplicitExpressionOperations {
 */
   def as(name: Symbol, extraNames: Symbol*) = Alias(expr, name.name, 
extraNames.map(_.name))
 
+  /**
+    * Specifies ascending order of an expression i.e. a field for orderBy call.
+    *
+    * @return ascend expression
+    */
   def asc = Asc(expr)
+
+  /**
+    * Specifies descending order of an expression i.e. a field for orderBy call.
+    *
+    * @return descend expression
+    */
   def desc = Desc(expr)
 
   /**
diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
index 201679b..687e514 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
@@ -48,6 +48,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
   // Keyword
   lazy val AS: Keyword = Keyword("as")
   lazy val CAST: Keyword = Keyword("cast")
+  lazy val ASC: Keyword = Keyword("asc")
+  lazy val DESC: Keyword = Keyword("desc")
   lazy val NULL: Keyword = Keyword("Null")
   lazy val IF: Keyword = Keyword("?")
   lazy val TO_DATE: Keyword = Keyword("toDate")
@@ -209,6 +211,12 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 
   // suffix operators
 
+  lazy val suffixAsc : PackratParser[Expression] =
+    composite <~ "." ~ ASC ~ opt("()") ^^ { e => Asc(e) }
+
+  lazy val suffixDesc : PackratParser[Expression] =
+    composite <~ "." ~ DESC ~ opt("()") ^^ { e => Desc(e) }
+
   lazy val suffixCast: PackratParser[Expression] =
 composite ~ "." ~ CAST ~ "(" ~ dataType ~ ")" ^^ {
 case e ~ _ ~ _ ~ _ ~ dt ~ _ => Cast(e, dt)
@@ -305,6 +313,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 // expressions that need special expression conversion
 suffixAs | suffixTimeInterval | suffixRowInterval | suffixToTimestamp | 
suffixToTime |
 suffixToDate |
+// expression for ordering
+suffixAsc | suffixDesc |
 // expressions that take enumerations
 suffixCast | suffixTrim | suffixTrimWithoutArgs | suffixExtract | 
suffixFloor | suffixCeil |
 // expressions that take literals
diff --git 
a/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
new file mode 100644
index 000..204ec77
--- /dev/null
+++ 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language

[flink] branch release-1.5 updated: [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to ExpressionParser.

2018-08-21 Thread fhueske
This is an automated email from the ASF dual-hosted git repository.

fhueske pushed a commit to branch release-1.5
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.5 by this push:
 new a25a241  [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc 
& desc to ExpressionParser.
a25a241 is described below

commit a25a241c48027ba6178605af8e1112e2e27ed259
Author: Rong Rong 
AuthorDate: Mon Aug 20 09:13:56 2018 -0700

[FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to 
ExpressionParser.

* Add tests to ensure asc and desc won't be dropped in the future.

This closes #6585.
---
 .../flink/table/api/scala/expressionDsl.scala  | 11 
 .../flink/table/expressions/ExpressionParser.scala | 10 
 .../stringexpr/SortStringExpressionTest.scala  | 60 ++
 3 files changed, 81 insertions(+)

diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
index f73442b..d6154d4 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
@@ -236,7 +236,18 @@ trait ImplicitExpressionOperations {
 */
   def as(name: Symbol, extraNames: Symbol*) = Alias(expr, name.name, 
extraNames.map(_.name))
 
+  /**
+    * Specifies ascending order of an expression i.e. a field for orderBy call.
+    *
+    * @return ascend expression
+    */
   def asc = Asc(expr)
+
+  /**
+    * Specifies descending order of an expression i.e. a field for orderBy call.
+    *
+    * @return descend expression
+    */
   def desc = Desc(expr)
 
   /**
diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
index aa82464..6d62c56 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
@@ -48,6 +48,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
   // Keyword
   lazy val AS: Keyword = Keyword("as")
   lazy val CAST: Keyword = Keyword("cast")
+  lazy val ASC: Keyword = Keyword("asc")
+  lazy val DESC: Keyword = Keyword("desc")
   lazy val NULL: Keyword = Keyword("Null")
   lazy val IF: Keyword = Keyword("?")
   lazy val TO_DATE: Keyword = Keyword("toDate")
@@ -211,6 +213,12 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 
   // suffix operators
 
+  lazy val suffixAsc : PackratParser[Expression] =
+    composite <~ "." ~ ASC ~ opt("()") ^^ { e => Asc(e) }
+
+  lazy val suffixDesc : PackratParser[Expression] =
+    composite <~ "." ~ DESC ~ opt("()") ^^ { e => Desc(e) }
+
   lazy val suffixCast: PackratParser[Expression] =
 composite ~ "." ~ CAST ~ "(" ~ dataType ~ ")" ^^ {
 case e ~ _ ~ _ ~ _ ~ dt ~ _ => Cast(e, dt)
@@ -307,6 +315,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 // expressions that need special expression conversion
 suffixAs | suffixTimeInterval | suffixRowInterval | suffixToTimestamp | 
suffixToTime |
 suffixToDate |
+// expression for ordering
+suffixAsc | suffixDesc |
 // expressions that take enumerations
 suffixCast | suffixTrim | suffixTrimWithoutArgs | suffixExtract | 
suffixFloor | suffixCeil |
 // expressions that take literals
diff --git 
a/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
new file mode 100644
index 000..204ec77
--- /dev/null
+++ 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language

[flink] branch release-1.6 updated: [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to ExpressionParser.

2018-08-21 Thread fhueske
This is an automated email from the ASF dual-hosted git repository.

fhueske pushed a commit to branch release-1.6
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.6 by this push:
 new b6d8fb9  [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc 
& desc to ExpressionParser.
b6d8fb9 is described below

commit b6d8fb941fb3c3f2346b02d5afbd87573e4a48bb
Author: Rong Rong 
AuthorDate: Mon Aug 20 09:13:56 2018 -0700

[FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to 
ExpressionParser.

* Add tests to ensure asc and desc won't be dropped in the future.

This closes #6585.
---
 .../flink/table/api/scala/expressionDsl.scala  | 11 
 .../flink/table/expressions/ExpressionParser.scala | 10 
 .../stringexpr/SortStringExpressionTest.scala  | 60 ++
 3 files changed, 81 insertions(+)

diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
index fe705d4..39c5937 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
@@ -236,7 +236,18 @@ trait ImplicitExpressionOperations {
 */
   def as(name: Symbol, extraNames: Symbol*) = Alias(expr, name.name, 
extraNames.map(_.name))
 
+  /**
+    * Specifies ascending order of an expression i.e. a field for orderBy call.
+    *
+    * @return ascend expression
+    */
   def asc = Asc(expr)
+
+  /**
+    * Specifies descending order of an expression i.e. a field for orderBy call.
+    *
+    * @return descend expression
+    */
   def desc = Desc(expr)
 
   /**
diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
index faf6268..67d8711 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
@@ -48,6 +48,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
   // Keyword
   lazy val AS: Keyword = Keyword("as")
   lazy val CAST: Keyword = Keyword("cast")
+  lazy val ASC: Keyword = Keyword("asc")
+  lazy val DESC: Keyword = Keyword("desc")
   lazy val NULL: Keyword = Keyword("Null")
   lazy val IF: Keyword = Keyword("?")
   lazy val TO_DATE: Keyword = Keyword("toDate")
@@ -212,6 +214,12 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 
   // suffix operators
 
+  lazy val suffixAsc : PackratParser[Expression] =
+    composite <~ "." ~ ASC ~ opt("()") ^^ { e => Asc(e) }
+
+  lazy val suffixDesc : PackratParser[Expression] =
+    composite <~ "." ~ DESC ~ opt("()") ^^ { e => Desc(e) }
+
   lazy val suffixCast: PackratParser[Expression] =
 composite ~ "." ~ CAST ~ "(" ~ dataType ~ ")" ^^ {
 case e ~ _ ~ _ ~ _ ~ dt ~ _ => Cast(e, dt)
@@ -316,6 +324,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 suffixToDate |
 // expression for log
 suffixLog |
+// expression for ordering
+suffixAsc | suffixDesc |
 // expressions that take enumerations
 suffixCast | suffixTrim | suffixTrimWithoutArgs | suffixExtract | 
suffixFloor | suffixCeil |
 // expressions that take literals
diff --git 
a/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
new file mode 100644
index 000..204ec77
--- /dev/null
+++ 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.api.batc

[flink] 03/03: [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to ExpressionParser.

2018-08-21 Thread fhueske
This is an automated email from the ASF dual-hosted git repository.

fhueske pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 12bae25d56ca16380a724dcd15028da49c2d653e
Author: Rong Rong 
AuthorDate: Mon Aug 20 09:13:56 2018 -0700

[FLINK-10172] [table] Fix Table.orderBy(String) by adding asc & desc to 
ExpressionParser.

* Add tests to ensure asc and desc won't be dropped in the future.

This closes #6585.
---
 .../flink/table/api/scala/expressionDsl.scala  | 11 
 .../flink/table/expressions/ExpressionParser.scala | 10 
 .../stringexpr/SortStringExpressionTest.scala  | 60 ++
 3 files changed, 81 insertions(+)

diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
index 8b08af6..429c37f 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/scala/expressionDsl.scala
@@ -236,7 +236,18 @@ trait ImplicitExpressionOperations {
 */
   def as(name: Symbol, extraNames: Symbol*) = Alias(expr, name.name, 
extraNames.map(_.name))
 
+  /**
+    * Specifies ascending order of an expression i.e. a field for orderBy call.
+    *
+    * @return ascend expression
+    */
   def asc = Asc(expr)
+
+  /**
+    * Specifies descending order of an expression i.e. a field for orderBy call.
+    *
+    * @return descend expression
+    */
   def desc = Desc(expr)
 
   /**
diff --git 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
index 4b2440c..c0a577d 100644
--- 
a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
+++ 
b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/ExpressionParser.scala
@@ -48,6 +48,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
   // Keyword
   lazy val AS: Keyword = Keyword("as")
   lazy val CAST: Keyword = Keyword("cast")
+  lazy val ASC: Keyword = Keyword("asc")
+  lazy val DESC: Keyword = Keyword("desc")
   lazy val NULL: Keyword = Keyword("Null")
   lazy val IF: Keyword = Keyword("?")
   lazy val TO_DATE: Keyword = Keyword("toDate")
@@ -216,6 +218,12 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 
   // suffix operators
 
+  lazy val suffixAsc : PackratParser[Expression] =
+    composite <~ "." ~ ASC ~ opt("()") ^^ { e => Asc(e) }
+
+  lazy val suffixDesc : PackratParser[Expression] =
+    composite <~ "." ~ DESC ~ opt("()") ^^ { e => Desc(e) }
+
   lazy val suffixCast: PackratParser[Expression] =
 composite ~ "." ~ CAST ~ "(" ~ dataType ~ ")" ^^ {
 case e ~ _ ~ _ ~ _ ~ dt ~ _ => Cast(e, dt)
@@ -324,6 +332,8 @@ object ExpressionParser extends JavaTokenParsers with 
PackratParsers {
 suffixToDate |
 // expression for log
 suffixLog |
+// expression for ordering
+suffixAsc | suffixDesc |
 // expressions that take enumerations
 suffixCast | suffixTrim | suffixTrimWithoutArgs | suffixExtract | 
suffixFloor | suffixCeil |
 // expressions that take literals
diff --git 
a/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
new file mode 100644
index 000..204ec77
--- /dev/null
+++ 
b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/table/stringexpr/SortStringExpressionTest.scala
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.api.batch.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.utils.TableTestBase
+import org.junit.Test
+
+class SortStringExpressionTest extends T

[flink] branch master updated (f803280 -> 12bae25)

2018-08-21 Thread fhueske
This is an automated email from the ASF dual-hosted git repository.

fhueske pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from f803280  [FLINK-10042][state] (part 2) Refactoring of snapshot 
algorithms for better abstraction and cleaner resource management
 new e1798fd  [FLINK-10127] [core] Add TypeInformation for 
java.time.Instant.
 new d8c45fa  [FLINK-10187] [table] Fix LogicalUnnestRule after upgrading 
to Calcite 1.17.
 new 12bae25  [FLINK-10172] [table] Fix Table.orderBy(String) by adding asc 
& desc to ExpressionParser.

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../flink/api/common/typeinfo/BasicTypeInfo.java   |6 +
 .../apache/flink/api/common/typeinfo/Types.java|7 +
 .../common/typeutils/base/InstantComparator.java   |  106 +
 .../{IntSerializer.java => InstantSerializer.java} |   68 +-
 ...paratorTest.java => InstantComparatorTest.java} |   31 +-
 ...ializerTest.java => InstantSerializerTest.java} |   44 +-
 .../org/apache/flink/types/BasicTypeInfoTest.java  |3 +-
 .../apache/calcite/sql2rel/SqlToRelConverter.java  | 5619 
 .../flink/table/api/scala/expressionDsl.scala  |   11 +
 .../flink/table/expressions/ExpressionParser.scala |   10 +
 .../plan/rules/logical/LogicalUnnestRule.scala |   13 +
 .../stringexpr/SortStringExpressionTest.scala} |   52 +-
 .../apache/flink/api/scala/typeutils/Types.scala   |5 +
 13 files changed, 282 insertions(+), 5693 deletions(-)
 create mode 100644 
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/InstantComparator.java
 copy 
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/{IntSerializer.java
 => InstantSerializer.java} (50%)
 copy 
flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/{BooleanComparatorTest.java
 => InstantComparatorTest.java} (58%)
 copy 
flink-core/src/test/java/org/apache/flink/api/common/typeutils/base/{BigDecSerializerTest.java
 => InstantSerializerTest.java} (55%)
 delete mode 100644 
flink-libraries/flink-table/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
 copy 
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/batch/{sql/CalcTest.scala
 => table/stringexpr/SortStringExpressionTest.scala} (52%)



[flink] 01/03: [FLINK-10127] [core] Add TypeInformation for java.time.Instant.

2018-08-21 Thread fhueske
This is an automated email from the ASF dual-hosted git repository.

fhueske pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit e1798fd857f81337e7521ad284ee31e58c5d2be5
Author: Alexey Trenikhin 
AuthorDate: Mon Aug 13 14:19:59 2018 -0700

[FLINK-10127] [core] Add TypeInformation for java.time.Instant.

This closes #6549.
---
 .../flink/api/common/typeinfo/BasicTypeInfo.java   |   6 ++
 .../apache/flink/api/common/typeinfo/Types.java|   7 ++
 .../common/typeutils/base/InstantComparator.java   | 106 +
 .../common/typeutils/base/InstantSerializer.java   | 105 
 .../typeutils/base/InstantComparatorTest.java  |  56 +++
 .../typeutils/base/InstantSerializerTest.java  |  67 +
 .../org/apache/flink/types/BasicTypeInfoTest.java  |   3 +-
 .../apache/flink/api/scala/typeutils/Types.scala   |   5 +
 8 files changed, 354 insertions(+), 1 deletion(-)

diff --git 
a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java
 
b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java
index f19865e..00c4c31 100644
--- 
a/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java
+++ 
b/flink-core/src/main/java/org/apache/flink/api/common/typeinfo/BasicTypeInfo.java
@@ -40,6 +40,8 @@ import 
org.apache.flink.api.common.typeutils.base.DoubleComparator;
 import org.apache.flink.api.common.typeutils.base.DoubleSerializer;
 import org.apache.flink.api.common.typeutils.base.FloatComparator;
 import org.apache.flink.api.common.typeutils.base.FloatSerializer;
+import org.apache.flink.api.common.typeutils.base.InstantComparator;
+import org.apache.flink.api.common.typeutils.base.InstantSerializer;
 import org.apache.flink.api.common.typeutils.base.IntComparator;
 import org.apache.flink.api.common.typeutils.base.IntSerializer;
 import org.apache.flink.api.common.typeutils.base.LongComparator;
@@ -53,6 +55,7 @@ import 
org.apache.flink.api.common.typeutils.base.VoidSerializer;
 import java.lang.reflect.Constructor;
 import java.math.BigDecimal;
 import java.math.BigInteger;
+import java.time.Instant;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
@@ -83,6 +86,8 @@ public class BasicTypeInfo<T> extends TypeInformation<T> implements AtomicType<T
    public static final BasicTypeInfo<Void> VOID_TYPE_INFO = new BasicTypeInfo<>(Void.class, new Class[]{}, VoidSerializer.INSTANCE, null);
    public static final BasicTypeInfo<BigInteger> BIG_INT_TYPE_INFO = new BasicTypeInfo<>(BigInteger.class, new Class[]{}, BigIntSerializer.INSTANCE, BigIntComparator.class);
    public static final BasicTypeInfo<BigDecimal> BIG_DEC_TYPE_INFO = new BasicTypeInfo<>(BigDecimal.class, new Class[]{}, BigDecSerializer.INSTANCE, BigDecComparator.class);
+   public static final BasicTypeInfo<Instant> INSTANT_TYPE_INFO = new BasicTypeInfo<>(Instant.class, new Class[]{}, InstantSerializer.INSTANCE, InstantComparator.class);
+
 
// 

 
@@ -250,5 +255,6 @@ public class BasicTypeInfo extends TypeInformation 
implements AtomicType SQL_TIMESTAMP = 
SqlTimeTypeInfo.TIMESTAMP;
 
+
+   /**
+    * Returns type information for {@link java.time.Instant}. Supports a null value.
+    */
+   public static final TypeInformation<Instant> INSTANT = BasicTypeInfo.INSTANT_TYPE_INFO;
+
//CHECKSTYLE.OFF: MethodName
 
/**
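
As a quick orientation for users of the new type, the constants above can be used wherever a TypeInformation is required explicitly. A minimal sketch (object and value names are illustrative, not part of the commit):

    import java.time.Instant

    import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation, Types}

    object InstantTypeInfoExample {
      def main(args: Array[String]): Unit = {
        // The constant added above; it is backed by BasicTypeInfo.INSTANT_TYPE_INFO,
        // which wires in InstantSerializer and InstantComparator.
        val instantInfo: TypeInformation[Instant] = Types.INSTANT

        // Both references point to the same type information instance.
        println(instantInfo eq BasicTypeInfo.INSTANT_TYPE_INFO) // true
      }
    }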
diff --git 
a/flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/InstantComparator.java
 
b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/InstantComparator.java
new file mode 100644
index 000..28a7be0
--- /dev/null
+++ 
b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/InstantComparator.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.api.common.typeutils.base;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.api.common.typeutils.TypeComparator;
+import org.apache.flink.core.memory.DataInputView;
+import org.apache.flink.core.memory.MemorySegment;
+
+import java.io.IOException;
+import j

[flink] 02/02: [FLINK-10042][state] (part 2) Refactoring of snapshot algorithms for better abstraction and cleaner resource management

2018-08-21 Thread srichter
This is an automated email from the ASF dual-hosted git repository.

srichter pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f803280bb933d968976e79b9efb5953bed308d96
Author: Stefan Richter 
AuthorDate: Thu Aug 9 22:23:42 2018 +0200

[FLINK-10042][state] (part 2) Refactoring of snapshot algorithms for better 
abstraction and cleaner resource management

This closes #6556.
---
 .../async/AbstractAsyncCallableWithResources.java  | 194 
 .../flink/runtime/io/async/AsyncDoneCallback.java  |  33 --
 .../flink/runtime/io/async/AsyncStoppable.java |  45 --
 .../io/async/AsyncStoppableTaskWithCallback.java   |  59 ---
 .../io/async/StoppableCallbackCallable.java|  30 --
 .../runtime/state/AbstractSnapshotStrategy.java|  79 +++
 .../flink/runtime/state/AsyncSnapshotCallable.java | 190 +++
 .../runtime/state/DefaultOperatorStateBackend.java | 369 +++---
 .../flink/runtime/state/SnapshotStrategy.java  |  13 +-
 .../apache/flink/runtime/state/Snapshotable.java   |  27 +-
 .../runtime/state/heap/HeapKeyedStateBackend.java  | 145 +++---
 .../runtime/state/AsyncSnapshotCallableTest.java   | 326 
 .../runtime/state/OperatorStateBackendTest.java|   4 +-
 .../flink/runtime/state/StateBackendTestBase.java  |   6 +-
 .../state/ttl/mock/MockKeyedStateBackend.java  |   5 +-
 .../streaming/state/RocksDBKeyedStateBackend.java  |  53 +-
 ...yBase.java => RocksDBSnapshotStrategyBase.java} |  57 ++-
 .../state/snapshot/RocksFullSnapshotStrategy.java  | 255 --
 .../snapshot/RocksIncrementalSnapshotStrategy.java | 552 ++---
 .../flink/streaming/runtime/tasks/StreamTask.java  |   4 +-
 .../tasks/TaskCheckpointingBehaviourTest.java  |  11 +-
 .../apache/flink/core/testutils/OneShotLatch.java  |  18 +-
 22 files changed, 1329 insertions(+), 1146 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/io/async/AbstractAsyncCallableWithResources.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/io/async/AbstractAsyncCallableWithResources.java
deleted file mode 100644
index bc0116c..000
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/io/async/AbstractAsyncCallableWithResources.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.runtime.io.async;
-
-import org.apache.flink.util.ExceptionUtils;
-
-import java.io.IOException;
-
-/**
- * This abstract class encapsulates the lifecycle and execution strategy for asynchronous operations that use resources.
- *
- * @param <V> return type of the asynchronous call.
- */
-public abstract class AbstractAsyncCallableWithResources<V> implements StoppableCallbackCallable<V> {
-
-   /** Tracks if the stop method was called on this object. */
-   private volatile boolean stopped;
-
-   /** Tracks if call method was executed (only before stop calls). */
-   private volatile boolean called;
-
-   /** Stores a collected exception if there was one during stop. */
-   private volatile Exception stopException;
-
-   public AbstractAsyncCallableWithResources() {
-   this.stopped = false;
-   this.called = false;
-   }
-
-   /**
-* This method implements the strategy for the actual IO operation:
-* 
-* 1) Acquire resources asynchronously and atomically w.r.t stopping.
-* 2) Performs the operation
-* 3) Releases resources.
-*
-* @return Result of the IO operation, e.g. a deserialized object.
-* @throws Exception exception that happened during the call.
-*/
-   @Override
-   public final V call() throws Exception {
-
-   V result = null;
-   Exception collectedException = null;
-
-   try {
-   synchronized (this) {
-
-   if (stopped) {
-   throw new IOException("Task was already 
stopped.");
-   }
-
-   called = true;
-   // Get resources in async part, atomically 
w.r

[flink] branch master updated (07bb90d -> f803280)

2018-08-21 Thread srichter
This is an automated email from the ASF dual-hosted git repository.

srichter pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


from 07bb90d  [FLINK-10181][rest][docs] Add anchor links to rest requests
 new aba02eb  [FLINK-10042][state] (part 1) Extract snapshot algorithms 
from inner classes of RocksDBKeyedStateBackend into full classes
 new f803280  [FLINK-10042][state] (part 2) Refactoring of snapshot 
algorithms for better abstraction and cleaner resource management

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../async/AbstractAsyncCallableWithResources.java  |  194 
 .../flink/runtime/io/async/AsyncDoneCallback.java  |   33 -
 .../flink/runtime/io/async/AsyncStoppable.java |   45 -
 .../io/async/AsyncStoppableTaskWithCallback.java   |   59 --
 .../io/async/StoppableCallbackCallable.java|   30 -
 .../runtime/state/AbstractSnapshotStrategy.java|   79 ++
 .../flink/runtime/state/AsyncSnapshotCallable.java |  190 
 .../runtime/state/DefaultOperatorStateBackend.java |  369 +++
 .../flink/runtime/state/SnapshotStrategy.java  |   12 +-
 .../apache/flink/runtime/state/Snapshotable.java   |   27 +-
 .../runtime/state/heap/HeapKeyedStateBackend.java  |  140 +--
 .../runtime/state/AsyncSnapshotCallableTest.java   |  326 ++
 .../runtime/state/OperatorStateBackendTest.java|4 +-
 .../flink/runtime/state/StateBackendTestBase.java  |6 +-
 .../state/ttl/mock/MockKeyedStateBackend.java  |5 +-
 .../streaming/state/RocksDBKeyedStateBackend.java  | 1098 +++-
 .../snapshot/RocksDBSnapshotStrategyBase.java  |  141 +++
 .../state/snapshot/RocksFullSnapshotStrategy.java  |  421 
 .../snapshot/RocksIncrementalSnapshotStrategy.java |  534 ++
 .../state/snapshot/RocksSnapshotUtil.java  |   37 +-
 .../streaming/state/RocksDBAsyncSnapshotTest.java  |   27 +-
 .../streaming/state/RocksDBStateBackendTest.java   |1 +
 .../flink/streaming/runtime/tasks/StreamTask.java  |4 +-
 .../tasks/TaskCheckpointingBehaviourTest.java  |   11 +-
 .../apache/flink/core/testutils/OneShotLatch.java  |   18 +-
 25 files changed, 2138 insertions(+), 1673 deletions(-)
 delete mode 100644 
flink-runtime/src/main/java/org/apache/flink/runtime/io/async/AbstractAsyncCallableWithResources.java
 delete mode 100644 
flink-runtime/src/main/java/org/apache/flink/runtime/io/async/AsyncDoneCallback.java
 delete mode 100644 
flink-runtime/src/main/java/org/apache/flink/runtime/io/async/AsyncStoppable.java
 delete mode 100644 
flink-runtime/src/main/java/org/apache/flink/runtime/io/async/AsyncStoppableTaskWithCallback.java
 delete mode 100644 
flink-runtime/src/main/java/org/apache/flink/runtime/io/async/StoppableCallbackCallable.java
 create mode 100644 
flink-runtime/src/main/java/org/apache/flink/runtime/state/AbstractSnapshotStrategy.java
 create mode 100644 
flink-runtime/src/main/java/org/apache/flink/runtime/state/AsyncSnapshotCallable.java
 create mode 100644 
flink-runtime/src/test/java/org/apache/flink/runtime/state/AsyncSnapshotCallableTest.java
 create mode 100644 
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/snapshot/RocksDBSnapshotStrategyBase.java
 create mode 100644 
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/snapshot/RocksFullSnapshotStrategy.java
 create mode 100644 
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/snapshot/RocksIncrementalSnapshotStrategy.java
 copy flink-core/src/main/java/org/apache/flink/util/CollectionUtil.java => 
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/snapshot/RocksSnapshotUtil.java
 (52%)



[flink] 01/02: [FLINK-10042][state] (part 1) Extract snapshot algorithms from inner classes of RocksDBKeyedStateBackend into full classes

2018-08-21 Thread srichter
This is an automated email from the ASF dual-hosted git repository.

srichter pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit aba02eb3fcc4472c3d5f5a0f527960d79c659c31
Author: Stefan Richter 
AuthorDate: Tue Aug 7 15:57:27 2018 +0200

[FLINK-10042][state] (part 1) Extract snapshot algorithms from inner 
classes of RocksDBKeyedStateBackend into full classes
---
 .../flink/runtime/state/SnapshotStrategy.java  |3 +-
 .../runtime/state/heap/HeapKeyedStateBackend.java  |5 +
 .../streaming/state/RocksDBKeyedStateBackend.java  | 1071 ++--
 .../state/snapshot/RocksFullSnapshotStrategy.java  |  478 +
 .../snapshot/RocksIncrementalSnapshotStrategy.java |  578 +++
 .../state/snapshot/RocksSnapshotUtil.java  |   51 +
 .../state/snapshot/SnapshotStrategyBase.java   |   90 ++
 .../streaming/state/RocksDBAsyncSnapshotTest.java  |   27 +-
 .../streaming/state/RocksDBStateBackendTest.java   |1 +
 9 files changed, 1317 insertions(+), 987 deletions(-)

diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/state/SnapshotStrategy.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/state/SnapshotStrategy.java
index 9139fa7..3ad68af 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/state/SnapshotStrategy.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/state/SnapshotStrategy.java
@@ -28,8 +28,7 @@ import java.util.concurrent.RunnableFuture;
  *
 * @param <S> type of the returned state object that represents the result of the snapshot operation.
 */
-@FunctionalInterface
-public interface SnapshotStrategy<S extends StateObject> {
+public interface SnapshotStrategy<S extends StateObject> extends CheckpointListener {
 
/**
 * Operation that writes a snapshot into a stream that is provided by 
the given {@link CheckpointStreamFactory} and
diff --git 
a/flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/HeapKeyedStateBackend.java
 
b/flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/HeapKeyedStateBackend.java
index bc1e0f5..0e2f16c 100644
--- 
a/flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/HeapKeyedStateBackend.java
+++ 
b/flink-runtime/src/main/java/org/apache/flink/runtime/state/heap/HeapKeyedStateBackend.java
@@ -882,6 +882,11 @@ public class HeapKeyedStateBackend<K> extends AbstractKeyedStateBackend<K> {
}
}
}
+
+   @Override
+   public void notifyCheckpointComplete(long checkpointId) throws 
Exception {
+   // nothing to do.
+   }
}
 
private interface StateFactory {
diff --git 
a/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/RocksDBKeyedStateBackend.java
 
b/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/RocksDBKeyedStateBackend.java
index c159976..87c7e55 100644
--- 
a/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/RocksDBKeyedStateBackend.java
+++ 
b/flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/RocksDBKeyedStateBackend.java
@@ -35,9 +35,8 @@ import 
org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerial
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.ConfigConstants;
 import 
org.apache.flink.contrib.streaming.state.iterator.RocksStateKeysIterator;
-import 
org.apache.flink.contrib.streaming.state.iterator.RocksStatesPerKeyGroupMergeIterator;
-import 
org.apache.flink.contrib.streaming.state.iterator.RocksTransformingIteratorWrapper;
-import org.apache.flink.core.fs.CloseableRegistry;
+import 
org.apache.flink.contrib.streaming.state.snapshot.RocksFullSnapshotStrategy;
+import 
org.apache.flink.contrib.streaming.state.snapshot.RocksIncrementalSnapshotStrategy;
 import org.apache.flink.core.fs.FSDataInputStream;
 import org.apache.flink.core.fs.FSDataOutputStream;
 import org.apache.flink.core.fs.FileStatus;
@@ -47,32 +46,22 @@ import org.apache.flink.core.memory.ByteArrayDataInputView;
 import org.apache.flink.core.memory.ByteArrayDataOutputView;
 import org.apache.flink.core.memory.DataInputView;
 import org.apache.flink.core.memory.DataInputViewStreamWrapper;
-import org.apache.flink.core.memory.DataOutputView;
 import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
 import org.apache.flink.runtime.checkpoint.CheckpointOptions;
-import org.apache.flink.runtime.checkpoint.CheckpointType;
-import org.apache.flink.runtime.io.async.AbstractAsyncCallableWithResources;
-import org.apache.flink.runtime.io.async.AsyncStoppableTaskWithCallback;
 import org.apache.flink.runtime.query.TaskKvStateRegistry;
 import org.apache.flink.runtime.state.AbstractKeyedStateBackend;
 import org.apache.flink.runtime.state.CheckpointStrea

[flink-web] 02/02: Rebuild website

2018-08-21 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git

commit ca874dd3bf7d35613ab154449479473f0ea286c5
Author: zentol 
AuthorDate: Tue Aug 21 14:11:29 2018 +0200

Rebuild website
---
 content/blog/feed.xml  | 113 +++
 content/blog/index.html|  38 ++--
 content/blog/page2/index.html  |  36 ++--
 content/blog/page3/index.html  |  37 ++--
 content/blog/page4/index.html  |  38 ++--
 content/blog/page5/index.html  |  44 +++--
 content/blog/page6/index.html  |  45 +++--
 content/blog/page7/index.html  |  25 +++
 content/downloads.html |   1 +
 content/index.html |  10 +-
 content/news/2018/08/21/release-1.5.3.html | 302 +
 11 files changed, 595 insertions(+), 94 deletions(-)

diff --git a/content/blog/feed.xml b/content/blog/feed.xml
index 937e74c..acd55d2 100644
--- a/content/blog/feed.xml
+++ b/content/blog/feed.xml
@@ -7,6 +7,119 @@
 <atom:link href="https://flink.apache.org/blog/feed.xml" rel="self" type="application/rss+xml" />
 
 
+Apache Flink 1.5.3 Released
+
+The Apache Flink community released the third bugfix version of the Apache Flink 1.5 series.
+
+This release includes more than 20 fixes and minor improvements for Flink 1.5.3. The list below includes a detailed list of all fixes.
+
+We highly recommend all users to upgrade to Flink 1.5.3.
+
+Updated Maven dependencies:
+
+<dependency>
+  <groupId>org.apache.flink</groupId>
+  <artifactId>flink-java</artifactId>
+  <version>1.5.3</version>
+</dependency>
+<dependency>
+  <groupId>org.apache.flink</groupId>
+  <artifactId>flink-streaming-java_2.11</artifactId>
+  <version>1.5.3</version>
+</dependency>
+<dependency>
+  <groupId>org.apache.flink</groupId>
+  <artifactId>flink-clients_2.11</artifactId>
+  <version>1.5.3</version>
+</dependency>
+
+You can find the binaries on the updated Downloads page.
+
+List of resolved issues:
+
+Sub-task
+  • [FLINK-9951] - Update scm developerConnection
+
+Bug
+  • [FLINK-5750] - Incorrect translation of n-ary Union
+  • [FLINK-9289] - Parallelism of generated operators should have max parallism of input
+  • [FLINK-9546] - The heartbeatTimeoutIntervalMs of HeartbeatMonitor should be larger than 0
+  • [FLINK-9655] - Externalized checkpoint E2E test fails on travis
+  • [FLINK-9693] - Possible memory leak in jobmanager retaining archived checkpoints
+  • [FLINK-9694] - Potentially NPE in CompositeTypeSerializerConfigSnapshot constructor
+  • [FLINK-9923] - OneInputStreamTaskTest.testWatermarkMetrics fails on Travis
+  • [FLINK-9935] - Batch Table API: grouping by window and attribute causes java.lang.ClassCastException:
+  • [FLINK-9936] - Mesos resource manager unable to connect to master after failover
+  • [FLINK-9946] - Quickstart E2E test archetype version is hard-coded
+  • [FLINK-9969] - Unreasonable memory re

[flink-web] 01/02: Add 1.5.3 release

2018-08-21 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git

commit 2530eb5c56b819ae5d51ed2d23c30bb3d22543ba
Author: zentol 
AuthorDate: Thu Aug 16 12:44:00 2018 +0200

Add 1.5.3 release
---
 _posts/2018-08-21-release-1.5.3.md | 114 +
 downloads.md   |   1 +
 2 files changed, 115 insertions(+)

diff --git a/_posts/2018-08-21-release-1.5.3.md 
b/_posts/2018-08-21-release-1.5.3.md
new file mode 100644
index 000..15372bc
--- /dev/null
+++ b/_posts/2018-08-21-release-1.5.3.md
@@ -0,0 +1,114 @@
+---
+layout: post
+title:  "Apache Flink 1.5.3 Released"
+date:   2018-08-21 12:00:00
+categories: news
+---
+
+The Apache Flink community released the third bugfix version of the Apache 
Flink 1.5 series.
+
+This release includes more than 20 fixes and minor improvements for Flink 
1.5.3. The list below includes a detailed list of all fixes.
+
+We highly recommend all users to upgrade to Flink 1.5.3.
+
+Updated Maven dependencies:
+
+```xml
+<dependency>
+  <groupId>org.apache.flink</groupId>
+  <artifactId>flink-java</artifactId>
+  <version>1.5.3</version>
+</dependency>
+<dependency>
+  <groupId>org.apache.flink</groupId>
+  <artifactId>flink-streaming-java_2.11</artifactId>
+  <version>1.5.3</version>
+</dependency>
+<dependency>
+  <groupId>org.apache.flink</groupId>
+  <artifactId>flink-clients_2.11</artifactId>
+  <version>1.5.3</version>
+</dependency>
+```
+
+You can find the binaries on the updated [Downloads 
page](http://flink.apache.org/downloads.html).
+
+List of resolved issues:
+
+Sub-task
+
+[FLINK-9951] - Update scm developerConnection
+
+Bug
+
+[FLINK-5750] - Incorrect translation of n-ary Union
+[FLINK-9289] - Parallelism of generated operators should have max parallism of input
+[FLINK-9546] - The heartbeatTimeoutIntervalMs of HeartbeatMonitor should be larger than 0
+[FLINK-9655] - Externalized checkpoint E2E test fails on travis
+[FLINK-9693] - Possible memory leak in jobmanager retaining archived checkpoints
+[FLINK-9694] - Potentially NPE in CompositeTypeSerializerConfigSnapshot constructor
+[FLINK-9923] - OneInputStreamTaskTest.testWatermarkMetrics fails on Travis
+[FLINK-9935] - Batch Table API: grouping by window and attribute causes java.lang.ClassCastException:
+[FLINK-9936] - Mesos resource manager unable to connect to master after failover
+[FLINK-9946] - Quickstart E2E test archetype version is hard-coded
+[FLINK-9969] - Unreasonable memory requirements to complete examples/batch/WordCount
+[FLINK-9972] - Debug memory logging not working
+[FLINK-9978] - Source release sha contains absolute file path
+[FLINK-9985] - Incorrect parameter order in document
+[FLINK-9988] - job manager does not respect property jobmanager.web.address
+[FLINK-10013] - Fix Kerberos integration for FLIP-6 YarnTaskExecutorRunner
+[FLINK-10033] - Let Task release reference to Invokable on shutdown
+[FLINK-10070] - Flink cannot be compiled with maven 3.0.x
+
+New Feature
+
+[FLINK-10022] - Add metrics for input/output buffers
+
+Improvement
+
+[FLINK-9446] - Compatibility table not up-to-date
+[FLINK-9765] - Improve CLI responsiveness when cluster is not reachable
+[FLINK-9806] - Add a canonical link element to documentation HTML
+[FLINK-9859] - More Akka config options
+[FLINK-9942] - Guard handlers against null fields in requests
+[FLINK-9986] - Remove unnecessary information from .version.properties file
+[FLINK-9987] - Rework ClassLoader E2E test to not rely on .version.properties file
+[FLINK-10006] - Improve logging in BarrierBuffer
+[FLINK-10016] - Make YARN/Kerberos end-to-end test stricter
+
diff --git a/downloads.md b/downloads.md
index d3096eb..3df2af8 100644
--- a/downloads.md
+++ b/downloads.md
@@ -105,6 +105,7 @@ Note that the community is always open to discussing bugfix releases for even ol
 All Flink releases are available via [https://archive.apache.org/dist/flink/](https://archive.apache.org/dist/flink/) including checksums and cryptographic signatures. At the time of writing, this includes the following versions:
 
 - Flink 1.6.0 - 2018-08-08 ([Source](https://archive.apache.org/dist/flink/flink-1.6.0/flink-1.6.0-src.tgz), [Binaries](https://archive.apache.org/dist/flink/flink-1.6.0/), [Docs]({{site.DOCS_BASE_URL}}flink-docs-release-1.6/), [Javadocs]({{site.DOCS_BASE_URL}}flink-docs-release-1.6/api/java), [ScalaDocs]({{site.DOCS_BASE_URL}}flink-docs-release-1.6/api/scala/index.html))
+- Flink 1.5.3 - 2018-08-21 ([Source](https://archive.apache.org/dist/flink/flink-1.5.3/flink-1.5.3-src.tgz), [Binaries](https://archive.apache.org/dist/flink/flink-1.5.3/), [Docs]({{site.DOCS_BASE_URL}}flink-docs-release-1.5/), [Javadocs]({{site.DOCS_BASE_URL}}flink-docs-release-1.5/api/java), [Sc

[flink-web] branch asf-site updated (89de17c -> ca874dd)

2018-08-21 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git.


from 89de17c  Rebuild website
 new 2530eb5  Add 1.5.3 release
 new ca874dd  Rebuild website

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 _posts/2018-08-21-release-1.5.3.md | 114 +
 content/blog/feed.xml  | 113 
 content/blog/index.html|  38 ---
 content/blog/page2/index.html  |  36 ---
 content/blog/page3/index.html  |  37 ---
 content/blog/page4/index.html  |  38 ---
 content/blog/page5/index.html  |  44 
 content/blog/page6/index.html  |  45 +---
 content/blog/page7/index.html  |  25 +
 content/downloads.html |   1 +
 content/index.html |  10 +-
 .../21/release-1.5.3.html} |  88 ++--
 downloads.md   |   1 +
 13 files changed, 463 insertions(+), 127 deletions(-)
 create mode 100644 _posts/2018-08-21-release-1.5.3.md
 copy content/news/2018/{03/08/release-1.4.2.html => 08/21/release-1.5.3.html} 
(65%)



[flink-web] branch asf-site updated: Rebuild website

2018-08-21 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/flink-web.git


The following commit(s) were added to refs/heads/asf-site by this push:
 new 89de17c  Rebuild website
89de17c is described below

commit 89de17c91393e3bfba3a8058b5a7de8569d11391
Author: zentol 
AuthorDate: Tue Aug 21 14:10:07 2018 +0200

Rebuild website
---
 content/downloads.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/content/downloads.html b/content/downloads.html
index a995b37..3b57a20 100644
--- a/content/downloads.html
+++ b/content/downloads.html
@@ -276,7 +276,7 @@ the classpath.
 
 
   <li>Flink 1.6.0 - 2018-08-08 (<a href="https://archive.apache.org/dist/flink/flink-1.6.0/flink-1.6.0-src.tgz">Source</a>, <a href="https://archive.apache.org/dist/flink/flink-1.6.0/">Binaries</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.6/">Docs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.6/api/java">Javadocs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.6/api/scala/index.html">ScalaDocs</a>)</li>
-  <li>Flink 1.5.2 - 2018-7-31 (<a href="https://archive.apache.org/dist/flink/flink-1.5.2/flink-1.5.2-src.tgz">Source</a>, <a href="https://archive.apache.org/dist/flink/flink-1.5.2/">Binaries</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/">Docs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/java">Javadocs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/scala/index.html">ScalaDocs</a>)</li>
+  <li>Flink 1.5.2 - 2018-07-31 (<a href="https://archive.apache.org/dist/flink/flink-1.5.2/flink-1.5.2-src.tgz">Source</a>, <a href="https://archive.apache.org/dist/flink/flink-1.5.2/">Binaries</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/">Docs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/java">Javadocs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/scala/index.html">ScalaDocs</a>)</li>
   <li>Flink 1.5.1 - 2018-07-12 (<a href="https://archive.apache.org/dist/flink/flink-1.5.1/flink-1.5.1-src.tgz">Source</a>, <a href="https://archive.apache.org/dist/flink/flink-1.5.1/">Binaries</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/">Docs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/java">Javadocs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/scala/index.html">ScalaDocs</a>)</li>
   <li>Flink 1.5.0 - 2018-05-25 (<a href="https://archive.apache.org/dist/flink/flink-1.5.0/flink-1.5.0-src.tgz">Source</a>, <a href="https://archive.apache.org/dist/flink/flink-1.5.0/">Binaries</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/">Docs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/java">Javadocs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.5/api/scala/index.html">ScalaDocs</a>)</li>
   <li>Flink 1.4.2 - 2018-03-08 (<a href="https://archive.apache.org/dist/flink/flink-1.4.2/flink-1.4.2-src.tgz">Source</a>, <a href="https://archive.apache.org/dist/flink/flink-1.4.2/">Binaries</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.4/">Docs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.4/api/java">Javadocs</a>, <a href="https://ci.apache.org/projects/flink/flink-docs-release-1.4/api/scala/index.html">ScalaDocs</a>)</li>
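
The only functional change in the hunk above is the zero-padded month in the
Flink 1.5.2 entry (2018-7-31 becomes 2018-07-31), which brings the date into
line with the ISO-8601 format used by the other entries. As a purely
illustrative aside, not part of the flink-web build, a strict ISO date parser
is enough to catch such entries before they reach the generated page; the
class and method names below are hypothetical:

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.format.ResolverStyle;

public class ReleaseDateCheck {

    // Strict ISO-8601 local date, e.g. 2018-07-31; rejects 2018-7-31.
    private static final DateTimeFormatter ISO_DATE =
        DateTimeFormatter.ofPattern("uuuu-MM-dd").withResolverStyle(ResolverStyle.STRICT);

    static boolean isValidReleaseDate(String date) {
        try {
            LocalDate.parse(date, ISO_DATE);
            return true;
        } catch (DateTimeParseException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(isValidReleaseDate("2018-7-31"));  // false
        System.out.println(isValidReleaseDate("2018-07-31")); // true
    }
}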



[flink] annotated tag release-1.5.3 created (now 1c0dff6)

2018-08-21 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to annotated tag release-1.5.3
in repository https://gitbox.apache.org/repos/asf/flink.git.


  at 1c0dff6  (tag)
 tagging 947a0cc4cbc1606b871dd797955adc9d39d6d4a3 (tag)
  length 972 bytes
  by zentol
  on Tue Aug 21 13:58:23 2018 +0200

- Log -
release-1.5.3
-BEGIN PGP SIGNATURE-
Version: GnuPG v2

iQIcBAABCAAGBQJbe/5oAAoJEMLu17ER1GS6d3IP/RsgZzBrX31GC/kbqwQQUg82
SJITI+mfhDAeZRovemq06IOqypk04lSAvCIqfP+tiskWKjdPY9WLOX7hgzNOX96N
KLMyneGEfPrwl2KoiL6EtzIwx/Go8wMw6tJ/qjWm9uJix2WhAchofcGU1u4DJypH
doJF2tJ6kboCXjSQdEU3xnqX8kaqcvtURgdUpJ7wKlUNNt8/NbyzlH4epGYrfM5G
IBbINmBtMZGx1LoSDoAXkSFLgf4Bwaj9xGy6hL4eLi5jHFDYJGEv6lCI4pDyEs6l
S13skbei/LrAvwelGQevRSqGe13nxThRVIJ5C4OulFRBOnzZDVNq42apKXgU8Ktx
+mfqNGcj8OffH6XnLLWvCXTqsRKncc/SHy9Uy3o4pNMtCpxhAHONmSeLNxAuaAyO
E+J2ttmFBdDLYGIfVyrbBO3bZLRHbTddsj7cKkylFLdjSY0zpvB7dXrE/tPxyBHY
Jkp/szxKxgmDQ0J9UHCUlxnbOcPnKVoUJDWWoxhf2bvCkf9m0QIQfGLcBhFKez1m
TjLBqzjRuHZei/IgWz2eOPF2ccdX2ELAi0jnyCqqmSog893k+cfIfWDr2v33FvPG
y3AIOrR0byk6aYnKdA5GcEXrR6oWfMD744Kuat+cHWa93j7R+37QIL3RJ4qPRGYP
IWcCPn+x4C6bbNgA7ylH
=gre/
-END PGP SIGNATURE-
---

No new revisions were added by this update.



svn commit: r28880 - /release/flink/flink-1.5.2/

2018-08-21 Thread chesnay
Author: chesnay
Date: Tue Aug 21 11:56:21 2018
New Revision: 28880

Log:
Remove Flink 1.5.2 release files

Removed:
release/flink/flink-1.5.2/



svn commit: r28879 - /dev/flink/flink-1.5.3/ /release/flink/flink-1.5.3/

2018-08-21 Thread chesnay
Author: chesnay
Date: Tue Aug 21 11:55:40 2018
New Revision: 28879

Log:
Release Flink 1.5.3

Added:
release/flink/flink-1.5.3/
  - copied from r28878, dev/flink/flink-1.5.3/
Removed:
dev/flink/flink-1.5.3/



[flink] branch master updated: [FLINK-10181][rest][docs] Add anchor links to rest requests

2018-08-21 Thread chesnay
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
 new 07bb90d  [FLINK-10181][rest][docs] Add anchor links to rest requests
07bb90d is described below

commit 07bb90d0e6166cda644af555870c483acd5c2390
Author: zentol 
AuthorDate: Mon Aug 20 17:30:53 2018 +0200

[FLINK-10181][rest][docs] Add anchor links to rest requests
---
 docs/_includes/generated/rest_dispatcher.html  | 96 +++---
 .../flink/docs/rest/RestAPIDocGenerator.java   |  2 +-
 2 files changed, 49 insertions(+), 49 deletions(-)
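
For context, FLINK-10181 makes every endpoint heading in the generated
rest_dispatcher.html linkable, so individual REST requests can be referenced
directly from other documentation pages; the diff below touches each endpoint
entry accordingly. The following sketch only illustrates the general idea of
deriving an anchor id from an endpoint path and wrapping the heading in a
self-referencing link. The class name, method names, markup, and id scheme are
assumptions for illustration, not the actual RestAPIDocGenerator code:

public class AnchorLinkSketch {

    // Turn a REST path such as "/jobs/:jobid/config" into a stable anchor id,
    // e.g. "jobs-jobid-config" (illustrative scheme only).
    static String anchorId(String path) {
        return path.replaceAll("[^A-Za-z0-9]+", "-")
                   .replaceAll("(^-+|-+$)", "")
                   .toLowerCase();
    }

    // Wrap the endpoint path in a self-referencing anchor so the heading can
    // be linked to directly.
    static String renderHeading(String path) {
        String id = anchorId(path);
        return "<h4 id=\"" + id + "\"><a href=\"#" + id + "\">" + path + "</a></h4>";
    }

    public static void main(String[] args) {
        System.out.println(renderHeading("/jobs/:jobid/checkpoints"));
        // <h4 id="jobs-jobid-checkpoints"><a href="#jobs-jobid-checkpoints">/jobs/:jobid/checkpoints</a></h4>
    }
}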

diff --git a/docs/_includes/generated/rest_dispatcher.html b/docs/_includes/generated/rest_dispatcher.html
index 845dd8b..ec6989b 100644
--- a/docs/_includes/generated/rest_dispatcher.html
+++ b/docs/_includes/generated/rest_dispatcher.html
@@ -1,7 +1,7 @@
 
   
 
-  /cluster
+  /cluster
 
 
   Verb: DELETE
@@ -37,7 +37,7 @@
 
   
 
-  /config
+  /config
 
 
   Verb: GET
@@ -93,7 +93,7 @@
 
   
 
-  /jars
+  /jars
 
 
   Verb: GET
@@ -170,7 +170,7 @@
 
   
 
-  /jars/upload
+  /jars/upload
 
 
   Verb: POST
@@ -219,7 +219,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jars/:jarid
+  /jars/:jarid
 
 
   Verb: DELETE
@@ -265,7 +265,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jars/:jarid/plan
+  /jars/:jarid/plan
 
 
   Verb: GET
@@ -331,7 +331,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jars/:jarid/run
+  /jars/:jarid/run
 
 
   Verb: POST
@@ -419,7 +419,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobmanager/config
+  /jobmanager/config
 
 
   Verb: GET
@@ -469,7 +469,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobmanager/metrics
+  /jobmanager/metrics
 
 
   Verb: GET
@@ -517,7 +517,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs
+  /jobs
 
 
   Verb: GET
@@ -574,7 +574,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs
+  /jobs
 
 
   Verb: POST
@@ -647,7 +647,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/metrics
+  /jobs/metrics
 
 
   Verb: GET
@@ -697,7 +697,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/overview
+  /jobs/overview
 
 
   Verb: GET
@@ -744,7 +744,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid
+  /jobs/:jobid
 
 
   Verb: GET
@@ -902,7 +902,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid
+  /jobs/:jobid
 
 
   Verb: PATCH
@@ -958,7 +958,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid/accumulators
+  /jobs/:jobid/accumulators
 
 
   Verb: GET
@@ -1049,7 +1049,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid/checkpoints
+  /jobs/:jobid/checkpoints
 
 
   Verb: GET
@@ -1352,7 +1352,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid/checkpoints/config
+  /jobs/:jobid/checkpoints/config
 
 
   Verb: GET
@@ -1430,7 +1430,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid/checkpoints/details/:checkpointid
+  /jobs/:jobid/checkpoints/details/:checkpointid
 
 
   Verb: GET
@@ -1547,7 +1547,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid/checkpoints/details/:checkpointid/subtasks/:vertexid
+  /jobs/:jobid/checkpoints/details/:checkpointid/subtasks/:vertexid
 
 
   Verb: GET
@@ -1693,7 +1693,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid/config
+  /jobs/:jobid/config
 
 
   Verb: GET
@@ -1741,7 +1741,7 @@ Using 'curl' you can upload a jar via 'curl -X POST -H "Expect:" -F "jarfile=#pa
 
   
 
-  /jobs/:jobid/exceptions
+  /jobs/:jobid/exceptions
 
 
   Verb: GET
@@ -1822,7 +1822,7 @@ Using 'curl' you