This is an automated email from the ASF dual-hosted git repository. dwysakowicz pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/flink.git
commit 4e56ca11fb275c72f4a70f8dd12ff71dc12983d3 Author: Dawid Wysakowicz <dwysakow...@apache.org> AuthorDate: Mon May 18 19:49:59 2020 +0200 [FLINK-15947] Update docs with updated package structure. This closes #12232 --- docs/dev/table/common.md | 18 ++++++++++-------- docs/dev/table/common.zh.md | 18 ++++++++++-------- docs/dev/table/tableApi.md | 7 +++---- docs/dev/table/tableApi.zh.md | 7 +++---- docs/getting-started/walkthroughs/table_api.md | 6 +++--- docs/getting-started/walkthroughs/table_api.zh.md | 5 +++-- 6 files changed, 32 insertions(+), 29 deletions(-) diff --git a/docs/dev/table/common.md b/docs/dev/table/common.md index 907cb15..4dae5b7 100644 --- a/docs/dev/table/common.md +++ b/docs/dev/table/common.md @@ -157,7 +157,7 @@ If both planner jars are on the classpath (the default behavior), you should exp // ********************** import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; -import org.apache.flink.table.api.java.StreamTableEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; EnvironmentSettings fsSettings = EnvironmentSettings.newInstance().useOldPlanner().inStreamingMode().build(); StreamExecutionEnvironment fsEnv = StreamExecutionEnvironment.getExecutionEnvironment(); @@ -168,7 +168,7 @@ StreamTableEnvironment fsTableEnv = StreamTableEnvironment.create(fsEnv, fsSetti // FLINK BATCH QUERY // ****************** import org.apache.flink.api.java.ExecutionEnvironment; -import org.apache.flink.table.api.java.BatchTableEnvironment; +import org.apache.flink.table.api.bridge.java.BatchTableEnvironment; ExecutionEnvironment fbEnv = ExecutionEnvironment.getExecutionEnvironment(); BatchTableEnvironment fbTableEnv = BatchTableEnvironment.create(fbEnv); @@ -178,7 +178,7 @@ BatchTableEnvironment fbTableEnv = BatchTableEnvironment.create(fbEnv); // ********************** import 
org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; -import org.apache.flink.table.api.java.StreamTableEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; StreamExecutionEnvironment bsEnv = StreamExecutionEnvironment.getExecutionEnvironment(); EnvironmentSettings bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build(); @@ -205,7 +205,7 @@ TableEnvironment bbTableEnv = TableEnvironment.create(bbSettings); // ********************** import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment import org.apache.flink.table.api.EnvironmentSettings -import org.apache.flink.table.api.scala.StreamTableEnvironment +import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment val fsSettings = EnvironmentSettings.newInstance().useOldPlanner().inStreamingMode().build() val fsEnv = StreamExecutionEnvironment.getExecutionEnvironment @@ -216,7 +216,7 @@ val fsTableEnv = StreamTableEnvironment.create(fsEnv, fsSettings) // FLINK BATCH QUERY // ****************** import org.apache.flink.api.scala.ExecutionEnvironment -import org.apache.flink.table.api.scala.BatchTableEnvironment +import org.apache.flink.table.api.bridge.scala.BatchTableEnvironment val fbEnv = ExecutionEnvironment.getExecutionEnvironment val fbTableEnv = BatchTableEnvironment.create(fbEnv) @@ -226,7 +226,7 @@ val fbTableEnv = BatchTableEnvironment.create(fbEnv) // ********************** import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment import org.apache.flink.table.api.EnvironmentSettings -import org.apache.flink.table.api.scala.StreamTableEnvironment +import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment val bsEnv = StreamExecutionEnvironment.getExecutionEnvironment val bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build() @@ -552,7 +552,9 @@ val revenue = orders // execute query {% 
endhighlight %} -**Note:** The Scala Table API uses Scala Symbols, which start with a single tick (`'`) to reference the attributes of a `Table`. The Table API uses Scala implicits. Make sure to import `org.apache.flink.api.scala._` and `org.apache.flink.table.api.scala._` in order to use Scala implicit conversions. +**Note:** The Scala Table API uses Scala String interpolation that starts with a dollar sign (`$`) to reference the attributes of a `Table`. The Table API uses Scala implicits. Make sure to import +* `org.apache.flink.table.api._` - for implicit expression conversions +* `org.apache.flink.api.scala._` and `org.apache.flink.table.api.bridge.scala._` if you want to convert from/to DataStream. </div> <div data-lang="python" markdown="1"> @@ -879,7 +881,7 @@ This interaction can be achieved by converting a `DataStream` or `DataSet` into ### Implicit Conversion for Scala -The Scala Table API features implicit conversions for the `DataSet`, `DataStream`, and `Table` classes. These conversions are enabled by importing the package `org.apache.flink.table.api.scala._` in addition to `org.apache.flink.api.scala._` for the Scala DataStream API. +The Scala Table API features implicit conversions for the `DataSet`, `DataStream`, and `Table` classes. These conversions are enabled by importing the package `org.apache.flink.table.api.bridge.scala._` in addition to `org.apache.flink.api.scala._` for the Scala DataStream API. 
### Create a View from a DataStream or DataSet diff --git a/docs/dev/table/common.zh.md b/docs/dev/table/common.zh.md index 91aab89..831c19f 100644 --- a/docs/dev/table/common.zh.md +++ b/docs/dev/table/common.zh.md @@ -157,7 +157,7 @@ table_env.execute("python_job") // ********************** import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; -import org.apache.flink.table.api.java.StreamTableEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; EnvironmentSettings fsSettings = EnvironmentSettings.newInstance().useOldPlanner().inStreamingMode().build(); StreamExecutionEnvironment fsEnv = StreamExecutionEnvironment.getExecutionEnvironment(); @@ -168,7 +168,7 @@ StreamTableEnvironment fsTableEnv = StreamTableEnvironment.create(fsEnv, fsSetti // FLINK BATCH QUERY // ****************** import org.apache.flink.api.java.ExecutionEnvironment; -import org.apache.flink.table.api.java.BatchTableEnvironment; +import org.apache.flink.table.api.bridge.java.BatchTableEnvironment; ExecutionEnvironment fbEnv = ExecutionEnvironment.getExecutionEnvironment(); BatchTableEnvironment fbTableEnv = BatchTableEnvironment.create(fbEnv); @@ -178,7 +178,7 @@ BatchTableEnvironment fbTableEnv = BatchTableEnvironment.create(fbEnv); // ********************** import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.EnvironmentSettings; -import org.apache.flink.table.api.java.StreamTableEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; StreamExecutionEnvironment bsEnv = StreamExecutionEnvironment.getExecutionEnvironment(); EnvironmentSettings bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build(); @@ -205,7 +205,7 @@ TableEnvironment bbTableEnv = TableEnvironment.create(bbSettings); // ********************** import 
org.apache.flink.streaming.api.scala.StreamExecutionEnvironment import org.apache.flink.table.api.EnvironmentSettings -import org.apache.flink.table.api.scala.StreamTableEnvironment +import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment val fsSettings = EnvironmentSettings.newInstance().useOldPlanner().inStreamingMode().build() val fsEnv = StreamExecutionEnvironment.getExecutionEnvironment @@ -216,7 +216,7 @@ val fsTableEnv = StreamTableEnvironment.create(fsEnv, fsSettings) // FLINK BATCH QUERY // ****************** import org.apache.flink.api.scala.ExecutionEnvironment -import org.apache.flink.table.api.scala.BatchTableEnvironment +import org.apache.flink.table.api.bridge.scala.BatchTableEnvironment val fbEnv = ExecutionEnvironment.getExecutionEnvironment val fbTableEnv = BatchTableEnvironment.create(fbEnv) @@ -226,7 +226,7 @@ val fbTableEnv = BatchTableEnvironment.create(fbEnv) // ********************** import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment import org.apache.flink.table.api.EnvironmentSettings -import org.apache.flink.table.api.scala.StreamTableEnvironment +import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment val bsEnv = StreamExecutionEnvironment.getExecutionEnvironment val bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build() @@ -531,7 +531,9 @@ val revenue = orders // execute query {% endhighlight %} -**Note:** The Scala Table API uses Scala Symbols, which start with a single tick (`'`) to reference the attributes of a `Table`. The Table API uses Scala implicits. Make sure to import `org.apache.flink.api.scala._` and `org.apache.flink.table.api.scala._` in order to use Scala implicit conversions. +**Note:** The Scala Table API uses Scala String interpolation that starts with a dollar sign (`$`) to reference the attributes of a `Table`. The Table API uses Scala implicits. 
Make sure to import +* `org.apache.flink.table.api._` - for implicit expression conversions +* `org.apache.flink.api.scala._` and `org.apache.flink.table.api.bridge.scala._` if you want to convert from/to DataStream. </div> <div data-lang="python" markdown="1"> @@ -858,7 +860,7 @@ Table API 和 SQL 可以被很容易地集成并嵌入到 [DataStream]({{ site.b ### Scala 隐式转换 -Scala Table API 含有对 `DataSet`、`DataStream` 和 `Table` 类的隐式转换。 通过为 Scala DataStream API 导入 `org.apache.flink.table.api.scala._` 包以及 `org.apache.flink.api.scala._` 包,可以启用这些转换。 +Scala Table API 含有对 `DataSet`、`DataStream` 和 `Table` 类的隐式转换。 通过为 Scala DataStream API 导入 `org.apache.flink.table.api.bridge.scala._` 包以及 `org.apache.flink.api.scala._` 包,可以启用这些转换。 ### 通过 DataSet 或 DataStream 创建`视图` diff --git a/docs/dev/table/tableApi.md b/docs/dev/table/tableApi.md index 71977bd..e39af04 100644 --- a/docs/dev/table/tableApi.md +++ b/docs/dev/table/tableApi.md @@ -46,7 +46,6 @@ For the Expression DSL it is also necessary to import static `org.apache.flink.t {% highlight java %} import org.apache.flink.table.api.* -import org.apache.flink.table.api.java.* import static org.apache.flink.table.api.Expressions.* @@ -73,14 +72,14 @@ result.print(); <div data-lang="scala" markdown="1"> -The Scala Table API is enabled by importing `org.apache.flink.api.scala._` and `org.apache.flink.table.api.scala._`. +The Scala Table API is enabled by importing `org.apache.flink.table.api._`, `org.apache.flink.api.scala._`, and `org.apache.flink.table.api.bridge.scala._` (for bridging to/from DataStream). The following example shows how a Scala Table API program is constructed. Table fields are referenced using Scala's String interpolation using a dollar character (`$`). 
{% highlight scala %} import org.apache.flink.api.scala._ import org.apache.flink.table.api._ -import org.apache.flink.table.api.scala._ +import org.apache.flink.table.api.bridge.scala._ // environment configuration val env = ExecutionEnvironment.getExecutionEnvironment @@ -3158,6 +3157,6 @@ timeIndicator = fieldReference , "." , ( "proctime" | "rowtime" ) ; **Temporal intervals:** Temporal intervals can be represented as number of months (`Types.INTERVAL_MONTHS`) or number of milliseconds (`Types.INTERVAL_MILLIS`). Intervals of same type can be added or subtracted (e.g. `1.hour + 10.minutes`). Intervals of milliseconds can be added to time points (e.g. `"2016-08-10".toDate + 5.days`). -**Scala expressions:** Scala expressions use implicit conversions. Therefore, make sure to add the wildcard import `org.apache.flink.table.api.scala._` to your programs. In case a literal is not treated as an expression, use `.toExpr` such as `3.toExpr` to force a literal to be converted. +**Scala expressions:** Scala expressions use implicit conversions. Therefore, make sure to add the wildcard import `org.apache.flink.table.api._` to your programs. In case a literal is not treated as an expression, use `.toExpr` such as `3.toExpr` to force a literal to be converted. {% top %} diff --git a/docs/dev/table/tableApi.zh.md b/docs/dev/table/tableApi.zh.md index 25e2904..9db6055a 100644 --- a/docs/dev/table/tableApi.zh.md +++ b/docs/dev/table/tableApi.zh.md @@ -46,7 +46,6 @@ For the Expression DSL it is also necessary to import static `org.apache.flink.t {% highlight java %} import org.apache.flink.table.api.* -import org.apache.flink.table.api.java.* import static org.apache.flink.table.api.Expressions.* @@ -73,14 +72,14 @@ result.print(); <div data-lang="scala" markdown="1"> -The Scala Table API is enabled by importing `org.apache.flink.api.scala._` and `org.apache.flink.table.api.scala._`. 
+The Scala Table API is enabled by importing `org.apache.flink.table.api._`, `org.apache.flink.api.scala._`, and `org.apache.flink.table.api.bridge.scala._` (for bridging to/from DataStream). The following example shows how a Scala Table API program is constructed. Table fields are referenced using Scala's String interpolation using a dollar character (`$`). {% highlight scala %} import org.apache.flink.api.scala._ import org.apache.flink.table.api._ -import org.apache.flink.table.api.scala._ +import org.apache.flink.table.api.bridge.scala._ // environment configuration val env = ExecutionEnvironment.getExecutionEnvironment @@ -3157,6 +3156,6 @@ timeIndicator = fieldReference , "." , ( "proctime" | "rowtime" ) ; **Temporal intervals:** Temporal intervals can be represented as number of months (`Types.INTERVAL_MONTHS`) or number of milliseconds (`Types.INTERVAL_MILLIS`). Intervals of same type can be added or subtracted (e.g. `1.hour + 10.minutes`). Intervals of milliseconds can be added to time points (e.g. `"2016-08-10".toDate + 5.days`). -**Scala expressions:** Scala expressions use implicit conversions. Therefore, make sure to add the wildcard import `org.apache.flink.table.api.scala._` to your programs. In case a literal is not treated as an expression, use `.toExpr` such as `3.toExpr` to force a literal to be converted. +**Scala expressions:** Scala expressions use implicit conversions. Therefore, make sure to add the wildcard import `org.apache.flink.table.api._` to your programs. In case a literal is not treated as an expression, use `.toExpr` such as `3.toExpr` to force a literal to be converted. 
{% top %} diff --git a/docs/getting-started/walkthroughs/table_api.md b/docs/getting-started/walkthroughs/table_api.md index 0c99dbc..f1293fb 100644 --- a/docs/getting-started/walkthroughs/table_api.md +++ b/docs/getting-started/walkthroughs/table_api.md @@ -450,7 +450,7 @@ import org.apache.flink.walkthrough.common.table.TransactionTableSource; import org.apache.flink.streaming.api.TimeCharacteristic; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.Tumble; -import org.apache.flink.table.api.java.StreamTableEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; public class SpendReport { @@ -482,8 +482,8 @@ package spendreport import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment import org.apache.flink.streaming.api.TimeCharacteristic -import org.apache.flink.table.api.Tumble -import org.apache.flink.table.api.scala._ +import org.apache.flink.table.api._ +import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment import org.apache.flink.walkthrough.common.table._ object SpendReport { diff --git a/docs/getting-started/walkthroughs/table_api.zh.md b/docs/getting-started/walkthroughs/table_api.zh.md index f6b23a4..3194ae8 100644 --- a/docs/getting-started/walkthroughs/table_api.zh.md +++ b/docs/getting-started/walkthroughs/table_api.zh.md @@ -451,7 +451,7 @@ import org.apache.flink.walkthrough.common.table.TransactionTableSource; import org.apache.flink.streaming.api.TimeCharacteristic; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.Tumble; -import org.apache.flink.table.api.java.StreamTableEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; public class SpendReport { @@ -484,7 +484,8 @@ package spendreport import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment import org.apache.flink.streaming.api.TimeCharacteristic import 
org.apache.flink.table.api.Tumble -import org.apache.flink.table.api.scala._ +import org.apache.flink.table.api._ +import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment import org.apache.flink.walkthrough.common.table._ object SpendReport {