[
https://issues.apache.org/jira/browse/FLINK-3551?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15836687#comment-15836687
]
ASF GitHub Bot commented on FLINK-3551:
---------------------------------------
Github user fhueske commented on a diff in the pull request:
https://github.com/apache/flink/pull/2761#discussion_r97657755
--- Diff:
flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/ml/IncrementalLearningSkeleton.scala
---
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.scala.examples.ml
+
+import java.util.concurrent.TimeUnit
+
+import org.apache.flink.api.java.utils.ParameterTool
+import org.apache.flink.api.scala._
+import org.apache.flink.streaming.api.TimeCharacteristic
+import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks
+import org.apache.flink.streaming.api.functions.co.CoMapFunction
+import org.apache.flink.streaming.api.functions.source.SourceFunction
+import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext
+import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
+import org.apache.flink.streaming.api.scala.function.AllWindowFunction
+import org.apache.flink.streaming.api.watermark.Watermark
+import org.apache.flink.streaming.api.windowing.time.Time
+import org.apache.flink.streaming.api.windowing.windows.TimeWindow
+import org.apache.flink.util.Collector
+
+/**
+ * Skeleton for an incremental machine learning algorithm consisting of a
+ * pre-computed model, which gets updated with the new inputs, and new input
+ * data for which the job provides predictions.
+ *
+ * <p>
+ * This may serve as a base of a number of algorithms, e.g. updating an
+ * incremental Alternating Least Squares model while also providing the
+ * predictions.
+ *
+ * <p>
+ * This example shows how to use:
+ * <ul>
+ * <li>Connected streams
+ * <li>CoFunctions
+ * <li>Tuple data types
+ * </ul>
+ */
+object IncrementalLearningSkeleton {
+
+ // *************************************************************************
+ // PROGRAM
+ // *************************************************************************
+
+ def main(args: Array[String]): Unit = {
+ // Checking input parameters
+ val params = ParameterTool.fromArgs(args)
+
+ // set up the execution environment
+ val env = StreamExecutionEnvironment.getExecutionEnvironment
+ env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
+
+ // build a new model on every 5 seconds of new data
+ val trainingData = env.addSource(new FiniteTrainingDataSource)
+ val newData = env.addSource(new FiniteNewDataSource)
+
+ val model = trainingData
+ .assignTimestampsAndWatermarks(new LinearTimestamp)
+ .timeWindowAll(Time.of(5000, TimeUnit.MILLISECONDS))
+ .apply(new PartialModelBuilder)
+
+ // use partial model for newData
+ val prediction = newData.connect(model).map(
+ (_: Int) => 0,
+ (_: Array[Double]) => 1
+ )
+
+ // emit result
+ if (params.has("output")) {
+ prediction.writeAsText(params.get("output"))
+ } else {
+ println("Printing result to stdout. Use --output to specify output
path.")
+ prediction.print()
+ }
+
+ // execute program
+ env.execute("Streaming Incremental Learning")
+ }
+
+ // *************************************************************************
+ // USER FUNCTIONS
+ // *************************************************************************
+
+ /**
+ * Feeds new data for newData. By default it is implemented as constantly
+ * emitting the Integer 1 in a loop.
+ */
+ private class FiniteNewDataSource extends SourceFunction[Int] {
+ var counter: Int = 0
+
+ override def run(ctx: SourceContext[Int]) = {
+ Thread.sleep(15)
+ while (counter < 50) {
+ ctx.collect(getNewData)
+ }
+ }
+
+ def getNewData = {
+ Thread.sleep(5)
+ counter += 1
+ 1
+ }
+
+ override def cancel() = {
+ // No cleanup needed
+ }
+ }
+
+ /**
+ * Feeds new training data for the partial model builder. By default it is
+ * implemented as constantly emitting the Integer 1 in a loop.
+ */
+ private class FiniteTrainingDataSource extends SourceFunction[Int] {
+ var counter = 0
+
+ override def run(ctx: SourceContext[Int]) = {
+ while (counter < 8200) ctx.collect(getTrainingData)
--- End diff ---
simplify to
```
(0 until 8200).foreach( _ => ctx.collect(1) )
```
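For illustration only, a minimal sketch of how FiniteTrainingDataSource could look with the suggested simplification applied. It assumes the SourceFunction and SourceContext imports already present in the diff; the 8200 count and the emitted value 1 are taken from the original loop.
```
// Sketch only: the counter-driven while loop and the getTrainingData helper
// replaced by a bounded foreach, as suggested above.
private class FiniteTrainingDataSource extends SourceFunction[Int] {

  override def run(ctx: SourceContext[Int]): Unit = {
    // Emit the constant training value 1 exactly 8200 times,
    // matching the behavior of the original loop.
    (0 until 8200).foreach(_ => ctx.collect(1))
  }

  override def cancel(): Unit = {
    // No cleanup needed
  }
}
```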
> Sync Scala and Java Streaming Examples
> --------------------------------------
>
> Key: FLINK-3551
> URL: https://issues.apache.org/jira/browse/FLINK-3551
> Project: Flink
> Issue Type: Sub-task
> Components: Examples
> Affects Versions: 1.0.0
> Reporter: Stephan Ewen
> Assignee: Lim Chee Hau
>
> The Scala Examples lag behind the Java Examples
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)