[ https://issues.apache.org/jira/browse/FLINK-2901?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14998400#comment-14998400 ]

ASF GitHub Bot commented on FLINK-2901:
---------------------------------------

Github user zentol commented on a diff in the pull request:

    https://github.com/apache/flink/pull/1306#discussion_r44392775
  
    --- Diff: flink-tests/src/test/java/org/apache/flink/test/iterative/IterationTerminationWithTwoTails.java ---
    @@ -1,134 +0,0 @@
    -/*
    - * Licensed to the Apache Software Foundation (ASF) under one
    - * or more contributor license agreements.  See the NOTICE file
    - * distributed with this work for additional information
    - * regarding copyright ownership.  The ASF licenses this file
    - * to you under the Apache License, Version 2.0 (the
    - * "License"); you may not use this file except in compliance
    - * with the License.  You may obtain a copy of the License at
    - *
    - *     http://www.apache.org/licenses/LICENSE-2.0
    - *
    - * Unless required by applicable law or agreed to in writing, software
    - * distributed under the License is distributed on an "AS IS" BASIS,
    - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    - * See the License for the specific language governing permissions and
    - * limitations under the License.
    - */
    -
    -package org.apache.flink.test.iterative;
    -
    -import java.io.Serializable;
    -import java.util.Iterator;
    -
    -import org.apache.flink.api.common.Plan;
    -import org.apache.flink.api.java.record.functions.MapFunction;
    -import org.apache.flink.api.java.record.functions.ReduceFunction;
    -import org.apache.flink.api.java.record.io.CsvOutputFormat;
    -import org.apache.flink.api.java.record.io.TextInputFormat;
    -import org.apache.flink.api.java.record.operators.BulkIteration;
    -import org.apache.flink.api.java.record.operators.FileDataSink;
    -import org.apache.flink.api.java.record.operators.FileDataSource;
    -import org.apache.flink.api.java.record.operators.MapOperator;
    -import org.apache.flink.api.java.record.operators.ReduceOperator;
    -import org.apache.flink.test.util.RecordAPITestBase;
    -import org.apache.flink.types.Record;
    -import org.apache.flink.types.StringValue;
    -import org.apache.flink.util.Collector;
    -import org.junit.Assert;
    -
    -@SuppressWarnings("deprecation")
    -public class IterationTerminationWithTwoTails extends RecordAPITestBase {
    -
    -   private static final String INPUT = "1\n" + "2\n" + "3\n" + "4\n" + "5\n";
    -   private static final String EXPECTED = "22\n";
    -
    -   protected String dataPath;
    -   protected String resultPath;
    -
    -   public IterationTerminationWithTwoTails(){
    -           setTaskManagerNumSlots(parallelism);
    -   }
    -
    -   @Override
    -   protected void preSubmit() throws Exception {
    -           dataPath = createTempFile("datapoints.txt", INPUT);
    -           resultPath = getTempFilePath("result");
    -   }
    -   
    -   @Override
    -   protected void postSubmit() throws Exception {
    -           compareResultsByLinesInMemory(EXPECTED, resultPath);
    -   }
    -
    -   @Override
    -   protected Plan getTestJob() {
    -           return getTestPlanPlan(parallelism, dataPath, resultPath);
    -   }
    -   
    -   private static Plan getTestPlanPlan(int numSubTasks, String input, String output) {
    -
    -           FileDataSource initialInput = new FileDataSource(TextInputFormat.class, input, "input");
    -           
    -           BulkIteration iteration = new BulkIteration("Loop");
    -           iteration.setInput(initialInput);
    -           iteration.setMaximumNumberOfIterations(5);
    -           Assert.assertTrue(iteration.getMaximumNumberOfIterations() > 1);
    -
    -           ReduceOperator sumReduce = ReduceOperator.builder(new SumReducer())
    -                           .input(iteration.getPartialSolution())
    -                           .name("Compute sum (Reduce)")
    -                           .build();
    -           
    -           iteration.setNextPartialSolution(sumReduce);
    -           
    -           MapOperator terminationMapper = MapOperator.builder(new TerminationMapper())
    -                           .input(iteration.getPartialSolution())
    --- End diff --
    
    so THAT was the difference, will port it!
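
A rough sketch of how that two-tails shape might look once ported to the DataSet API (not the actual PR code; SumReducer and TerminationMapper below are simplified placeholders): the termination criterion branches off the partial solution itself rather than off the next partial solution, and both tails are passed to closeWith().

    import org.apache.flink.api.common.functions.FlatMapFunction;
    import org.apache.flink.api.common.functions.GroupReduceFunction;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.operators.IterativeDataSet;
    import org.apache.flink.util.Collector;

    public class IterationTerminationWithTwoTailsSketch {

        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<String> initialInput = env.fromElements("1", "2", "3", "4", "5");

            IterativeDataSet<String> iteration = initialInput.iterate(5);

            // first tail: the next partial solution
            DataSet<String> sum = iteration.reduceGroup(new SumReducer());

            // second tail: the termination criterion, built from the partial solution
            // (iteration) rather than from sum -- the difference noted in the diff above
            DataSet<String> termination = iteration.flatMap(new TerminationMapper());

            // the iteration stops when the termination criterion becomes empty,
            // or after the maximum of 5 iterations
            iteration.closeWith(sum, termination).print();
        }

        // simplified placeholder: sums all values into a single record
        public static class SumReducer implements GroupReduceFunction<String, String> {
            @Override
            public void reduce(Iterable<String> values, Collector<String> out) {
                int sum = 0;
                for (String value : values) {
                    sum += Integer.parseInt(value);
                }
                out.collect(String.valueOf(sum));
            }
        }

        // simplified placeholder: keeps emitting records while the value stays small,
        // so the criterion never empties and the iteration runs to the maximum
        public static class TerminationMapper implements FlatMapFunction<String, String> {
            @Override
            public void flatMap(String value, Collector<String> out) {
                if (Integer.parseInt(value) < 100) {
                    out.collect(value);
                }
            }
        }
    }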


> Several flink-test ITCases depend on Record API features
> --------------------------------------------------------
>
>                 Key: FLINK-2901
>                 URL: https://issues.apache.org/jira/browse/FLINK-2901
>             Project: Flink
>          Issue Type: Sub-task
>          Components: Tests
>    Affects Versions: 0.10
>            Reporter: Fabian Hueske
>            Assignee: Chesnay Schepler
>
> There are several ITCases and utility classes in {{flink-tests}} that depend 
> on the Record API including:
> - ITCases for Record API operators in 
> {{flink-tests/src/test/java/org/apache/flink/test/operators}}
> - ITCases for Record API programs in 
> {{flink-tests/src/test/java/org/apache/flink/test/recordJobTests}}
> - Record API programs in 
> {{flink-tests/src/test/java/org/apache/flink/test/recordJobs}}
> - Several ITCases for iterations in 
> {{flink-tests/src/test/java/org/apache/flink/test/iterative}}
> - Tests for job canceling in 
> {{flink-tests/src/test/java/org/apache/flink/test/cancelling}}
> - Test for failing jobs in 
> {{flink-tests/src/test/java/org/apache/flink/test/failingPrograms/TaskFailureITCase}}
> - Optimizer tests in 
> {{flink-tests/src/test/java/org/apache/flink/test/optimizer}}
> - Accumulator test in 
> {{flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorIterativeITCase}}
> - Broadcast test in 
> {{flink-tests/src/test/java/org/apache/flink/test/broadcastvars/BroadcastBranchingITCase}}
> - Distributed cache test in 
> {{flink-tests/src/test/java/org/apache/flink/test/distributedCache/DistributedCacheTest}}
> and probably a few more.



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
