mxm closed pull request #7276: [FLINK-10566] Fix exponential planning time of large programs
URL: https://github.com/apache/flink/pull/7276
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

diff --git a/flink-core/src/main/java/org/apache/flink/api/common/Plan.java b/flink-core/src/main/java/org/apache/flink/api/common/Plan.java
index efbc4fac039..32eed69e2ce 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/Plan.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/Plan.java
@@ -23,6 +23,7 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -361,10 +362,14 @@ public int getMaximumParallelism() {
        
        private static final class MaxDopVisitor implements Visitor<Operator<?>> {
 
+               private final Set<Operator> visitedOperators = new HashSet<>();
                private int maxDop = -1;
-               
+
                @Override
                public boolean preVisit(Operator<?> visitable) {
+                       if (!visitedOperators.add(visitable)) {
+                               return false;
+                       }
                        this.maxDop = Math.max(this.maxDop, visitable.getParallelism());
                        return true;
                }
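
For context on the change above: plans produced by the DataSet API are DAGs,
so an operator that feeds several downstream operators is reachable through
several paths. Without remembering which operators were already seen, a
visitor re-enters such a shared operator once per path, and the number of
paths grows exponentially with plan depth; returning false from preVisit for
an already-visited operator prunes the walk to one visit per operator. A
minimal standalone sketch of the idea (illustrative names only, not Flink
code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/** Toy DAG node with a depth-first walk, mirroring Flink's Visitor pattern. */
final class Node {
    final String name;
    final List<Node> inputs = new ArrayList<>();

    Node(String name, Node... inputs) {
        this.name = name;
        this.inputs.addAll(Arrays.asList(inputs));
    }

    /** Descends into the inputs only if the visitor's preVisit returns true. */
    void accept(NodeVisitor visitor) {
        if (visitor.preVisit(this)) {
            for (Node input : inputs) {
                input.accept(visitor);
            }
        }
    }
}

interface NodeVisitor {
    boolean preVisit(Node node);
}

public class DedupVisitorSketch {
    public static void main(String[] args) {
        // Build a ladder of diamonds: each level has two nodes that both read
        // the two nodes of the previous level, so the number of sink-to-source
        // paths doubles with every level.
        Node left = new Node("src-l");
        Node right = new Node("src-r");
        for (int level = 0; level < 20; level++) {
            Node newLeft = new Node("l" + level, left, right);
            Node newRight = new Node("r" + level, left, right);
            left = newLeft;
            right = newRight;
        }
        Node sink = new Node("sink", left, right);

        // Naive visitor: every node is entered once per path, which is on the
        // order of millions of preVisit calls for 20 diamond levels.
        int[] naiveCalls = {0};
        sink.accept(node -> {
            naiveCalls[0]++;
            return true;
        });

        // Deduplicating visitor (the same guard the MaxDopVisitor hunk adds):
        // every node is entered at most once, 43 calls in total here.
        Set<Node> seen = new HashSet<>();
        int[] dedupCalls = {0};
        sink.accept(node -> {
            if (!seen.add(node)) {
                return false; // already handled, do not descend again
            }
            dedupCalls[0]++;
            return true;
        });

        System.out.println("naive: " + naiveCalls[0] + ", dedup: " + dedupCalls[0]);
    }
}

The hunk above and the ExecutionEnvironment change below add this guard to
Flink's own Visitor callbacks.
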
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java b/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
index 22a2a93f066..beb1b65c4a5 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
@@ -73,6 +73,7 @@
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Set;
 
 import static org.apache.flink.util.Preconditions.checkNotNull;
 
@@ -963,12 +964,16 @@ public Plan createProgramPlan(String jobName, boolean clearSinks) {
                if (!config.isAutoTypeRegistrationDisabled()) {
                        plan.accept(new Visitor<org.apache.flink.api.common.operators.Operator<?>>() {
 
-                               private final HashSet<Class<?>> deduplicator = new HashSet<>();
+                               private final Set<Class<?>> registeredTypes = new HashSet<>();
+                               private final Set<org.apache.flink.api.common.operators.Operator<?>> visitedOperators = new HashSet<>();
 
                                @Override
                                public boolean preVisit(org.apache.flink.api.common.operators.Operator<?> visitable) {
+                                       if (!visitedOperators.add(visitable)) {
+                                               return false;
+                                       }
                                        OperatorInformation<?> opInfo = visitable.getOperatorInfo();
-                                       Serializers.recursivelyRegisterType(opInfo.getOutputType(), config, deduplicator);
+                                       Serializers.recursivelyRegisterType(opInfo.getOutputType(), config, registeredTypes);
                                        return true;
                                }
 
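
The type-registration visitor above walks the same operator DAG and gets the
identical guard. For a feel of how shared operators arise in user programs,
here is a rough sketch one might run locally (this assumes the Flink 1.7-era
DataSet API used in this PR; createProgramPlan is an internal method, so
treat this as illustration rather than supported usage):

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;

public class DiamondPlanSketch {

    public static void main(String[] args) {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // One shared source feeding branches that are unioned back together;
        // every extra layer multiplies the number of paths to the source.
        DataSet<String> current = env.fromElements("a", "b", "c");
        for (int layer = 0; layer < 10; layer++) {
            DataSet<String> evens = current.map(s -> s + "e").returns(Types.STRING);
            DataSet<String> odds = current.map(s -> s + "o").returns(Types.STRING);
            current = evens.union(odds);
        }
        current.output(new DiscardingOutputFormat<>());

        // Both translating the program and computing the maximum parallelism
        // traverse the operator DAG; with the visited-set guard every operator
        // is visited once instead of once per path.
        Plan plan = env.createProgramPlan("diamond plan");
        System.out.println("max parallelism: " + plan.getMaximumParallelism());
    }
}
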
diff --git a/flink-tests/src/test/java/org/apache/flink/test/planning/LargePlanTest.java b/flink-tests/src/test/java/org/apache/flink/test/planning/LargePlanTest.java
new file mode 100644
index 00000000000..6c30af88ad4
--- /dev/null
+++ b/flink-tests/src/test/java/org/apache/flink/test/planning/LargePlanTest.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.test.planning;
+
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.typeinfo.Types;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.api.java.io.DiscardingOutputFormat;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.client.program.OptimizerPlanEnvironment;
+import org.apache.flink.client.program.PreviewPlanEnvironment;
+
+import org.junit.Test;
+
+/**
+ * Tests that large programs can be compiled to a Plan in reasonable amount of time.
+ */
+public class LargePlanTest {
+
+       @Test(expected = OptimizerPlanEnvironment.ProgramAbortException.class, timeout = 15_000)
+       public void testPlanningOfLargePlan() throws Exception {
+               runProgram(new PreviewPlanEnvironment(), 10, 50);
+       }
+
+       private static void runProgram(ExecutionEnvironment env, int depth, int width) throws Exception {
+               DataSet<String> input = env.fromElements("a", "b", "c");
+               DataSet<String> stats = null;
+
+               for (int i = 0; i < depth; i++) {
+                       stats = analyze(input, stats, width / (i + 1) + 1);
+               }
+
+               stats.output(new DiscardingOutputFormat<>());
+               env.execute("depth " + depth + " width " + width);
+       }
+
+       private static DataSet<String> analyze(DataSet<String> input, DataSet<String> stats, int branches) {
+               for (int i = 0; i < branches; i++) {
+                       final int ii = i;
+
+                       if (stats != null) {
+                               input = input.map(
+                                       new RichMapFunction<String, String>() {
+                                               @Override
+                                               public String map(String value) {
+                                                       return value;
+                                               }
+                               }).withBroadcastSet(stats.map(s -> "(" + s + ").map"), "stats");
+                       }
+
+                       DataSet<String> branch = input
+                               .map(s -> new Tuple2<>(0, s + ii)).returns(Types.TUPLE(Types.STRING, Types.INT))
+                               .groupBy(0)
+                               .minBy(1)
+                               .map(kv -> kv.f1).returns(Types.STRING);
+                       if (stats == null) {
+                               stats = branch;
+                       } else {
+                               stats = stats.union(branch);
+                       }
+               }
+               return stats.map(s -> "(" + s + ").stats");
+       }
+}
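
A note on the new test: running the program in a PreviewPlanEnvironment means
execute() never submits a job; it builds the program plan and then aborts
with OptimizerPlanEnvironment.ProgramAbortException, which is why that
exception is the expected outcome of the test. The 15-second timeout is what
actually guards against a regression: with the exponential traversal this PR
removes, planning a program with this much operator sharing takes far longer
than that.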


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
