phet commented on code in PR #3776: URL: https://github.com/apache/gobblin/pull/3776#discussion_r1326690781
########## gobblin-service/src/main/java/org/apache/gobblin/service/modules/orchestration/DagTaskStream.java: ########## @@ -0,0 +1,275 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.gobblin.service.modules.orchestration; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.BlockingDeque; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; + +import com.codahale.metrics.Timer; +import com.google.common.base.Optional; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigException; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +import org.apache.gobblin.config.ConfigBuilder; +import org.apache.gobblin.configuration.ConfigurationKeys; +import org.apache.gobblin.instrumented.Instrumented; +import org.apache.gobblin.metrics.event.TimingEvent; +import org.apache.gobblin.runtime.api.DagActionStore; +import org.apache.gobblin.runtime.api.FlowSpec; +import org.apache.gobblin.runtime.api.SpecNotFoundException; +import 
org.apache.gobblin.runtime.spec_catalog.FlowCatalog; +import org.apache.gobblin.service.ExecutionStatus; +import org.apache.gobblin.service.FlowId; +import org.apache.gobblin.service.modules.flowgraph.Dag; +import org.apache.gobblin.service.modules.spec.JobExecutionPlan; +import org.apache.gobblin.service.modules.utils.FlowCompilationValidationHelper; +import org.apache.gobblin.service.monitoring.JobStatus; +import org.apache.gobblin.service.monitoring.JobStatusRetriever; + +import static org.apache.gobblin.service.ExecutionStatus.ORCHESTRATED; +import static org.apache.gobblin.service.ExecutionStatus.valueOf; + + +/** + * Holds a stream of {@link DagTask} that needs to be processed by the {@link DagManager}. + * It provides an implementation for {@link DagManagement}, which defines the rules for a flow and job. + * Implements {@link Iterator} to provide the next {@link DagTask}, if available, to {@link DagManager}. + */ +@WorkInProgress +@Slf4j +public class DagTaskStream implements Iterator<Optional<DagTask>>, DagManagement { + @Getter + private final BlockingDeque<DagTask> taskStream = new LinkedBlockingDeque<>(); + private JobStatusRetriever jobStatusRetriever; + private Optional<Timer> jobStatusPolledTimer; + + private DagManagerMetrics dagManagerMetrics; + + private DagManagementStateStore dagManagementStateStore; + + private Long defaultJobStartSlaTimeMillis; + private FlowTriggerHandler flowTriggerHandler; + private Optional<DagActionStore> dagActionStore; + private DagStateStore failedDagStateStore; + private FlowCompilationValidationHelper flowCompilationValidationHelper; + private FlowCatalog flowCatalog = new FlowCatalog(ConfigBuilder.create().build()); + + //TODO: add ctor for instantiating the attributes (will be handled in the subsequent PR) + + @Override + public boolean hasNext() { + return !taskStream.isEmpty(); + } + + @Override + public Optional<DagTask> next() { + + DagTask dagTask = taskStream.peek(); + + try { + 
if(flowTriggerHandler.attemptDagTaskLeaseAcquisition(dagTask)) { + return Optional.of(taskStream.poll()); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + return Optional.absent(); + } + + @Override + public void launchFlow(LaunchDagTask launchDagTask) { + long triggerTimeStamp = System.currentTimeMillis(); + FlowId flowId = new FlowId().setFlowGroup(launchDagTask.flowGroup).setFlowName(launchDagTask.flowName); + try { + URI flowUri = FlowSpec.Utils.createFlowSpecUri(flowId); + FlowSpec spec = (FlowSpec) flowCatalog.getSpecs(flowUri); + Optional<Dag<JobExecutionPlan>> optionalJobExecutionPlanDag = + this.flowCompilationValidationHelper.createExecutionPlanIfValid(spec); + launchDagTask.initialize(optionalJobExecutionPlanDag.get().getNodes(), triggerTimeStamp); + this.taskStream.offer(launchDagTask); + } catch (URISyntaxException e) { + log.warn("Could not create URI object for flowId {} due to exception {}", flowId, e.getMessage()); + } catch (SpecNotFoundException e) { + log.warn("Spec not found for flowId {} due to exception {}", flowId, e.getMessage()); + } catch (IOException e) { + log.warn("Failed to add Job Execution Plan for flowId {} OR delete dag action from dagActionStore (check " + + "stacktrace) due to exception {}", flowId, e.getMessage()); + } catch (InterruptedException e) { + log.warn("SpecCompiler failed to reach healthy state before compilation of flowId {}. 
Exception: ", flowId, e); + } + } + + @Override + public void resumeFlow(ResumeDagTask resumeDagTask) throws IOException { + + long triggerTimeStamp = System.currentTimeMillis(); + String dagId = resumeDagTask.resumeDagId.toString(); + Dag<JobExecutionPlan> dag = this.failedDagStateStore.getDag(dagId); + if (dag == null) { + log.error("Dag " + dagId + " was found in memory but not found in failed dag state store"); + return; + } + resumeDagTask.initialize(dag.getNodes(), triggerTimeStamp); + this.taskStream.offer(resumeDagTask); + + } + + @Override + public void killFlow(KillDagTask killDagTask) { + long triggerTimeStamp = System.currentTimeMillis(); + Map<String, Dag<JobExecutionPlan>> dags = this.dagManagementStateStore.getDags(); + String killDagId = killDagTask.killDagId.toString(); + if(!dags.containsKey(killDagId)) { + log.info("Invalid dag since not present in map. Hence cannot cancel it"); + return; + } + Dag<JobExecutionPlan> killDag = dags.get(killDagId); + killDagTask.initialize(killDag.getNodes(), triggerTimeStamp); + this.taskStream.offer(killDagTask); + + } + /** + * Check if the SLA is configured for the flow this job belongs to. + * If it is, this method will try to cancel the job when SLA is reached. 
+ * + * @param node dag node of the job + * @return true if the job is killed because it reached sla + * @throws ExecutionException exception + * @throws InterruptedException exception + */ + @Override + public boolean enforceFlowCompletionDeadline(Dag.DagNode<JobExecutionPlan> node) throws ExecutionException, InterruptedException { + long flowStartTime = DagManagerUtils.getFlowStartTime(node); + long currentTime = System.currentTimeMillis(); + String dagId = DagManagerUtils.generateDagId(node).toString(); + + long flowSla; + if (this.dagManagementStateStore.getDagToSLA().containsKey(dagId)) { + flowSla = this.dagManagementStateStore.getDagToSLA().get(dagId); + } else { + try { + flowSla = DagManagerUtils.getFlowSLA(node); Review Comment: this is way too much processing/impl. a task is meant to be a light-weight indicator of something to be done. the corresponding dag proc then encapsulates how to actually do that work, by talking to the relevant state stores etc. at this early stage, the `DagTask` not only hasn't been turned into a `DagProc`, but it hasn't even been pulled from the task stream. this code, comprising "how" to enforce the deadline belongs inside a `DagProc`. ditto for many of the other methods of this class -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected]
