dongjoon-hyun commented on code in PR #12:
URL: https://github.com/apache/spark-kubernetes-operator/pull/12#discussion_r1677222344


##########
spark-operator/src/main/java/org/apache/spark/k8s/operator/config/SparkOperatorConf.java:
##########
@@ -0,0 +1,429 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.k8s.operator.config;
+
+import java.time.Duration;
+
+import io.javaoperatorsdk.operator.api.config.LeaderElectionConfiguration;
+import io.javaoperatorsdk.operator.processing.event.rate.LinearRateLimiter;
+import io.javaoperatorsdk.operator.processing.event.rate.RateLimiter;
+import io.javaoperatorsdk.operator.processing.retry.GenericRetry;
+import lombok.extern.slf4j.Slf4j;
+
+import org.apache.spark.k8s.operator.reconciler.SparkReconcilerUtils;
+
+/** Spark Operator Configuration options. */
+@Slf4j
+public class SparkOperatorConf {
+  public static final ConfigOption<String> OPERATOR_APP_NAME =
+      ConfigOption.<String>builder()
+          .key("spark.kubernetes.operator.name")
+          .typeParameterClass(String.class)
+          .description("Name of the operator.")
+          .defaultValue("spark-kubernetes-operator")
+          .enableDynamicOverride(false)
+          .build();
+
+  public static final ConfigOption<String> OPERATOR_NAMESPACE =
+      ConfigOption.<String>builder()
+          .key("spark.kubernetes.operator.namespace")
+          .typeParameterClass(String.class)
+          .description("Namespace that operator is deployed within.")
+          .defaultValue("default")
+          .enableDynamicOverride(false)
+          .build();
+
+  public static final ConfigOption<String> OPERATOR_WATCHED_NAMESPACES =
+      ConfigOption.<String>builder()
+          .key("spark.kubernetes.operator.watchedNamespaces")
+          .description(
+              "Comma-separated list of namespaces that the operator would be "
+                  + "watching for Spark resources. If unset, operator would "
+                  + "watch all namespaces by default.")
+          .defaultValue(null)
+          .typeParameterClass(String.class)
+          .build();
+
+  public static final ConfigOption<Boolean> TERMINATE_ON_INFORMER_FAILURE =
+      ConfigOption.<Boolean>builder()
+          .key("spark.kubernetes.operator.terminateOnInformerFailure")
+          .typeParameterClass(Boolean.class)
+          .description(
+              "Enable to indicate informer errors should stop operator 
startup. If "
+                  + "disabled, operator startup will ignore recoverable 
errors, "
+                  + "caused for example by RBAC issues and will retry "
+                  + "periodically.")
+          .defaultValue(false)
+          .enableDynamicOverride(false)
+          .build();
+
+  public static final ConfigOption<Integer> 
RECONCILER_TERMINATION_TIMEOUT_SECONDS =
+      ConfigOption.<Integer>builder()
+          .key("spark.kubernetes.operator.reconciler.terminationTimeoutSeconds")
+          .description(
+              "Grace period for operator shutdown before reconciliation 
threads are killed.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(30)
+          .build();
+
+  public static final ConfigOption<Integer> RECONCILER_PARALLELISM =
+      ConfigOption.<Integer>builder()
+          .key("spark.kubernetes.operator.reconciler.parallelism")
+          .description(
+              "Thread pool size for Spark Operator reconcilers. Use -1 for 
unbounded pool.")
+          .enableDynamicOverride(false)
+          .typeParameterClass(Integer.class)
+          .defaultValue(30)
+          .build();
+
+  public static final ConfigOption<Long> 
RECONCILER_FOREGROUND_REQUEST_TIMEOUT_SECONDS =
+      ConfigOption.<Long>builder()
+          .key("spark.kubernetes.operator.reconciler.foregroundRequestTimeoutSeconds")
+          .description(
+              "Timeout (in seconds) to for requests made to API server. this "
+                  + "applies only to foreground requests.")
+          .defaultValue(120L)
+          .typeParameterClass(Long.class)
+          .build();
+
+  public static final ConfigOption<Long> SPARK_APP_RECONCILE_INTERVAL_SECONDS =
+      ConfigOption.<Long>builder()
+          .key("spark.kubernetes.operator.reconciler.intervalSeconds")
+          .description(
+              "Interval (in seconds) to reconcile when application is is 
starting "
+                  + "up. Note that reconcile is always expected to be 
triggered "
+                  + "per update - this interval controls the reconcile 
behavior "
+                  + "when operator still need to reconcile even when there's 
no "
+                  + "update ,e.g. for timeout checks.")
+          .defaultValue(120L)
+          .typeParameterClass(Long.class)
+          .build();
+
+  public static final ConfigOption<Boolean> 
TRIM_ATTEMPT_STATE_TRANSITION_HISTORY =
+      ConfigOption.<Boolean>builder()
+          .key("spark.kubernetes.operator.reconciler.trimStateTransitionHistoryEnabled")

Review Comment:
   Where can we control the number of retained state transition history entries?
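   
   A minimal sketch of what a companion option could look like, reusing the ConfigOption builder pattern from this file; the option name, key, and default value below are hypothetical illustrations and are not defined in this PR:
   
       public static final ConfigOption<Integer> MAX_RETAINED_STATE_TRANSITION_HISTORY =
           ConfigOption.<Integer>builder()
               // Hypothetical key, shown only to illustrate the question above.
               .key("spark.kubernetes.operator.reconciler.maxRetainedStateTransitionHistoryEntries")
               .description("Maximum number of state transition history entries retained per attempt.")
               .typeParameterClass(Integer.class)
               .defaultValue(10)
               .build();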



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

