[Addon kubevela#603] Add Apache Spark as an experimental addon
Signed-off-by: yanghua <[email protected]>
yanghua committed Feb 27, 2023
1 parent 9b1db74 commit 91dcf66
Showing 1 changed file with 13 additions and 2 deletions.
@@ -1,7 +1,7 @@
-"spark-cluster": {
+"spark-application": {
 	annotations: {}
 	attributes: workload: type: "autodetects.core.oam.dev"
-	description: "Spark Component."
+	description: "Describes a containerized spark application that can specify resource spec."
 	labels: {}
 	type: "component"
 }
@@ -32,6 +32,17 @@ template: {
 	driverCores: int
 	// +usage=Specify the number of CPU cores to request for the executor pod
 	executorCores: int
+	// +usage=Specify a list of arguments to be passed to the application
+	arguments?: [...string]
+	// +usage=Specify Spark configuration properties, written as they would be passed with the "--conf" option in spark-submit
+	sparkConf?: [string]: string
+	// +usage=Specify Hadoop configuration properties, written as they would be passed with the "--conf" option in spark-submit. The SparkApplication controller automatically adds the prefix "spark.hadoop." to Hadoop configuration properties
+	hadoopConf?: [string]: string
+	// +usage=Specify the name of the ConfigMap containing Spark configuration files such as log4j.properties. The controller will add the environment variable SPARK_CONF_DIR pointing to the path where the ConfigMap is mounted
+	sparkConfigMap?: string
+	// +usage=Specify the name of the ConfigMap containing Hadoop configuration files such as core-site.xml. The controller will add the environment variable HADOOP_CONF_DIR pointing to the path where the ConfigMap is mounted
+	hadoopConfigMap?: string
+
 	}

 	output: {
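For reference, here is a minimal sketch of parameter values a user could supply to the extended spark-application component. It assumes these fields sit in the component's parameter block (as the surrounding template suggests); the field names come from the schema above, while every concrete value below is purely illustrative:

	parameter: {
		driverCores:   1
		executorCores: 2
		// extra arguments passed to the Spark application (hypothetical values)
		arguments: ["--input", "/opt/spark/data/input"]
		// Spark properties, equivalent to spark-submit --conf
		sparkConf: {"spark.eventLog.enabled": "true"}
		// Hadoop properties; the controller prefixes each key with "spark.hadoop."
		hadoopConf: {"fs.defaultFS": "hdfs://namenode:8020"}
		// ConfigMaps mounted by the controller; SPARK_CONF_DIR / HADOOP_CONF_DIR point at the mount paths
		sparkConfigMap:  "spark-conf-files"
		hadoopConfigMap: "hadoop-conf-files"
	}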
