diff --git a/datasophon-api/src/main/resources/meta/DDP-1.2.2/HDFS/service_ddl.json b/datasophon-api/src/main/resources/meta/DDP-1.2.2/HDFS/service_ddl.json index 0aa29cfe..3cc08f15 100644 --- a/datasophon-api/src/main/resources/meta/DDP-1.2.2/HDFS/service_ddl.json +++ b/datasophon-api/src/main/resources/meta/DDP-1.2.2/HDFS/service_ddl.json @@ -390,6 +390,24 @@ "hadoopHome", "rangerAdminUrl" ] + }, + { + "filename": "whitelist", + "configFormat": "custom", + "outputDirectory": "etc/hadoop/", + "templateName": "properties3.ftl", + "includeParams": [ + + ] + }, + { + "filename": "blacklist", + "configFormat": "custom", + "outputDirectory": "etc/hadoop/", + "templateName": "properties3.ftl", + "includeParams": [ + + ] } ] }, diff --git a/datasophon-api/src/main/resources/meta/DDP-1.2.2/KYUUBI/service_ddl.json b/datasophon-api/src/main/resources/meta/DDP-1.2.2/KYUUBI/service_ddl.json index 825e325f..ec0eb682 100644 --- a/datasophon-api/src/main/resources/meta/DDP-1.2.2/KYUUBI/service_ddl.json +++ b/datasophon-api/src/main/resources/meta/DDP-1.2.2/KYUUBI/service_ddl.json @@ -2,11 +2,11 @@ "name": "KYUUBI", "label": "Kyuubi", "description": "统一多租户JDBC网关", - "version": "1.7.3", + "version": "1.7.4", "sortNum": 30, "dependencies":[], - "packageName": "kyuubi-1.7.3.tar.gz", - "decompressPackageName": "kyuubi-1.7.3", + "packageName": "apache-kyuubi-1.7.4-bin.tgz", + "decompressPackageName": "apache-kyuubi-1.7.4-bin", "roles": [ { "name": "KyuubiServer", @@ -19,6 +19,17 @@ "cardinality": "1+", "jmxPort": "10019", "logFile": "logs/kyuubi-server-${host}.out", + "resourceStrategies":[{ + "type": "append_line", + "source": "bin/kyuubi", + "line": 206, + "text": " exit 1" + },{ + "type": "append_line", + "source": "bin/kyuubi", + "line": 210, + "text": " exit 1" + }], "startRunner": { "timeout": "60", "program": "bin/kyuubi", @@ -67,12 +78,32 @@ "configFormat": "properties2", "outputDirectory": "conf", "includeParams": [ - "kyuubi.ha.zookeeper.namespace", - 
"kyuubi.ha.zookeeper.quorum", + "kyuubi.ha.addresses", + "kyuubi.ha.namespace", "kyuubi.session.idle.timeout", "kyuubi.session.engine.idle.timeout", "kyuubi.session.engine.initialize.timeout", "spark.master", + "spark.submit.deployMode", + "spark.driver.memory", + "spark.executor.memory", + "spark.executor.cores", + "spark.dynamicAllocation.enabled", + "spark.shuffle.service.enabled", + "spark.shuffle.service.port", + "spark.dynamicAllocation.initialExecutors", + "spark.dynamicAllocation.minExecutors", + "spark.dynamicAllocation.maxExecutors", + "spark.dynamicAllocation.executorAllocationRatio", + "spark.dynamicAllocation.executorIdleTimeout", + "spark.dynamicAllocation.cachedExecutorIdleTimeout", + "spark.dynamicAllocation.shuffleTracking.enabled", + "spark.dynamicAllocation.shuffleTracking.timeout", + "spark.dynamicAllocation.schedulerBacklogTimeout", + "spark.dynamicAllocation.sustainedSchedulerBacklogTimeout", + "spark.cleaner.periodicGC.interval", + "flink.execution.target", + "kyuubi.session.engine.flink.max.rows", "kyuubi.metrics.reporters", "kyuubi.metrics.prometheus.port", "kyuubi.session.engine.spark.showProgress", @@ -102,26 +133,26 @@ }, "parameters": [ { - "name": "kyuubi.ha.zookeeper.quorum", + "name": "kyuubi.ha.addresses", "label": "zookeeper服务信息", "description": "zookeeper服务信息", "required": true, "type": "input", - "value": "", + "value": "${zkUrls}", "configurableInWizard": true, "hidden": false, - "defaultValue": "" + "defaultValue": "${zkUrls}" }, { - "name": "kyuubi.ha.zookeeper.namespace", + "name": "kyuubi.ha.namespace", "label": "zookeeper目录", "description": "zookeeper目录", "required": true, "type": "input", - "value": "", + "value": "kyuubi", "configurableInWizard": true, "hidden": false, - "defaultValue": "" + "defaultValue": "kyuubi" }, { "name": "kyuubi.session.idle.timeout", @@ -156,6 +187,204 @@ "hidden": false, "defaultValue": "yarn" }, + { + "name": "spark.submit.deployMode", + "label": "配置spark部署模式", + "description": "配置spark部署模式", + 
"required": true, + "type": "input", + "value": "cluster", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "cluster" + }, + { + "name": "spark.driver.memory", + "label": "配置spark-driver运行内存", + "description": "配置spark-driver运行内存", + "required": true, + "type": "input", + "value": "2g", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "2g" + }, + { + "name": "spark.executor.memory", + "label": "配置spark-executor运行内存", + "description": "配置spark-executor运行内存", + "required": true, + "type": "input", + "value": "3g", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "3g" + }, + { + "name": "spark.executor.cores", + "label": "配置spark-executor运行核数", + "description": "配置spark-executor运行核数", + "required": true, + "type": "input", + "value": "2", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "2" + }, + { + "name": "spark.shuffle.service.enabled", + "label": "启用spark辅助shuffle服务", + "description": "启用spark辅助shuffle服务", + "required": true, + "type": "switch", + "value": true, + "configurableInWizard": true, + "hidden": false, + "defaultValue": true + }, + { + "name": "spark.shuffle.service.port", + "label": "spark辅助shuffle服务端口", + "description": "spark辅助shuffle服务端口", + "required": false, + "type": "input", + "value": "7337", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "7337" + }, + { + "name": "spark.dynamicAllocation.enabled", + "label": "启用spark动态资源分配", + "description": "启用spark动态资源分配", + "required": true, + "type": "switch", + "value": true, + "configurableInWizard": true, + "hidden": false, + "defaultValue": true + }, + { + "name": "spark.dynamicAllocation.initialExecutors", + "label": "初始executor数量", + "description": "初始executor数量", + "required": false, + "type": "input", + "value": "5", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "5" + }, + { + "name": "spark.dynamicAllocation.minExecutors", + "label": "executor数量下限", + "description": 
"executor数量下限", + "required": false, + "type": "input", + "value": "5", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "5" + }, + { + "name": "spark.dynamicAllocation.maxExecutors", + "label": "executor数量上限", + "description": "executor数量上限", + "required": false, + "type": "input", + "value": "200", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "200" + }, + { + "name": "spark.dynamicAllocation.executorIdleTimeout", + "label": "executor空闲时间", + "description": "executor空闲时间", + "required": false, + "type": "input", + "value": "60s", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "60s" + }, + { + "name": "spark.dynamicAllocation.cachedExecutorIdleTimeout", + "label": "executor缓存空闲时间", + "description": "executor缓存空闲时间", + "required": false, + "type": "input", + "value": "30min", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "30min" + }, + { + "name": "spark.dynamicAllocation.schedulerBacklogTimeout", + "label": "当task到来时,开始分配executor的时间间隔", + "description": "当task到来时,开始分配executor的时间间隔", + "required": false, + "type": "input", + "value": "1s", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "1s" + }, + { + "name": "spark.dynamicAllocation.sustainedSchedulerBacklogTimeout", + "label": "分配executor后,再次申请executor的时间间隔", + "description": "分配executor后,再次申请executor的时间间隔", + "required": false, + "type": "input", + "value": "1s", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "1s" + }, + { + "name": "spark.dynamicAllocation.shuffleTracking.enabled", + "label": "启用作业的 Shuffle 动态分配跟踪", + "description": "启用作业的 Shuffle 动态分配跟踪", + "required": true, + "type": "switch", + "value": false, + "configurableInWizard": true, + "hidden": false, + "defaultValue": false + }, + { + "name": "spark.dynamicAllocation.shuffleTracking.timeout", + "label": "Shuffle 动态分配跟踪的超时时间", + "description": "Shuffle 动态分配跟踪的超时时间", + "required": false, + "type": "input", + "value": 
"30min", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "30min" + }, + { + "name": "flink.execution.target", + "label": "Flink部署模式", + "description": "Flink部署模式", + "required": true, + "type": "input", + "value": "yarn-session", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "yarn-session" + }, + { + "name": "kyuubi.session.engine.flink.max.rows", + "label": "Flink查询结果最大行数", + "description": "Flink查询结果最大行数", + "required": true, + "type": "input", + "value": "5", + "configurableInWizard": true, + "hidden": false, + "defaultValue": "5" + }, { "name": "kyuubi.metrics.reporters", "label": "监控输出格式", @@ -223,30 +452,6 @@ "hidden": false, "defaultValue": "/usr/local/jdk1.8.0_333" }, - { - "name": "sparkHome", - "label": "spark安装目录", - "description": "spark安装目录", - "configType": "map", - "required": true, - "type": "input", - "value": "/opt/datasophon/spark-3.1.3/", - "configurableInWizard": true, - "hidden": false, - "defaultValue": "/opt/datasophon/spark-3.1.3/" - }, - { - "name": "hadoopConfDir", - "label": "hadoop配置目录", - "description": "hadoop配置目录", - "configType": "map", - "required": true, - "type": "input", - "value": "/opt/datasophon/hadoop/etc/hadoop", - "configurableInWizard": true, - "hidden": false, - "defaultValue": "/opt/datasophon/hadoop/etc/hadoop" - }, { "name": "kyuubiServerHeapSize", "label": "KyuubiServerjvm内存", diff --git a/datasophon-api/src/main/resources/meta/DDP-1.2.2/SEATUNNEL/service_ddl.json b/datasophon-api/src/main/resources/meta/DDP-1.2.2/SEATUNNEL/service_ddl.json index 761ae59e..62bc3237 100644 --- a/datasophon-api/src/main/resources/meta/DDP-1.2.2/SEATUNNEL/service_ddl.json +++ b/datasophon-api/src/main/resources/meta/DDP-1.2.2/SEATUNNEL/service_ddl.json @@ -18,7 +18,7 @@ "type": "replace", "source": "config/seatunnel-env.sh", "regex":"\/opt\/spark", - "replacement": "/opt/datasophon/spark" + "replacement": "/opt/datasophon/spark3" },{ "type": "replace", "source": "config/seatunnel-env.sh", diff 
--git a/datasophon-worker/src/main/resources/script/datasophon-env.sh b/datasophon-worker/src/main/resources/script/datasophon-env.sh index 7c6c421a..723e8def 100644 --- a/datasophon-worker/src/main/resources/script/datasophon-env.sh +++ b/datasophon-worker/src/main/resources/script/datasophon-env.sh @@ -3,7 +3,7 @@ CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar export JAVA_HOME CLASSPATH export KYUUBI_HOME=/opt/datasophon/kyuubi -export SPARK_HOME=/opt/datasophon/spark +export SPARK_HOME=/opt/datasophon/spark3 export PYSPARK_ALLOW_INSECURE_GATEWAY=1 export HIVE_HOME=/opt/datasophon/hive export KAFKA_HOME=/opt/datasophon/kafka diff --git a/datasophon-worker/src/main/resources/templates/kyuubi-env.ftl b/datasophon-worker/src/main/resources/templates/kyuubi-env.ftl index 670a9e1e..16da6555 100644 --- a/datasophon-worker/src/main/resources/templates/kyuubi-env.ftl +++ b/datasophon-worker/src/main/resources/templates/kyuubi-env.ftl @@ -61,11 +61,12 @@ export KYUUBI_BEELINE_OPTS="-Xmx${kyuubiClientHeapSize}g -XX:+UnlockDiagnosticVM #jdk export JAVA_HOME=${javaHome} #spark engine -export SPARK_HOME=${sparkHome} +export SPARK_HOME=/opt/datasophon/spark3/ +export FLINK_HOME=/opt/datasophon/flink/ #hadoop config -export HADOOP_CONF_DIR=${hadoopConfDir} -export YARN_CONF_DIR=${hadoopConfDir} +export HADOOP_CONF_DIR=/opt/datasophon/hadoop/etc/hadoop +export YARN_CONF_DIR=/opt/datasophon/hadoop/etc/hadoop # customer env <#list itemList as item>