mesosphere / universe

The Mesosphere Universe package repository.
http://mesosphere.github.io/universe
Apache License 2.0
304 stars 426 forks source link

Release beta-spark 2.12.0-3.0.1-beta (automated commit) #2595

Closed mesosphere-ci closed 3 years ago

mesosphere-ci commented 3 years ago

Release beta-spark 2.12.0-3.0.1-beta (automated commit)

Description: Source URL: https://infinity-artifacts.s3.amazonaws.com/permanent/spark/2.12.0-3.0.1-beta/stub-universe-spark.json

Changes between revisions 2 => 3: 0 files added: [] 0 files removed: [] 4 files changed:

--- 2/config.json
+++ 3/config.json
@@ -7,8 +7,9 @@
       "properties": {
         "name": {
           "default": "spark",
-          "description": "The Spark Dispatcher will register with Mesos with this as a framework name.  This service will be available at http://<dcos_url>/service/<name>/",
-          "type": "string"
+          "description": "The Spark Dispatcher will register with Mesos with this as a framework name.  This service will be available at https://<dcos-url>/service/<name>/",
+          "type": "string",
+          "pattern": "^(\\/?((\\.\\.)|(([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])\\.)*([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9]))?($|\\/))+$"
         },
         "cpus": {
           "default": 1,
@@ -23,48 +24,110 @@
           "type": "number"
         },
         "role": {
-          "description": "The Spark Dispatcher will register with Mesos with this role.",
+          "description": "The Spark Dispatcher will register with Mesos with this role. In case Marathon group role is enforced, this setting defaults to the group role.",
           "type": "string",
           "default": "*"
+        },
+        "enforce_role": {
+          "description": "When enabled, Spark Dispatcher will reject all submissions which attempt to override the role the Dispatcher is using itself. In case Marathon group role is enforced, this setting defaults to 'true'.",
+          "type": "boolean",
+          "default": false
         },
         "service_account": {
           "description": "The Spark Dispatcher will register with Mesos with this principal.",
           "type": "string",
-          "default": ""
+          "default": "",
+          "media": {
+            "type": "application/x-service-account+string"
+          }
         },
         "service_account_secret": {
           "description": "The Spark Dispatcher will register with mesos with this secret.",
           "type": "string",
-          "default": ""
+          "default": "",
+          "media": {
+            "type": "application/x-secret+string"
+          }
+        },
+        "constraints": {
+          "type": "array",
+          "items": {
+            "type": "array",
+            "items": [
+              {
+                "type": "string"
+              }
+            ]
+          },
+          "description": "Placement constraint for the Spark Dispatcher",
+          "default": []
         },
         "user": {
           "description": "Executors will run as this user.",
           "type": "string",
-          "default": "root"
+          "default": "nobody"
         },
         "docker-image": {
           "type": "string",
-          "description": "The docker image used to run the dispatcher, drivers, and executors.  If, for example, you need a Spark built with a specific Hadoop version, set this variable to one of the images here: https://hub.docker.com/r/mesosphere/spark/tags/",
-          "default": "mesosphere/spark:2.3.1-2.2.1-2-beta-hadoop-2.6"
+          "description": "The docker image used to run the dispatcher, drivers, and executors. If no image is specified mesosphere/spark:2.12.0-3.0.1-scala-2.12-hadoop-3.2 will be used. If, for example, you need a Spark built with a specific Hadoop version, set this variable to one of the images here: https://hub.docker.com/r/mesosphere/spark/tags/",
+          "default": ""
         },
         "log-level": {
           "type": "string",
-          "description": "log4j log level for The Spark Dispatcher.  May be set to any valid log4j log level: https://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/Level.html",
-          "default": "INFO"
+          "description": "The log level for the Spark Dispatcher.",
+          "default": "INFO",
+          "enum": [
+            "OFF",
+            "FATAL",
+            "ERROR",
+            "WARN",
+            "INFO",
+            "DEBUG",
+            "TRACE",
+            "ALL"
+          ]
+        },
+        "virtual_network_enabled": {
+          "description": "Enable virtual networking",
+          "type": "boolean",
+          "default": false
+        },
+        "virtual_network_name": {
+          "description": "The name of the virtual network to join",
+          "type": "string",
+          "default": "dcos"
+        },
+        "virtual_network_plugin_labels": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "key": {
+                "type": "string",
+                "default": ""
+              },
+              "value": {
+                "type": "string",
+                "default": ""
+              }
+            }
+          },
+          "description": "Labels to pass to the virtual network plugin (e.g., [{\"key\": \"key_1\", \"value\": \"value_1\"}, {\"key\": \"key_2\", \"value\": \"value_2\"}])",
+          "default": []
         },
         "spark-history-server-url": {
           "type": "string",
-          "description": "URL of The Spark History Server (e.g. http://<dcos_url>/service/spark-history"
+          "description": "URL of The Spark History Server (e.g. https://<dcos-url>/service/spark-history )"
         },
         "UCR_containerizer": {
           "type": "boolean",
           "description": "Launch the Dispatcher using the Universal Container Runtime (UCR)",
-          "default": false
+          "default": true
         },
-        "use_bootstrap_for_IP_detect": {
-          "type": "boolean",
-          "description": "Use the bootstrap utility for detecting host IP as opposed to using Spark's internal mechanism, see troubleshooting.md.",
-          "default": false
+        "docker_user": {
+          "type": "string",
+          "description": "Specify the integer UID for the Linux user, overriding service.user, when running the Spark Driver and Executor containers with the Docker Engine. Directly translates to docker run '--user' parameter. Note: This should typically be set to 99 when running as nobody (default) on RHEL/CentOS.",
+          "default": ""
         }
       }
     },
--- 2/marathon.json.mustache
+++ 3/marathon.json.mustache
@@ -3,64 +3,89 @@
     "cpus": {{service.cpus}},
     "mem": {{service.mem}},
     "cmd": "/sbin/init.sh",
+    "constraints": {{service.constraints}},
+    {{#service.user}}
     "user": "{{service.user}}",
+    {{/service.user}}
+
     "env": {
-{{#security.kerberos.enabled}}
-        {{#security.kerberos.krb5conf}}
+        {{#security.kerberos.enabled}}
+            {{#security.kerberos.krb5conf}}
             "SPARK_MESOS_KRB5_CONF_BASE64": "{{security.kerberos.krb5conf}}",
-        {{/security.kerberos.krb5conf}}
-        {{#security.kerberos.kdc.hostname}}
+            {{/security.kerberos.krb5conf}}
+            {{#security.kerberos.kdc.hostname}}
             "SPARK_SECURITY_KERBEROS_KDC_HOSTNAME": "{{security.kerberos.kdc.hostname}}",
-        {{/security.kerberos.kdc.hostname}}
-        {{#security.kerberos.kdc.port}}
+            {{/security.kerberos.kdc.hostname}}
+            {{#security.kerberos.kdc.port}}
             "SPARK_SECURITY_KERBEROS_KDC_PORT": "{{security.kerberos.kdc.port}}",
-        {{/security.kerberos.kdc.port}}
-        {{#security.kerberos.realm}}
+            {{/security.kerberos.kdc.port}}
+            {{#security.kerberos.realm}}
             "SPARK_SECURITY_KERBEROS_REALM": "{{security.kerberos.realm}}",
-        {{/security.kerberos.realm}}
-{{/security.kerberos.enabled}}
-
-{{#service.use_bootstrap_for_IP_detect}}
-{{/service.use_bootstrap_for_IP_detect}}
-{{^service.use_bootstrap_for_IP_detect}}
-        "SKIP_BOOTSTRAP_IP_DETECT": "true",
-{{/service.use_bootstrap_for_IP_detect}}
-
-{{#service.service_account_secret}}
+            {{/security.kerberos.realm}}
+        {{/security.kerberos.enabled}}
+
+        {{#service.virtual_network_enabled}}
+        "VIRTUAL_NETWORK_ENABLED": "true",
+        "VIRTUAL_NETWORK_NAME": "{{service.virtual_network_name}}",
+            {{^service.virtual_network_plugin_labels.isEmpty}}
+            "VIRTUAL_NETWORK_PLUGIN_LABELS": "{{#service.virtual_network_plugin_labels}}{{key}}:{{value}},{{/service.virtual_network_plugin_labels}}",
+            {{/service.virtual_network_plugin_labels.isEmpty}}
+        "VIRTUAL_NETWORK_LABELS": "{{service.virtual_network_name}}",
+        "SPARK_BIND_ADDRESS_DETECTION_METHOD": "hostname=ip-address",
+        {{/service.virtual_network_enabled}}
+        {{#service.service_account_secret}}
         "DCOS_SERVICE_ACCOUNT_CREDENTIAL": { "secret": "serviceCredential" },
         "MESOS_MODULES": "{\"libraries\": [{\"file\": \"libdcos_security.so\", \"modules\": [{\"name\": \"com_mesosphere_dcos_ClassicRPCAuthenticatee\"}]}]}",
         "MESOS_AUTHENTICATEE": "com_mesosphere_dcos_ClassicRPCAuthenticatee",
-{{/service.service_account_secret}}
-
-{{#service.spark-history-server-url}}
+        {{/service.service_account_secret}}
+
+        {{#service.spark-history-server-url}}
         "SPARK_HISTORY_SERVER_URL": "{{service.spark-history-server-url}}",
-{{/service.spark-history-server-url}}
-        "JAVA_HOME": "/usr/lib/jvm/jre1.8.0_152",
-        "LD_LIBRARY_PATH": "/opt/mesosphere/lib:/opt/mesosphere/libmesos-bundle/lib:/usr/lib",
+        {{/service.spark-history-server-url}}
         "DCOS_SERVICE_NAME": "{{service.name}}",
         "SPARK_HDFS_CONFIG_URL": "{{hdfs.config-url}}",
         "SPARK_USER": "{{service.user}}",
+        {{#service.docker_user}}
+        "SPARK_DOCKER_USER": "{{service.docker_user}}",
+        {{/service.docker_user}}
         "SPARK_DISPATCHER_MESOS_ROLE": "{{service.role}}",
+        "SPARK_DISPATCHER_ENFORCE_ROLE": "{{service.enforce_role}}",
         "SPARK_DISPATCHER_MESOS_PRINCIPAL": "{{service.service_account}}",
-        "SPARK_DISPATCHER_MESOS_SECRET": "{{service.secret}}",
+        "SPARK_DISPATCHER_MESOS_SECRET": "{{service.service_account_secret}}",
         "SPARK_LOG_LEVEL": "{{service.log-level}}"
     },

-{{#service.service_account_secret}}
+    {{#service.service_account_secret}}
     "secrets": {
         "serviceCredential": {
             "source": "{{service.service_account_secret}}"
         }
     },
-{{/service.service_account_secret}}
-
+    {{/service.service_account_secret}}
+    {{#service.virtual_network_enabled}}
+    "networks": [
+        {
+          "name": "{{service.virtual_network_name}}",
+          {{^service.virtual_network_plugin_labels.isEmpty}}
+          "labels": {
+            {{#service.virtual_network_plugin_labels}}
+            "{{key}}": "{{value}}",
+            {{/service.virtual_network_plugin_labels}}
+            "network_name": "{{service.virtual_network_name}}"
+          },
+          {{/service.virtual_network_plugin_labels.isEmpty}}
+          "mode": "container"
+        }
+    ],
+    {{/service.virtual_network_enabled}}
+    {{^service.virtual_network_enabled}}
     "portDefinitions": [
         {
             "port": 0,
             "protocol": "tcp",
             "name": "dispatcher",
             "labels": {
-                "VIP_0": "spark-dispatcher:7077"
+                "VIP_0": "dispatcher.{{service.name}}:7077"
             }
         },
         {
@@ -68,7 +93,7 @@
             "protocol": "tcp",
             "name": "dispatcher-ui",
             "labels": {
-                "VIP_1": "spark-dispatcher:4040"
+                "VIP_1": "dispatcher.{{service.name}}:4040"
             }
         },
         {
@@ -76,54 +101,89 @@
             "protocol": "tcp",
             "name": "dispatcher-proxy",
             "labels": {
-                "VIP_2": "spark-dispatcher:80"
+                "VIP_2": "dispatcher.{{service.name}}:80"
             }
         }
     ],
-
-{{#service.UCR_containerizer}}
+    {{/service.virtual_network_enabled}}
     "container": {
-    "type": "MESOS",
-    "docker": {
-    {{#service.docker-image}}
-        "image": "{{service.docker-image}}",
-    {{/service.docker-image}}
-    {{^service.docker-image}}
-        "image": "{{resource.assets.container.docker.spark_docker}}",
-    {{/service.docker-image}}
-    "forcePullImage": true
-    }
-    },
-{{/service.UCR_containerizer}}
-{{^service.UCR_containerizer}}
-    "container": {
-    "type": "DOCKER",
-    "docker": {
-    {{#service.docker-image}}
-        "image": "{{service.docker-image}}",
-    {{/service.docker-image}}
-    {{^service.docker-image}}
-        "image": "{{resource.assets.container.docker.spark_docker}}",
-    {{/service.docker-image}}
-    "network": "HOST",
-    {{#service.user}}
-        "parameters": [
-        {
-        "key": "user",
-        "value": "{{service.user}}"
-        }
-        ],
-    {{/service.user}}
-    "forcePullImage": true
-    }
-    },
-{{/service.UCR_containerizer}}
-
+        {{#service.UCR_containerizer}}
+        "type": "MESOS",
+        "docker": {
+            {{#service.docker-image}}
+            "image": "{{service.docker-image}}",
+            {{/service.docker-image}}
+            {{^service.docker-image}}
+            "image": "{{resource.assets.container.docker.spark_docker}}",
+            {{/service.docker-image}}
+            "forcePullImage": true
+        }
+        {{/service.UCR_containerizer}}
+        {{^service.UCR_containerizer}}
+        "type": "DOCKER",
+        "docker": {
+            {{#service.docker-image}}
+                "image": "{{service.docker-image}}",
+            {{/service.docker-image}}
+            {{^service.docker-image}}
+                "image": "{{resource.assets.container.docker.spark_docker}}",
+            {{/service.docker-image}}
+            {{^service.virtual_network_enabled}}
+                "network": "HOST",
+            {{/service.virtual_network_enabled}}
+            {{#service.docker_user}}
+                "parameters": [
+                    {
+                        "key": "user",
+                        "value": "{{service.docker_user}}"
+                    }
+                ],
+            {{/service.docker_user}}
+            {{^service.docker_user}}
+                "parameters": [
+                    {
+                        "key": "user",
+                        "value": "{{service.user}}"
+                    }
+                ],
+            {{/service.docker_user}}
+            "forcePullImage": true
+        }
+        {{/service.UCR_containerizer}}
+        {{#service.virtual_network_enabled}}
+        , "portMappings": [
+            {
+                "containerPort": 7077,
+                "protocol": "tcp",
+                "name": "dispatcher",
+                "labels": {
+                    "VIP_0": "dispatcher.{{service.name}}:7077"
+                }
+            },
+            {
+                "containerPort": 4040,
+                "protocol": "tcp",
+                "name": "dispatcher-ui",
+                "labels": {
+                    "VIP_1": "dispatcher.{{service.name}}:4040"
+                }
+            },
+            {
+                "containerPort": 8080,
+                "protocol": "tcp",
+                "name": "dispatcher-proxy",
+                "labels": {
+                    "VIP_2": "dispatcher.{{service.name}}:8080"
+                }
+            }
+          ]
+        {{/service.virtual_network_enabled}}
+    },
     "healthChecks": [
         {
-            "portIndex": 2,
             "protocol": "MESOS_HTTP",
             "path": "/",
+            "portIndex": 2,
             "gracePeriodSeconds": 5,
             "intervalSeconds": 60,
             "timeoutSeconds": 10,
@@ -141,27 +201,24 @@
         "inactiveAfterSeconds": 0
     },
     "labels": {
-{{#hdfs.config-url}}
-        "SPARK_HDFS_CONFIG_URL": "{{hdfs.config-url}}",
-{{/hdfs.config-url}}
+        {{#hdfs.config-url}}
+            "SPARK_HDFS_CONFIG_URL": "{{hdfs.config-url}}",
+        {{/hdfs.config-url}}
         "SPARK_URI": "{{service.spark-dist-uri}}",
         "DCOS_PACKAGE_FRAMEWORK_NAME": "{{service.name}}",
         "DCOS_SERVICE_NAME": "{{service.name}}",
         "DCOS_SERVICE_PORT_INDEX": "2",
-{{#security.ssl.enabled}}
-        "DCOS_SERVICE_SCHEME": "https"
-{{/security.ssl.enabled}}
-{{^security.ssl.enabled}}
-        "DCOS_SERVICE_SCHEME": "http"
-{{/security.ssl.enabled}}
-    },
-{{#service.user}}
-    "user": "{{service.user}}",
-{{/service.user}}
+        {{#security.ssl.enabled}}
+            "DCOS_SERVICE_SCHEME": "https"
+        {{/security.ssl.enabled}}
+        {{^security.ssl.enabled}}
+            "DCOS_SERVICE_SCHEME": "http"
+        {{/security.ssl.enabled}}
+    },
     "uris": [
     {{#hdfs.config-url}}
-    "{{hdfs.config-url}}/hdfs-site.xml",
-    "{{hdfs.config-url}}/core-site.xml"
+        "{{hdfs.config-url}}/hdfs-site.xml",
+        "{{hdfs.config-url}}/core-site.xml"
     {{/hdfs.config-url}}
     ]
 }
--- 2/package.json
+++ 3/package.json
@@ -1,26 +1,33 @@
 {
-  "packagingVersion": "3.0",
-  "postInstallNotes": "DC/OS Spark is being installed!\n\n\tDocumentation: https://docs.mesosphere.com/service-docs/spark/\n\tIssues: https://docs.mesosphere.com/support/",
-  "scm": "https://github.com/apache/spark.git",
+  "packagingVersion": "4.0",
+  "name": "beta-spark",
+  "version": "2.12.0-3.0.1-beta",
+  "scm": "https://github.com/mesosphere/spark-build.git",
   "maintainer": "support@mesosphere.io",
-  "postUninstallNotes": "The Apache Spark DC/OS Service has been uninstalled and will no longer run.\nPlease follow the instructions at https://docs.mesosphere.com/service-docs/spark/uninstall/ to clean up any persisted state.",
-  "name": "beta-spark",
-  "description": "Spark is a fast and general cluster computing system for Big Data.  Documentation: https://docs.mesosphere.com/service-docs/spark/",
+  "website": "https://docs.mesosphere.com/services/spark/2.12.0-3.0.1/",
+  "description": "Apache Spark is a unified analytics engine for large-scale data processing. Documentation: https://docs.mesosphere.com/services/spark/2.12.0-3.0.1/",
+  "framework": true,
+  "upgradesFrom": [
+    "2.3.1-2.2.1-2-beta"
+  ],
+  "downgradesTo": [
+    "2.3.1-2.2.1-2-beta"
+  ],
+  "preInstallNotes": "Default configuration requires 1 agent node with: 1 CPU | 1024 MB MEM",
+  "postInstallNotes": "DC/OS Spark is being installed!\n\n\tDocumentation: https://docs.mesosphere.com/services/spark/2.12.0-3.0.1/\n\tIssues: https://docs.mesosphere.com/support/",
+  "postUninstallNotes": "The Apache Spark DC/OS Service has been uninstalled and will no longer run.\nPlease follow the instructions at https://docs.mesosphere.com/services/spark/2.12.0-3.0.1/uninstall/ to clean up any persisted state.",
+  "tags": [
+    "bigdata",
+    "mapreduce",
+    "batch",
+    "analytics"
+  ],
+  "selected": false,
   "licenses": [
     {
       "name": "Apache License Version 2.0",
       "url": "https://raw.githubusercontent.com/apache/spark/master/LICENSE"
     }
   ],
-  "tags": [
-    "bigdata",
-    "mapreduce",
-    "batch",
-    "analytics"
-  ],
-  "website": "https://docs.mesosphere.com/service-docs/spark/",
-  "selected": false,
-  "version": "2.3.1-2.2.1-2-beta",
-  "minDcosReleaseVersion": "1.9",
-  "lastUpdated": 1571120354
-}
+  "minDcosReleaseVersion": "1.11"
+}
--- 2/resource.json
+++ 3/resource.json
@@ -2,14 +2,14 @@
   "assets": {
     "container": {
       "docker": {
-        "spark_docker": "mesosphere/spark:2.3.1-2.2.1-2-beta-hadoop-2.6"
+        "spark_docker": "mesosphere/spark:2.12.0-3.0.1-scala-2.12-hadoop-3.2"
       }
     }
   },
   "images": {
-    "icon-medium": "https://downloads.mesosphere.com/assets/universe/000/beta-spark-icon-medium.png",
-    "icon-small": "https://downloads.mesosphere.com/assets/universe/000/beta-spark-icon-small.png",
-    "icon-large": "https://downloads.mesosphere.com/assets/universe/000/beta-spark-icon-large.png"
+    "icon-medium": "https://downloads.mesosphere.io/spark/assets/icon-service-spark-medium.png",
+    "icon-small": "https://downloads.mesosphere.io/spark/assets/icon-service-spark-small.png",
+    "icon-large": "https://downloads.mesosphere.io/spark/assets/icon-service-spark-large.png"
   },
   "cli": {
     "binaries": {
@@ -18,11 +18,11 @@
           "contentHash": [
             {
               "algo": "sha256",
-              "value": "e61619468e90d98e996748d57786c8b03698fe52769d577e9caaecd8525f04a8"
+              "value": "a724ebf0d1c1dfc31b7f630a087d575f59d3672b507361703f0da1335cc61bfd"
             }
           ],
           "kind": "executable",
-          "url": "https://downloads.mesosphere.com/spark/assets/2.3.1-2.2.1-2-beta/dcos-spark-darwin"
+          "url": "https://downloads.mesosphere.com/spark/assets/2.12.0-3.0.1-beta/dcos-spark-darwin"
         }
       },
       "linux": {
@@ -30,11 +30,11 @@
           "contentHash": [
             {
               "algo": "sha256",
-              "value": "c5300860d6ba55da0abc953f11beab47da6304130acd9589faf96a6ad2de2ad9"
+              "value": "3ef41ceda4a024274dd48f3d6a271771c0bb2015e0a80f738c4cf2386b128635"
             }
           ],
           "kind": "executable",
-          "url": "https://downloads.mesosphere.com/spark/assets/2.3.1-2.2.1-2-beta/dcos-spark-linux"
+          "url": "https://downloads.mesosphere.com/spark/assets/2.12.0-3.0.1-beta/dcos-spark-linux"
         }
       },
       "windows": {
@@ -42,11 +42,11 @@
           "contentHash": [
             {
               "algo": "sha256",
-              "value": "82c15f1f651d532893357d561284346215516885094b6e948aaf943f5e63457c"
+              "value": "ec354bbbb9a8b3ab81e3b6e4691eb9a6fe3d7bb6dbc33ef583e73d30c9016f79"
             }
           ],
           "kind": "executable",
-          "url": "https://downloads.mesosphere.com/spark/assets/2.3.1-2.2.1-2-beta/dcos-spark.exe"
+          "url": "https://downloads.mesosphere.com/spark/assets/2.12.0-3.0.1-beta/dcos-spark.exe"
         }
       }
     }
DominikDary commented 3 years ago

🎉