TrivadisPF / platys-modern-data-platform

Support for generating modern platforms dynamically with services such as Kafka, Spark, Streamsets, HDFS, ....
Apache License 2.0

Error when starting "schema-registry-ui" and "topic-ui" #281

Closed gschmutz closed 3 years ago

gschmutz commented 3 years ago

The kafka-topics-ui container fails on startup with the following output:

      Landoop Kafka Topics UI 0.9.4
      Visit https://github.com/Landoop/kafka-topics-ui/tree/master/docker
      to find more about how you can configure this container.

      Enabling proxy.
      Kafka REST Proxy URL to /api/kafka-rest-proxy.
      Note: if you use a PORT lower than 1024, please note that kafka-topics-ui can
      now run under any user. In the future a non-root user may become the default.
      In this case you will have to explicitly allow binding to such ports, either by
      setting the root user or something like '--sysctl net.ipv4.ip_unprivileged_port_start=0'.

      Activating privacy features... done.
      http://0.0.0.0:8000
      panic: runtime error: slice bounds out of range
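
For context, this is a rough sketch of how the two UIs named in the issue title are typically wired up in a Docker Compose stack; the service names, ports and image tags below are assumptions for illustration, not the compose file that platys actually generates:

      # Hypothetical excerpt, not generated by platys: shows the usual wiring of
      # kafka-topics-ui (via the Confluent REST proxy) and schema-registry-ui.
      services:
        kafka-rest-proxy:
          image: confluentinc/cp-kafka-rest:5.5.0
          environment:
            KAFKA_REST_HOST_NAME: kafka-rest-proxy
            KAFKA_REST_LISTENERS: http://0.0.0.0:8086
            KAFKA_REST_BOOTSTRAP_SERVERS: kafka-1:19092              # assumed broker address
            KAFKA_REST_SCHEMA_REGISTRY_URL: http://schema-registry-1:8081

        kafka-topics-ui:
          image: landoop/kafka-topics-ui:0.9.4
          ports:
            - "28001:8000"                                           # UI listens on 8000, as in the log
          environment:
            KAFKA_REST_PROXY_URL: http://kafka-rest-proxy:8086
            PROXY: "true"                                            # "Enabling proxy." in the log above

        schema-registry-ui:
          image: landoop/schema-registry-ui:0.9.5
          ports:
            - "28002:8000"
          environment:
            SCHEMAREGISTRY_URL: http://schema-registry-1:8081
            PROXY: "true"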
gschmutz commented 3 years ago

This is the config.yml which produces the error (the settings most relevant to the two failing UIs are repeated after the full file):

      # Default values for the generator
      # this file can be used as a template for a custom configuration
      # or to know about the different variables available for the generator
      platys:
          platform-name: 'platys-test'
          platform-stack: 'trivadis/platys-modern-data-platform'
          platform-stack-version: '1.10.0'
          structure: 'flat'
      # ========================================================================
      # Global configuration, valid for all or a group of services 
      # ========================================================================
      # Timezone, use a Linux string such as Europe/Zurich or America/New_York
      use_timezone: 'Europe/Zurich'
      # the name of the repository to use for private images, which are not on docker hub (currently only Oracle images)
      private_docker_repository_name: 'trivadis'
      # the UID to use when using the "user" property in a service to override the user inside the container
      uid: '1000'
      # ========================================================================
      # External Services
      # ========================================================================
      external:
          KAFKA_enable: false
          KAFKA_bootstrap_servers:
          KAFKA_username:
          KAFKA_password:
          SCHEMA_REGISTRY_enable: false
          SCHEMA_REGISTRY_url:
          S3_enable: false
          S3_endpoint:
          S3_default_region:
          S3_path_style_access: false
      # ========================================================================
      # Platys Services
      # ========================================================================
      PROVISIONING_DATA_enable: true
      #
      # ===== Apache Zookeeper ========
      #
      ZOOKEEPER_enable: false
      ZOOKEEPER_volume_map_data: false
      ZOOKEEPER_nodes: 1 # either 1 or 3
      #
      # ===== Apache Zookeeper Navigator ========
      #
      ZOOKEEPER_NAVIGATOR_enable: false
      #
      # ===== Apache Kafka ========
      #
      KAFKA_enable: true
      # one of enterprise, community
      KAFKA_edition: 'community'
      KAFKA_volume_map_data: false
      KAFKA_datacenters: 1
      KAFKA_broker_nodes: 3
      KAFKA_internal_replication_factor: 3
      KAFKA_delete_topic_enable: true
      KAFKA_auto_create_topics_enable: false
      KAFKA_message_timestamp_type: CreateTime
      # KAFKA_log_segment_bytes:                        
      # KAFKA_log_retention_ms:                         
      # KAFKA_log_retention_hours: 
      # KAFKA_log_retention_bytes:      
      # KAFKA_compression_type: 
      # KAFKA_min_insync_replicas:
      # KAFKA_replica_selector_class: org.apache.kafka.common.replica.RackAwareReplicaSelector
      KAFKA_confluent_log_placement_constraints: {}
      KAFKA_confluent_tier_feature: false
      KAFKA_confluent_tier_enable: false
      KAFKA_confluent_tier_backend: S3
      KAFKA_confluent_tier_s3_bucket: kafka-logs
      KAFKA_confluent_tier_s3_region: us-east-1
      KAFKA_confluent_tier_s3_aws_endpoint_override:
      KAFKA_confluent_tier_local_hotset_bytes:
      KAFKA_confluent_tier_local_hotset_ms:
      KAFKA_confluent_tier_archiver_num_threads:
      KAFKA_confluent_tier_fetcher_num_threads:
      KAFKA_confluent_tier_topic_delete_check_interval_ms:
      KAFKA_confluent_tier_metadata_replication_factor: 1
      #
      # ===== Confluent Schema Registry ========
      #
      KAFKA_SCHEMA_REGISTRY_enable: true
      KAFKA_SCHEMA_REGISTRY_nodes: 1
      KAFKA_SCHEMA_REGISTRY_use_zookeeper_election: false
      KAFKA_SCHEMA_REGISTRY_replication_factor: 1
      #
      # ===== Apicurio Schema Registry ========
      #
      APICURIO_REGISTRY_enable: false
      APICURIO_REGISTRY_nodes: 1
      #
      # ===== Kafka Connect ========
      #
      KAFKA_CONNECT_enable: false
      KAFKA_CONNECT_nodes: 2
      KAFKA_CONNECT_connectors:
      KAFKA_CONNECT_config_providers: 'file'
      KAFKA_CONNECT_config_providers_classes: 'org.apache.kafka.common.config.provider.FileConfigProvider'
      KAFKA_CONNECT_map_settings_file: false
      #
      # ===== Confluent ksqlDB ========
      #
      KAFKA_KSQLDB_enable: false
      KAFKA_KSQLDB_edition: 'oss'
      KAFKA_KSQLDB_nodes: 1
      KAFKA_KSQLDB_internal_replication_factor: 1
      KAFKA_KSQLDB_suppress_enabled: false
      KAFKA_KSQLDB_suppress_buffer_size_bytes: -1
      KAFKA_KSQLDB_queries_file: ''
      #
      # ===== Azkarra Streams ========
      #
      AZKARRA_enable: false
      #
      # ===== Confluent Control Center ========
      #
      KAFKA_CCC_enable: false
      #
      # ===== Confluent Replicator ========
      #
      KAFKA_REPLICATOR_enable: false
      #
      # ===== Confluent Rest Proxy ========
      #
      KAFKA_RESTPROXY_enable: false
      #
      # ===== Confluent MQTT Proxy ========
      #
      KAFKA_MQTTPROXY_enable: true
      KAFKA_MQTTPROXY_topic_regex_list: truck_position:.*position
      #
      # ===== Kafkacat ========
      #
      KAFKACAT_enable: false
      #
      # ===== Various Kafka UIs ========
      #
      KAFKA_SCHEMA_REGISTRY_UI_enable: true
      KAFKA_SCHEMA_REGISTRY_UI_use_public_ip: true
      KAFKA_TOPICS_UI_enable: true
      KAFKA_CONNECT_UI_enable: false
      #
      # ===== Cluster Manager for Apache Kafka ========
      #
      KAFKA_CMAK_enable: false
      KAFKA_CMAK_auth_enabled: "'false'"
      KAFKA_CMAK_username: admin
      KAFKA_CMAK_password: abc123!
      #
      # ===== Kafdrop ========
      #
      KAFKA_KAFDROP_enable: false
      #
      # ===== KAdmin ========
      #
      KAFKA_KADMIN_enable: false
      #
      # ===== Apache Kafka HQ ========
      #
      KAFKA_AKHQ_enable: true
      #
      # ===== Linkedin Burrow ========
      #
      KAFKA_BURROW_enable: false
      #
      # ===== Apache Hadoop ========
      #
      HADOOP_enable: false
      HADOOP_datanodes: 2
      #
      # ===== Apache Spark ========
      #
      SPARK_enable: true
      # either 2 or 3
      SPARK_major_version: 3
      # "hive" or "in-memory"
      SPARK_catalog: in-memory
      SPARK_workers: 2
      # to restrict the default cores, use '-Dspark.deploy.defaultCores=6'  
      SPARK_master_opts: ''
      SPARK_worker_cores:
      SPARK_worker_memory:
      SPARK_worker_opts: '-Dspark.worker.cleanup.enabled=true'
      SPARK_jars_repositories: ''
      SPARK_jars_packages: ''
      SPARK_jars_excludes: ''
      SPARK_jars: ''
      SPARK_jars_ivySettings: ''
      SPARK_driver_extraJavaOptions: ''
      SPARK_executor_extraJavaOptions: ''
      SPARK_cores_max:
      SPARK_executor_memory:
      # misc spark 'addons'
      SPARK_HISTORY_enable: false
      SPARK_THRIFT_enable: false
      #
      # ===== Apache Livy ========
      #
      LIVY_enable: false
      #
      # ===== Apache Hive ========
      #
      HIVE_SERVER_enable: false
      #
      # ===== Apache Hive Metastore ========
      #
      HIVE_METASTORE_enable: false
      #
      # ===== Apache Ranger ========
      #
      RANGER_enable: false
      RANGER_postgresql_volume_map_data: false
      #
      # ===== Apache Atlas ========
      #
      ATLAS_enable: false
      ATLAS_provision_atlas_sample_data: false
      ATLAS_provision_amundsen_sample_data: false
      ATLAS_install_hive_hook: false
      #
      # ===== Data Hub ========
      #
      DATAHUB_enable: false
      DATAHUB_volume_map_data: false
      #
      #===== Amundsen ========
      #
      AMUNDSEN_enable: false
      # one of 'amundsen' or 'atlas'
      AMUNDSEN_metastore: 'amundsen'
      #
      #===== Marquez ========
      #
      MARQUEZ_enable: false
      MARQUEZ_volume_map_data: false
      MARQUEZ_provision_marquez_sample_data: false
      #
      # ===== Hue ========
      #
      HUE_enable: false
      #
      # =====  Streamsets Data Collector ========
      #
      STREAMSETS_enable: true
      STREAMSETS_volume_map_data: false
      STREAMSETS_volume_map_logs: false
      STREAMSETS_volume_map_security_policy: false
      STREAMSETS_activate_https: false
      STREAMSETS_additional_port_mappings: 0
      # one of 'none', 'basic', 'digest', or 'form'
      STREAMSETS_http_authentication: ''
      #
      # =====  Streamsets Transformer ========
      #
      STREAMSETS_TRANSFORMER_enable: false
      STREAMSETS_TRANSFORMER_volume_map_data: false
      #
      # =====  Streamsets Edge ========
      #
      STREAMSETS_EDGE_enable: false
      STREAMSETS_EDGE_volume_map_data: false
      #
      # ===== Apache NiFi ========
      #
      NIFI_enable: false
      NIFI_volume_map_data: false
      NIFI_volume_map_logs: false
      NIFI_jvm_heap_init:
      NIFI_jvm_heap_max:
      #
      # ===== Apache NiFi Registry ========
      #      
      NIFI_REGISTRY_enable: false
      #
      # ===== Node Red  ========
      #
      NODERED_enable: false
      NODERED_volume_map_data: false
      #
      # ===== Streamsheets ========
      #
      STREAMSHEETS_enable: false
      #
      # ===== Sqoop ========
      #
      SQOOP_enable: false
      #
      # ===== Apache Airflow  ========
      #
      AIRFLOW_enable: false
      AIRFLOW_provision_examples: false
      #
      # ===== Zeppelin ========
      #
      ZEPPELIN_enable: true
      ZEPPELIN_version: 0.9.0
      ZEPPELIN_volume_map_data: false
      ZEPPELIN_admin_username: admin
      ZEPPELIN_admin_password: abc123!
      ZEPPELIN_user_username: zeppelin
      ZEPPELIN_user_password: zeppelin
      ZEPPELIN_spark_cores_max: 2
      ZEPPELIN_spark_executor_memory:
      ZEPPELIN_notebook_cron_enable: true

      #
      # ===== Jupyter ========
      #
      JUPYTER_enable: false
      # one of 'minimal', 'r', 'scipy', 'tensorflow', 'datascience', 'all-spark'
      JUPYTER_edition: 'minimal'
      JUPYTER_volume_map_data: false
      #
      # ===== Grafana ========
      #
      GRAFANA_enable: false
      #
      # ===== Elastic Kibana ========
      #
      KIBANA_enable: false # needs to have Elasticsearch enabled to work
      # one of 'oss', 'elastic',
      KIBANA_edition: 'oss'
      #
      # ===== Superset ========
      #
      SUPERSET_enable: false
      #
      # ===== Redash ========
      #
      REDASH_enable: false
      #
      # ===== Smashing Dashboard ========
      #
      SMASHING_enable: false
      SMASHING_volume_map_dashboards: false
      SMASHING_volume_map_jobs: false
      SMASHING_volume_map_widgets: false
      SMASHING_install_gems: ''
      SMASHING_install_widgets: ''
      #
      # ===== Tipboard Dashboard ========
      #
      TIPBOARD_enable: false
      TIPBOARD_volume_map_dashboards: false
      TIPBOARD_project_name: sample
      TIPBOARD_api_key: e2c3275d0e1a4bc0da360dd225d74a43
      TIPBOARD_port: 7272
      TIPBOARD_redis_host: redis-1
      TIPBOARD_redis_port: 6379
      TIPBOARD_redis_password:
      TIPBOARD_redis_db: 4
      TIPBOARD_flipboard_interval: 0
      TIPBOARD_flipboard_sequence: ''
      #
      # ===== Chartboard Dashboard ========
      #
      CHARTBOARD_enable: false
      CHARTBOARD_volume_map_dashboards: false
      #
      # ===== Memcached ========
      #
      MEMCACHED_enable: false
      #
      # ===== Redis ========
      #
      REDIS_enable: false
      REDIS_replicasets: 0
      REDIS_allow_empty_password: yes
      REDIS_disable_commands:
      REDIS_password: abc123!
      REDIS_aof_enabled: no
      REDIS_volume_map_data: false
      #
      # ===== Redis Insight ========
      #
      REDIS_INSIGHT_enable: false
      #
      # ===== Redis Commander ========
      #
      REDIS_COMMANDER_enable: false
      #
      # ===== Apache Cassandra ========
      #
      CASSANDRA_enable: false
      #
      # ===== DataStax ========
      #
      DATASTAX_enable: false
      DATASTAX_nodes: 3
      #
      # ===== MongoDB ========
      #
      MONGO_enable: false
      MONGO_nodes: 1
      #
      # ===== SolR ========
      #
      SOLR_enable: false
      #
      # ===== Elasticsearch ========
      #
      ELASTICSEARCH_enable: false
      # one of 'oss', 'elastic',
      ELASTICSEARCH_edition: 'oss'
      DEJAVU_enable: false
      CEREBRO_enable: false
      ELASTICHQ_enable: false
      #
      # ===== Neo4J ========
      #
      NEO4J_enable: false
      NEO4J_volume_map_data: false
      NEO4J_volume_map_logs: false
      #
      # ===== Stardog ========
      #
      STARDOG_enable: false
      STARDOG_volume_map_data: false
      STARDOG_STUDIO_enable: false
      #
      # ===== Influx DB 1.x ========
      #
      INFLUXDB_enable: false
      INFLUXDB_volume_map_data: false
      INFLUXDB_TELEGRAF_enable: false
      INFLUXDB_CHRONOGRAF_enable: false
      INFLUXDB_CHRONOGRAF_volume_map_data: false
      INFLUXDB_KAPACITOR_enable: false
      INFLUXDB_KAPACITOR_volume_map_data: false
      #
      # ===== Influx DB 2.x ========
      #
      INFLUXDB2_enable: false
      INFLUXDB2_volume_map_data: false
      #
      # ===== Kudo ========
      #
      KUDO_enable: false
      #
      # ===== Druid ========
      #
      DRUID_enable: false
      # one of 'oss-sandbox', 'oss-cluster'
      DRUID_edition: 'oss-sandbox'
      DRUID_volume_map_data: false
      #
      # ===== NoSQL - Prometheus ========
      #
      PROMETHEUS_enable: false
      PROMETHEUS_volume_map_data: false
      PROMETHEUS_PUSHGATEWAY_enable: false
      #
      # ===== NoSQL - Tile38 ========
      #
      TILE38_enable: false
      #
      # ===== Yugabyte ========
      #
      YUGABYTE_enable: false
      #
      # ===== Oracle RDBMS ========
      #
      ORACLE_enable: false
      ORACLE_edition: 'ee'
      ORACLE_volume_map_data: false
      #
      # ===== Oracle REST Data Service ========
      #
      ORACLE_REST_DATA_SERVICE_enable: false
      #
      # ===== MySQL ========
      #
      MYSQL_enable: false
      #
      # ===== SQL Server ========
      #
      SQLSERVER_enable: false
      #
      # ===== PostgreSQL ========
      #
      POSTGRESQL_enable: false
      POSTGRESQL_volume_map_data: false
      POSTGRESQL_database: demodb
      POSTGRESQL_user: demo
      POSTGRESQL_password: abc123!
      #
      # ===== TimeScale DB ========
      #
      TIMESCALEDB_enable: false
      TIMESCALEDB_volume_map_data: false
      #
      # ===== Adminer DB UI ========
      #
      ADMINER_enable: false
      #
      # ===== Cloudbeaver DB UI ========
      #
      CLOUDBEAVER_enable: false
      #
      # ===== Quix Presto UI ========
      #
      QUIX_enable: false
      #
      # ===== Event Store ========
      #
      AXON_enable: false
      #
      # ===== Presto ========
      #
      PRESTO_enable: false
      # "single" or "cluster" install
      PRESTO_install: single
      PRESTO_workers: 3
      # one of starburstdata, prestosql, prestodb, ahana
      PRESTO_edition: 'starburstdata'
      # Presto-CLI is enabled by default
      PRESTO_CLI_enable: true
      #
      # ===== Dremio ========
      #
      DREMIO_enable: false
      #
      # ===== Apache Drill ========
      #
      DRILL_enable: false
      #
      # ===== MQTT ========
      #
      MOSQUITTO_enable: false
      MOSQUITTO_nodes: 1
      MOSQUITTO_volume_map_data: false
      HIVEMQ3_enable: false
      HIVEMQ4_enable: false
      MQTT_UI_enable: false
      CEDALO_MANAGEMENT_CENTER_enable: false
      #
      # ===== ActiveMQ ========
      #
      ACTIVEMQ_enable: false
      ACTIVEMQ_volume_map_data: false
      #
      # ===== RabbitMQ ========
      #
      RABBITMQ_enable: false
      RABBITMQ_volume_map_data: false
      RABBITMQ_volume_map_logs: false
      #
      #=====  MinIO Object Storage ========
      #
      MINIO_enable: true
      MINIO_volume_map_data: true
      MINIO_access_key: V42FCGRVMK24JJ8DHUYG
      MINIO_secret_key: bKhWxVF3kQoLY9kFmt91l+tDrEoZjqnWXzY9Eza
      # add additional buckets, comma separated
      MINIO_default_buckets: 'default-bucket'
      MINIO_browser_enable: true
      AWSCLI_enable: true
      #
      # ===== FTP ========
      #
      FTP_enable: false
      #
      # ===== Pentaho Webspoon ========
      #
      PENTHAO_enable: false
      #
      # ===== Code Server ========
      #
      CODE_SERVER_enable: false
      CODE_SERVER_volume_map_platform_root: false
      #
      # ===== Vault ========
      #
      VAULT_enable: false
      VAULT_volume_map_data: false
      #
      # ===== Swagger API Management ========
      #
      SWAGGER_EDITOR_enable: false
      SWAGGER_UI_enable: false
      #
      # ===== Portainer Container UI ========
      #
      PORTAINER_enable: false
      #
      # ===== Cadvisor Container Mgmt UI ========
      #
      CADVISOR_enable: false
      #
      # ===== Hawtio ========
      #
      HAWTIO_enable: false
      #
      # ===== Wetty ========
      #
      WETTY_enable: false
      #
      # ===== Markdown Viewer ========
      #
      MARKDOWN_VIEWER_enable: true
      MARKDOWN_VIEWER_use_port_80: true
      MARKDOWN_VIEWER_use_public_ip: true
      #
      # ===== Python image ========
      #
      PYTHON_enable: false
      PYTHON_script_folder: ''
      PYTHON_script: ''
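
Since the full file is long, here are the toggles most directly related to the two failing UIs, repeated verbatim from the config above, together with the REST proxy toggle that kafka-topics-ui depends on:

      # Excerpt of the config.yml above (values unchanged)
      KAFKA_RESTPROXY_enable: false
      KAFKA_SCHEMA_REGISTRY_enable: true
      KAFKA_SCHEMA_REGISTRY_UI_enable: true
      KAFKA_SCHEMA_REGISTRY_UI_use_public_ip: true
      KAFKA_TOPICS_UI_enable: true
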
gschmutz commented 3 years ago

This has been fixed in 1.13.0.
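
To pick up the fix, the stack version in the config header above would presumably be raised and the platform regenerated (the `platys gen` invocation is assumed from the platys CLI, not stated in this issue):

      # In config.yml: point the generator at a stack version that contains the fix,
      # then re-run `platys gen` to regenerate docker-compose.yml
      platys:
          platform-name: 'platys-test'
          platform-stack: 'trivadis/platys-modern-data-platform'
          platform-stack-version: '1.13.0'    # previously '1.10.0'
          structure: 'flat'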