Commit #27
ufasoli committed Aug 7, 2020
1 parent bd19c49 commit efe718a
Showing 2 changed files with 81 additions and 77 deletions.
34 changes: 34 additions & 0 deletions cmd/init.go
@@ -15,6 +15,8 @@ var (
enableServices string
force bool
hwArch string
structure string
platformName string
)

func init() {
@@ -25,6 +27,8 @@ func init() {
initCmd.Flags().StringVarP(&hwArch, "hw-arch", "x", "x86-64", "Hardware architecture for the platform")
initCmd.Flags().StringVarP(&seedConfig, "seed-config", "e", "", "The name of a predefined stack to base this new platform on")
initCmd.Flags().StringP("config-file", "c", "config.yml", "The name of the local config file (defaults to config.yml)")
initCmd.Flags().StringVarP(&structure, "structure", "b", "", "Defines the structure of the generated platform (flat = platform is generated at the level of the config.yml, subfolder = platform is generated into a subfolder)")
initCmd.Flags().StringVarP(&platformName, "platform-name", "n", "", "The name of the platform to generate")

}

@@ -110,6 +114,14 @@ By default 'config.yml' is used for the name of the config file, which is created
ymlConfig.Content[0].Content = updatedYml
}

if len(platformName) > 0 {
updateConfig("platform-name", platformName, &ymlConfig)
}

if len(structure) > 0 {
updateConfig("structure", structure, &ymlConfig)
}

b, _ := yaml.Marshal(&ymlConfig)
b = addRootIndent(b, 6)

@@ -145,3 +157,25 @@ func in_array(val string, list []string) bool {
}
return false
}

// TODO: check for a possibly cleaner, more concise way
// to implement the following methods

// updateConfig sets the value of the entry named 'name' inside the nested
// 'platys' mapping of the YAML document. In a yaml.Node mapping, the Content
// slice alternates key and value nodes, so the value for the key at index i
// is the node at index i+1.
func updateConfig(name string, value string, node *yaml.Node) {

platys := node.Content[0].Content

for i, n := range platys {
if n.Value == "platys" {
for j, sn := range platys[i+1].Content {
if sn.Value == name {
platys[i+1].Content[j+1].Value = value
break
}
}
}
}

}
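
As a side note (not part of the commit), the minimal standalone sketch below illustrates the yaml.Node layout that updateConfig depends on, assuming the yaml import is gopkg.in/yaml.v3, whose yaml.Node/Content API matches the code above: Content[0] of the document node is the top-level mapping, and its Content slice alternates key and value nodes, so the nested 'platys' mapping sits right after the "platys" key node.

```go
// Standalone sketch (assumption: the YAML package is gopkg.in/yaml.v3).
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

const sample = `platys:
  platform-name: 'default'
  structure: 'flat'
`

func main() {
	var doc yaml.Node
	if err := yaml.Unmarshal([]byte(sample), &doc); err != nil {
		panic(err)
	}

	// Same traversal as updateConfig: doc.Content[0] is the root mapping;
	// the value for the key at index i is the node at index i+1.
	top := doc.Content[0].Content
	for i, n := range top {
		if n.Value == "platys" {
			nested := top[i+1].Content
			for j, sn := range nested {
				if sn.Value == "platform-name" {
					nested[j+1].Value = "test" // overwrite in place
					break
				}
			}
		}
	}

	out, _ := yaml.Marshal(&doc)
	fmt.Print(string(out))
}
```

Running it prints the same document with platform-name changed to 'test', which mirrors what init now does when --platform-name is passed on the command line.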
124 changes: 47 additions & 77 deletions config.yml
@@ -1,119 +1,102 @@
# Default values for the generator
# this file can be used as a template for a custom configuration
# or to know about the different variables available for the generator
# Default values for the generator
# this file can be used as a template for a custom configuration
# or to know about the different variables available for the generator
platys:
platform-name: 'default'
stack-image-name: 'trivadis/platys-modern-data-platform'
stack-image-version: '1.5.2'
structure: 'flat'

platform-name: 'test'
stack-image-name: 'trivadis/platys-modern-data-platform'
stack-image-version: '1.6.0'
structure: 'subfolder'
# ===== Global configuration, valid for all or a group of services ========
# Timezone, use a Linux string such as Europe/Zurich or America/New_York
use_timezone: ''
# the name of the repository to use for private images, which are not on docker hub (currently only Oracle images)
private_docker_repository_name: 'trivadis'

# ===== Apache Zookeeper ========
ZOOKEEPER_enable: false
ZOOKEEPER_volume_map_data: false
ZOOKEEPER_nodes: 9 # either 1 or 3

ZOOKEEPER_nodes: 1 # either 1 or 3
ZOOKEEPER_navigator_enable: false

# ===== Apache Kafka ========
KAFKA_enable: false
# one of enterprise, community
KAFKA_edition: 'community'
KAFKA_volume_map_data: false
KAFKA_broker_nodes: 3
KAFKA_internal_replication_factor: 3
KAFKA_delete_topic_enable: false
KAFKA_auto_create_topics_enable: false

#kafka schema registry
KAFKA_SCHEMA_REGISTRY_enable: false
KAFKA_SCHEMA_REGISTRY_nodes: 1
KAFKA_SCHEMA_REGISTRY_use_zookeeper_election: false
KAFKA_SCHEMA_REGISTRY_replication_factor: 1

#kafka connect
KAFKA_CONNECT_enable: false
KAFKA_CONNECT_nodes: 2

KAFKA_CONNECT_connectors:
#ksqldb
KAFKA_KSQLDB_enable: false
KAFKA_KSQLDB_edition: 'oss'
KAFKA_KSQLDB_nodes: 1

#confluent control center
KAFKA_CCC_enable: false

#confluent replicator
KAFKA_REPLICATOR_enable: false
KAFKA_RESTPROXY_enable: false
KAFKA_MQTTPROXY_enable: false

KAFKACAT_enable: false
KAFKA_SCHEMA_REGISTRY_UI_enable: false
KAFKA_TOPICS_UI_enable: false
KAFKA_CONNECT_UI_enable: false

KAFKA_CMAK_enable: false
KAFKA_KAFDROP_enable: false
KAFKA_KADMIN_enable: false
KAFKA_AKHQ_enable: false
KAFKA_BURROW_enable: false

# ===== Hadoop ========
HADOOP_enable: false
HADOOP_datanodes: 2

# ===== Spark ========
SPARK_enable: false
# "hive" or "in-memory"
SPARK_catalog: in-memory
SPARK_workers: 2
SPARK_jars_repositories: ''
SPARK_jars_packages: ''
SPARK_jars: ''
SPARK_jars_ivySettings: ''
SPARK_driver_extraJavaOptions: ''
SPARK_executor_extraJavaOptions: ''

# misc spark 'addons'
SPARK_HISTORY_enable: false
SPARK_THRIFT_enable: false

# ===== Apache Livy ========
LIVY_enable: false

# ===== Apache Hive ========
HIVE_SERVER_enable: false

# ===== Apache Hive Metastore ========
HIVE_METASTORE_enable: false

# ===== Apache Ranger ========
RANGER_enable: false
RANGER_POSTRESQL_volume_map_data: false

# ===== Apache Atlas ========
ATLAS_enable: false
ATLAS_provision_atlas_sample_data: false
ATLAS_provision_amundsen_sample_data: false
ATLAS_install_hive_hook: false

# ===== Data Hub ========
DATAHUB_enable: false
DATAHUB_volume_map_data: false

#===== Amundsen ========
AMUNDSEN_enable: false
# one of 'amundsen' or 'atlas'
AMUNDSEN_metastore: 'amundsen'

#===== Marquez ========
MARQUEZ_enable: false
MARQUEZ_volume_map_data: false
MARQUEZ_provision_marquez_sample_data: false

# ===== Hue ========
HUE_enable: false

# ===== Streamsets and Streamsets Edge ========
STREAMSETS_enable: false
STREAMSETS_volume_map_data: false
@@ -123,60 +106,49 @@
STREAMSETS_TRANSFORMER_volume_map_data: false
STREAMSETS_EDGE_enable: false
STREAMSETS_EDGE_volume_map_data: false

# ===== Nifi ========
NIFI_enable: false

# ===== Node Red ========
NODERED_enable: false
NODERED_volume_map_data: false

# ===== Streamsheets ========
STREAMSHEETS_enable: false
# ===== Sqoop ========
SQOOP_enable: false

# ===== Apache Airflow ========
AIRFLOW_enable: false
AIRFLOW_provision_examples: false

# ===== Zeppelin ========
ZEPPELIN_enable: false
ZEPPELIN_volume_map_data: false

# ===== Jupyter ========
JUPYTER_enable: false
# one of 'minimal', 'r', 'scipy', 'tensorflow', 'datascience', 'all-spark'
JUPYTER_edition: 'minimal'
JUPYTER_volume_map_data: false

# ===== Visualization ========
GRAFANA_enable: false
# one of 'oss', 'elastic',
KIBANA_edition: 'oss'
KIBANA_enable: false #needs to have elasticsearch enabled to work

SUPERSET_enable: false

# ===== NoSQL ========
REDIS_enable: false
CASSANDRA_enable: false
DATASTAX_enable: false
DATASTAX_nodes: 3

MONGO_enable: false
MONGO_nodes: 1

SOLR_enable: false
ELASTICSEARCH_enable: false
# one of 'oss', 'elastic',
ELASTICSEARCH_edition: 'oss'
DEJAVU_enable: false
CEREBRO_enable: false
ELASTICHQ_enable: false

NEO4J_enable: false
NEO4J_volume_map_data: false
NEO4J_volume_map_logs: false

# ===== Influx DB 1.x ========
INFLUXDB_enable: false
INFLUXDB_volume_map_data: false
@@ -185,95 +157,93 @@
INFLUXDB_CHRONOGRAF_volume_map_data: false
INFLUXDB_KAPACITOR_enable: false
INFLUXDB_KAPACITOR_volume_map_data: false

# ===== Influx DB 2.x ========
INFLUXDB2_enable: false
INFLUXDB2_volume_map_data: false

# ===== Influx DB 2.x ========
# ===== Kudo ========
KUDO_enable: false
# ===== Druid ========
DRUID_enable: false
# one of 'oss-sandbox', 'oss-cluster'
DRUID_edition: 'oss-sandbox'
DRUID_volume_map_data: false

# ===== NoSQL - Prometheus ========
PROMETHEUS_enable: false
PROMETHEUS_volume_map_data: false
PROMETHEUS_PUSHGATEWAY_enable: false

# ===== NoSQL - Tile38 ========
TILE38_enable: false

# ===== Yugabyte ========
YUGABYTE_enable: false

# ===== Oracle ========
ORACLE_enable: false
ORACLE_edition: 'ee'
ORACLE_volume_map_data: false

ORACLE_REST_DATA_SERVICE_enable: false

# ===== Other RDBMS ========
MYSQL_enable: false
SQLSERVER_enable: false

POSTGRESQL_enable: false
POSTGRESQL_volume_map_data: false
TIMESCALEDB_enable: false
TIMESCALEDB_volume_map_data: false

# ===== Database Frontends ========
ADMINER_enable: false

CLOUDBEAVER_enable: false
QUIX_enable: false
# ===== Event Store ========
AXON_enable: false

# ===== Presto ========
PRESTO_enable: false
# one of starburstdata, oss
# "single" or "cluster" install
PRESTO_install: single
PRESTO_workers: 3
# one of starburstdata, prestosql, prestodb, ahana
PRESTO_edition: 'starburstdata'

# Presto-CLI is enabled by default
PRESTO_CLI_enable: true
# ===== Dremio ========
DREMIO_enable: false

# ===== Apache Drill ========
DRILL_enable: false

# ===== MQTT ========
MOSQUITTO_enable: false
MOSQUITTO_enable: false
MOSQUITTO_nodes: 1
MOSQUITTO_volume_map_data: false
HIVEMQ3_enable: false
HIVEMQ4_enable: false
MQTT_UI_enable: false

HIVEMQ3_enable: false
HIVEMQ4_enable: false
MQTT_UI_enable: false
# ===== ActiveMQ ========
ACTIVEMQ_enable: false

ACTIVEMQ_volume_map_data: false
# ===== RabbitMQ ========
RABBITMQ_enable: false
RABBITMQ_volume_map_data: false
RABBITMQ_volume_map_logs: false
#===== MinIO Object Storage ========
MINIO_enable: false
MINIO_volume_map_data: false

AWSCLI_enable: false

# ===== FTP ========
FTP_enable: false

# ===== Pentaho Webspoon ========
PENTHAO_enable: false
# ===== Code Server ========
CODE_SERVER_enable: false
CODE_SERVER_volume_map_platform_root: false

# ===== Vualt ========
# ===== Vault ========
VAULT_enable: false
VAULT_volume_map_data: false

# ===== Swagger API Management ========
SWAGGER_EDITOR_enable: false
SWAGGER_UI_enable: false
# ===== Container Mgmt ========
PORTAINER_enable: false
CADVISOR_enable: false

# ===== Hawtio ========
HAWTIO_enable: false
# ===== Python image ========
PYTHON_enable: false
PYTHON_script_folder: ''
PYTHON_script: ''


