20482: Allows the cluster operator to use an arbitrary domain.
author    Lucas Di Pentima <lucas.dipentima@curii.com>
          Thu, 18 May 2023 14:22:10 +0000 (11:22 -0300)
committer Lucas Di Pentima <lucas.dipentima@curii.com>
          Fri, 19 May 2023 15:40:03 +0000 (12:40 -0300)
Instead of making domains like cluster_prefix.domain mandatory, let the site
admin select whichever domain they need for the deployment.

Arvados-DCO-1.1-Signed-off-by: Lucas Di Pentima <lucas.dipentima@curii.com>
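
For reference, a minimal sketch of how the hostname scheme changes in
local.params (the cluster ID and domain values below are illustrative
examples, not taken from this commit):

    # Before this change: hostnames were always derived as SERVICE.CLUSTER.DOMAIN
    CLUSTER="xarv1"
    DOMAIN="example.com"           # e.g. workbench.xarv1.example.com

    # After this change: hostnames are SERVICE.DOMAIN, so any domain layout works
    CLUSTER="xarv1"                # still required as the 5-character cluster ID
    DOMAIN="arvados.example.com"   # e.g. workbench.arvados.example.com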

27 files changed:
tools/salt-install/config_examples/multi_host/aws/certs/README.md
tools/salt-install/config_examples/multi_host/aws/pillars/arvados.sls
tools/salt-install/config_examples/multi_host/aws/pillars/grafana.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_controller_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_grafana_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_keepproxy_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_keepweb_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_prometheus_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_webshell_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_websocket_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_workbench2_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_workbench_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_api_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_collections_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_controller_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_download_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_grafana_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_keepproxy_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_prometheus_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_webshell_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_websocket_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_workbench2_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_workbench_configuration.sls
tools/salt-install/config_examples/multi_host/aws/pillars/prometheus_server.sls
tools/salt-install/installer.sh
tools/salt-install/local.params.example.multiple_hosts
tools/salt-install/provision.sh

diff --git a/tools/salt-install/config_examples/multi_host/aws/certs/README.md b/tools/salt-install/config_examples/multi_host/aws/certs/README.md
index dc9043217ed20bdef72c17546e4072cd485fef9b..3597fff5b07ce08e4bb93219b0af150fad395104 100644 (file)
@@ -5,14 +5,18 @@ Add the certificates for your hosts in this directory.
 
 The nodes requiring certificates are:
 
-* CLUSTER.DOMAIN
-* collections.CLUSTER.DOMAIN
-* \*.collections.CLUSTER.DOMAIN
-* download.CLUSTER.DOMAIN
-* keep.CLUSTER.DOMAIN
-* workbench.CLUSTER.DOMAIN
-* workbench2.CLUSTER.DOMAIN
-* ws.CLUSTER.DOMAIN
+* DOMAIN
+* collections.DOMAIN
+* controller.DOMAIN
+* \*.collections.DOMAIN
+* grafana.DOMAIN
+* download.DOMAIN
+* keep.DOMAIN
+* prometheus.DOMAIN
+* shell.DOMAIN
+* workbench.DOMAIN
+* workbench2.DOMAIN
+* ws.DOMAIN
 
 They can be individual certificates or a wildcard certificate for all of them.
 
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/arvados.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/arvados.sls
index f181c874d2989a9fe378d850849f9e676e119909..ef5a91b270d074fe2064d04808ebf23d02efc4d5 100644 (file)
@@ -84,7 +84,7 @@ arvados:
     resources:
       virtual_machines:
         shell:
-          name: shell.__CLUSTER__.__DOMAIN__
+          name: shell.__DOMAIN__
           backend: __SHELL_INT_IP__
           port: 4200
 
@@ -158,7 +158,7 @@ arvados:
 
     Services:
       Controller:
-        ExternalURL: 'https://__CLUSTER__.__DOMAIN__:__CONTROLLER_EXT_SSL_PORT__'
+        ExternalURL: 'https://__DOMAIN__:__CONTROLLER_EXT_SSL_PORT__'
         InternalURLs:
           'http://localhost:8003': {}
       DispatchCloud:
@@ -168,7 +168,7 @@ arvados:
         InternalURLs:
           'http://__CONTROLLER_INT_IP__:9005': {}
       Keepproxy:
-        ExternalURL: 'https://keep.__CLUSTER__.__DOMAIN__:__KEEP_EXT_SSL_PORT__'
+        ExternalURL: 'https://keep.__DOMAIN__:__KEEP_EXT_SSL_PORT__'
         InternalURLs:
           'http://localhost:25107': {}
       Keepstore:
@@ -178,21 +178,21 @@ arvados:
         InternalURLs:
           'http://localhost:8004': {}
       WebDAV:
-        ExternalURL: 'https://*.collections.__CLUSTER__.__DOMAIN__:__KEEPWEB_EXT_SSL_PORT__/'
+        ExternalURL: 'https://*.collections.__DOMAIN__:__KEEPWEB_EXT_SSL_PORT__/'
         InternalURLs:
           'http://__KEEPWEB_INT_IP__:9002': {}
       WebDAVDownload:
-        ExternalURL: 'https://download.__CLUSTER__.__DOMAIN__:__KEEPWEB_EXT_SSL_PORT__'
+        ExternalURL: 'https://download.__DOMAIN__:__KEEPWEB_EXT_SSL_PORT__'
       WebShell:
-        ExternalURL: 'https://webshell.__CLUSTER__.__DOMAIN__:__KEEPWEB_EXT_SSL_PORT__'
+        ExternalURL: 'https://webshell.__DOMAIN__:__KEEPWEB_EXT_SSL_PORT__'
       Websocket:
-        ExternalURL: 'wss://ws.__CLUSTER__.__DOMAIN__/websocket'
+        ExternalURL: 'wss://ws.__DOMAIN__/websocket'
         InternalURLs:
           'http://localhost:8005': {}
       Workbench1:
-        ExternalURL: 'https://workbench.__CLUSTER__.__DOMAIN__:__WORKBENCH1_EXT_SSL_PORT__'
+        ExternalURL: 'https://workbench.__DOMAIN__:__WORKBENCH1_EXT_SSL_PORT__'
       Workbench2:
-        ExternalURL: 'https://workbench2.__CLUSTER__.__DOMAIN__:__WORKBENCH2_EXT_SSL_PORT__'
+        ExternalURL: 'https://workbench2.__DOMAIN__:__WORKBENCH2_EXT_SSL_PORT__'
 
     InstanceTypes:
       t3small:
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/grafana.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/grafana.sls
index 1cdff39a62b5ac364050b4746bc2ebfca242a32b..b46615609777cebe6c92dd81a4a6ae29d98fdc3e 100644 (file)
@@ -17,7 +17,7 @@ grafana:
         - pkg: grafana
   config:
     default:
-      instance_name: __CLUSTER__.__DOMAIN__
+      instance_name: __DOMAIN__
     security:
       admin_user: {{ "__MONITORING_USERNAME__" | yaml_dquote }}
       admin_password: {{ "__MONITORING_PASSWORD__" | yaml_dquote }}
@@ -26,5 +26,5 @@ grafana:
       protocol: http
       http_addr: 127.0.0.1
       http_port: 3000
-      domain: grafana.__CLUSTER__.__DOMAIN__
-      root_url: https://grafana.__CLUSTER__.__DOMAIN__
+      domain: grafana.__DOMAIN__
+      root_url: https://grafana.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_controller_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_controller_configuration.sls
index 1f088a8a7d8b670902a20c68bf63310e9e0ea81a..d0ecb54df694bdce1de5f48ff929c07ce18968ab 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    controller.__CLUSTER__.__DOMAIN__:
-      - __CLUSTER__.__DOMAIN__
+    controller.__DOMAIN__:
+      - __DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_grafana_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_grafana_configuration.sls
index 60a4c315d84efb5dfccf7eb7502b31f51d1577d8..c92a962be7362d73b65752c999cb3abf5c16c499 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    grafana.__CLUSTER__.__DOMAIN__:
-      - grafana.__CLUSTER__.__DOMAIN__
+    grafana.__DOMAIN__:
+      - grafana.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_keepproxy_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_keepproxy_configuration.sls
index b2945e611f44de3f85a16c46f834b72a7cf45e79..c174386a5a0f878c57c2ec9fff267c7416a083c5 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    keepproxy.__CLUSTER__.__DOMAIN__:
-      - keep.__CLUSTER__.__DOMAIN__
+    keepproxy.__DOMAIN__:
+      - keep.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_keepweb_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_keepweb_configuration.sls
index f95d7e619d4cb7971dc73026c75a9a35f08ba8d0..f77d17c877274c7aab4f315077dc7d1bf4fc2d99 100644 (file)
@@ -6,8 +6,8 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    download.__CLUSTER__.__DOMAIN__:
-      - download.__CLUSTER__.__DOMAIN__
-    collections.__CLUSTER__.__DOMAIN__:
-      - collections.__CLUSTER__.__DOMAIN__
-      - '*.collections.__CLUSTER__.__DOMAIN__'
+    download.__DOMAIN__:
+      - download.__DOMAIN__
+    collections.__DOMAIN__:
+      - collections.__DOMAIN__
+      - '*.collections.__DOMAIN__'
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_prometheus_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_prometheus_configuration.sls
index 7b1165d6dfefd172cb381e7a474c75f7a755ad7d..a352bc2137db7e59b6fe21d1f1c2317a81e729b2 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    prometheus.__CLUSTER__.__DOMAIN__:
-      - prometheus.__CLUSTER__.__DOMAIN__
+    prometheus.__DOMAIN__:
+      - prometheus.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_webshell_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_webshell_configuration.sls
index 17e6422f420f0aad181695b0c40cd18a27d3a28f..538719f7f35e4c42cb80642fa2235ca00357da2f 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    webshell.__CLUSTER__.__DOMAIN__:
-      - webshell.__CLUSTER__.__DOMAIN__
+    webshell.__DOMAIN__:
+      - webshell.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_websocket_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_websocket_configuration.sls
index 6515b3bd0b38e4420a801d1a251ddb37fc153907..f4d2227611d5f47aa3825ad3417f2592024ccf9b 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    websocket.__CLUSTER__.__DOMAIN__:
-      - ws.__CLUSTER__.__DOMAIN__
+    websocket.__DOMAIN__:
+      - ws.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_workbench2_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_workbench2_configuration.sls
index 2bcf2b7841e5fd553a1370d1be34e59e8e230c83..0ea0179a281db8841d554e7fde142280d041792b 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    workbench2.__CLUSTER__.__DOMAIN__:
-      - workbench2.__CLUSTER__.__DOMAIN__
+    workbench2.__DOMAIN__:
+      - workbench2.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_workbench_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/letsencrypt_workbench_configuration.sls
index 9ef348719423c21ab29ea1895ce4fb8db157bf16..cfff3ea8fcf26d04057b8df7620e9e21169969db 100644 (file)
@@ -6,5 +6,5 @@
 ### LETSENCRYPT
 letsencrypt:
   domainsets:
-    workbench.__CLUSTER__.__DOMAIN__:
-      - workbench.__CLUSTER__.__DOMAIN__
+    workbench.__DOMAIN__:
+      - workbench.__DOMAIN__
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_api_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_api_configuration.sls
index 9fbf90dd2c478b0ebf64be603a6e6511f468cf10..bfe0386e9316fe848bccf5e775d452c1462e653c 100644 (file)
@@ -22,7 +22,7 @@ nginx:
             - server_name: api
             - root: /var/www/arvados-api/current/public
             - index:  index.html index.htm
-            - access_log: /var/log/nginx/api.__CLUSTER__.__DOMAIN__-upstream.access.log combined
-            - error_log: /var/log/nginx/api.__CLUSTER__.__DOMAIN__-upstream.error.log
+            - access_log: /var/log/nginx/api.__DOMAIN__-upstream.access.log combined
+            - error_log: /var/log/nginx/api.__DOMAIN__-upstream.error.log
             - passenger_enabled: 'on'
             - client_max_body_size: 128m
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_collections_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_collections_configuration.sls
index b349ded3281ac9acc3e52b733cff79b5c3a518be..1c10847f76a9b199894d45e70c0da3cda6dfb4a8 100644 (file)
@@ -15,7 +15,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: '~^(.*\.)?collections\.__CLUSTER__\.__DOMAIN__'
+            - server_name: '~^(.*\.)?collections\.__DOMAIN__'
             - listen:
               - 80
             - location /:
@@ -29,7 +29,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: '~^(.*\.)?collections\.__CLUSTER__\.__DOMAIN__'
+            - server_name: '~^(.*\.)?collections\.__DOMAIN__'
             - listen:
               - __KEEPWEB_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
@@ -52,5 +52,5 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/collections.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/collections.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/collections.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/collections.__DOMAIN__.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_controller_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_controller_configuration.sls
index a48810e833cded5703adfcabe67104c5526e494f..d0fd6a1312de90a0ec52aa6cd393362e2797172b 100644 (file)
@@ -28,7 +28,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: __CLUSTER__.__DOMAIN__
+            - server_name: __DOMAIN__
             - listen:
               - 80 default
             - location /.well-known:
@@ -43,7 +43,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: __CLUSTER__.__DOMAIN__
+            - server_name: __DOMAIN__
             - listen:
               - __CONTROLLER_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
@@ -69,6 +69,6 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/controller.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/controller.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/controller.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/controller.__DOMAIN__.error.log
             - client_max_body_size: 128m
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_download_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_download_configuration.sls
index a183475a461a65ab769758ab7d1a8b252c2508fb..4470a388a951ce09730f3573762b70dc3e6e7625 100644 (file)
@@ -15,7 +15,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: download.__CLUSTER__.__DOMAIN__
+            - server_name: download.__DOMAIN__
             - listen:
               - 80
             - location /:
@@ -29,7 +29,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: download.__CLUSTER__.__DOMAIN__
+            - server_name: download.__DOMAIN__
             - listen:
               - __KEEPWEB_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
@@ -52,5 +52,5 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/download.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/download.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/download.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/download.__DOMAIN__.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_grafana_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_grafana_configuration.sls
index e306dbd0c741df280c0d10e5da569a1a024491ed..9e1d72615012d12c1836dedd41a1cf41babba8d0 100644 (file)
@@ -24,7 +24,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: grafana.__CLUSTER__.__DOMAIN__
+            - server_name: grafana.__DOMAIN__
             - listen:
               - 80
             - location /.well-known:
@@ -39,7 +39,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: grafana.__CLUSTER__.__DOMAIN__
+            - server_name: grafana.__DOMAIN__
             - listen:
               - 443 http2 ssl
             - index: index.html index.htm
@@ -58,5 +58,5 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/grafana.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/grafana.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/grafana.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/grafana.__DOMAIN__.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_keepproxy_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_keepproxy_configuration.sls
index c8deaebe97c26ea15ec9a43cfd8cb5a06ed78627..63c318fc2487f76f51b5f5f5a8b274158c663b5d 100644 (file)
@@ -23,7 +23,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: keep.__CLUSTER__.__DOMAIN__
+            - server_name: keep.__DOMAIN__
             - listen:
               - 80
             - location /:
@@ -36,7 +36,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: keep.__CLUSTER__.__DOMAIN__
+            - server_name: keep.__DOMAIN__
             - listen:
               - __KEEP_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
@@ -60,5 +60,5 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/keepproxy.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/keepproxy.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/keepproxy.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/keepproxy.__DOMAIN__.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_prometheus_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_prometheus_configuration.sls
index d654d6ed0bd4368c2a2f7da617a6d6a1eb687319..5e82a9a4bd025eabd62828e849ff0805b4942207 100644 (file)
@@ -24,7 +24,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: prometheus.__CLUSTER__.__DOMAIN__
+            - server_name: prometheus.__DOMAIN__
             - listen:
               - 80
             - location /.well-known:
@@ -39,7 +39,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: prometheus.__CLUSTER__.__DOMAIN__
+            - server_name: prometheus.__DOMAIN__
             - listen:
               - 443 http2 ssl
             - index: index.html index.htm
@@ -60,5 +60,5 @@ nginx:
             {%- endif %}
             - auth_basic: '"Restricted Area"'
             - auth_basic_user_file: htpasswd
-            - access_log: /var/log/nginx/prometheus.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/prometheus.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/prometheus.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/prometheus.__DOMAIN__.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_webshell_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_webshell_configuration.sls
index 3a0a23d95f31b6e27a2b43f2c8934f0c67d5aa92..41471ab7a335e24d67f034715472109cec5d439f 100644 (file)
@@ -14,7 +14,7 @@ nginx:
       ### STREAMS
       http:
         upstream webshell_upstream:
-          - server: 'shell.__CLUSTER__.__DOMAIN__:4200 fail_timeout=10s'
+          - server: 'shell.__DOMAIN__:4200 fail_timeout=10s'
 
   ### SITES
   servers:
@@ -24,7 +24,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: webshell.__CLUSTER__.__DOMAIN__
+            - server_name: webshell.__DOMAIN__
             - listen:
               - 80
             - location /:
@@ -37,11 +37,11 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: webshell.__CLUSTER__.__DOMAIN__
+            - server_name: webshell.__DOMAIN__
             - listen:
               - __WEBSHELL_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
-            - location /shell.__CLUSTER__.__DOMAIN__:
+            - location /shell.__DOMAIN__:
               - proxy_pass: 'http://webshell_upstream'
               - proxy_read_timeout: 90
               - proxy_connect_timeout: 90
@@ -76,6 +76,6 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/webshell.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/webshell.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/webshell.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/webshell.__DOMAIN__.error.log
 
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_websocket_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_websocket_configuration.sls
index 36246d751de5e0a10dbd21d5892259b1f9b4b6e9..f80eeb96b6cb3b379bdd0f3615a5a008125b2069 100644 (file)
@@ -23,7 +23,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: ws.__CLUSTER__.__DOMAIN__
+            - server_name: ws.__DOMAIN__
             - listen:
               - 80
             - location /:
@@ -36,7 +36,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: ws.__CLUSTER__.__DOMAIN__
+            - server_name: ws.__DOMAIN__
             - listen:
               - __CONTROLLER_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
@@ -61,5 +61,5 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/ws.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/ws.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/ws.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/ws.__DOMAIN__.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_workbench2_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_workbench2_configuration.sls
index 47eafeeece9699e3de228d1e578c39751b3da53d..629910eb8a5c112bcf06faf4106e69415232c97c 100644 (file)
@@ -21,7 +21,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: workbench2.__CLUSTER__.__DOMAIN__
+            - server_name: workbench2.__DOMAIN__
             - listen:
               - 80
             - location /:
@@ -34,7 +34,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: workbench2.__CLUSTER__.__DOMAIN__
+            - server_name: workbench2.__DOMAIN__
             - listen:
               - __CONTROLLER_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
@@ -44,12 +44,12 @@ nginx:
               - 'if (-f $document_root/maintenance.html)':
                 - return: 503
             - location /config.json:
-              - return: {{ "200 '" ~ '{"API_HOST":"__CLUSTER__.__DOMAIN__:__CONTROLLER_EXT_SSL_PORT__"}' ~ "'" }}
+              - return: {{ "200 '" ~ '{"API_HOST":"__DOMAIN__:__CONTROLLER_EXT_SSL_PORT__"}' ~ "'" }}
             - include: snippets/ssl_hardening_default.conf
             - ssl_certificate: __CERT_PEM__
             - ssl_certificate_key: __CERT_KEY__
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/workbench2.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/workbench2.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/workbench2.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/workbench2.__DOMAIN__.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_workbench_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_workbench_configuration.sls
index 82fd24756de7ce1c307adb2e3c975ec329b0f38a..013be704c84590d1c7ea02ad7a5a6757a1a247f6 100644 (file)
@@ -30,7 +30,7 @@ nginx:
         overwrite: true
         config:
           - server:
-            - server_name: workbench.__CLUSTER__.__DOMAIN__
+            - server_name: workbench.__DOMAIN__
             - listen:
               - 80
             - location /:
@@ -43,7 +43,7 @@ nginx:
           __CERT_REQUIRES__
         config:
           - server:
-            - server_name: workbench.__CLUSTER__.__DOMAIN__
+            - server_name: workbench.__DOMAIN__
             - listen:
               - __CONTROLLER_EXT_SSL_PORT__ http2 ssl
             - index: index.html index.htm
@@ -62,8 +62,8 @@ nginx:
             {%- if ssl_key_encrypted_pillar.ssl_key_encrypted.enabled %}
             - ssl_password_file: {{ '/run/arvados/' | path_join(ssl_key_encrypted_pillar.ssl_key_encrypted.privkey_password_filename) }}
             {%- endif %}
-            - access_log: /var/log/nginx/workbench.__CLUSTER__.__DOMAIN__.access.log combined
-            - error_log: /var/log/nginx/workbench.__CLUSTER__.__DOMAIN__.error.log
+            - access_log: /var/log/nginx/workbench.__DOMAIN__.access.log combined
+            - error_log: /var/log/nginx/workbench.__DOMAIN__.error.log
 
       arvados_workbench_upstream:
         enabled: true
@@ -76,5 +76,5 @@ nginx:
             - index:  index.html index.htm
             - passenger_enabled: 'on'
             # yamllint disable-line rule:line-length
-            - access_log: /var/log/nginx/workbench.__CLUSTER__.__DOMAIN__-upstream.access.log combined
-            - error_log: /var/log/nginx/workbench.__CLUSTER__.__DOMAIN__-upstream.error.log
+            - access_log: /var/log/nginx/workbench.__DOMAIN__-upstream.access.log combined
+            - error_log: /var/log/nginx/workbench.__DOMAIN__-upstream.error.log
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/prometheus_server.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/prometheus_server.sls
index 7b4a09f50c609e5430ee964070250720d87552b7..bbf997b7be364700372d55b308ad527b7a7a9aae 100644 (file)
@@ -36,7 +36,7 @@ prometheus:
               bearer_token: __MANAGEMENT_TOKEN__
               scheme: https
               static_configs:
-                - targets: ['ws.__CLUSTER__.__DOMAIN__:443']
+                - targets: ['ws.__DOMAIN__:443']
                   labels:
                     instance: ws.__CLUSTER__
                     cluster: __CLUSTER__
@@ -44,7 +44,7 @@ prometheus:
               bearer_token: __MANAGEMENT_TOKEN__
               scheme: https
               static_configs:
-                - targets: ['__CLUSTER__.__DOMAIN__:443']
+                - targets: ['__DOMAIN__:443']
                   labels:
                     instance: controller.__CLUSTER__
                     cluster: __CLUSTER__
@@ -52,7 +52,7 @@ prometheus:
               bearer_token: __MANAGEMENT_TOKEN__
               scheme: https
               static_configs:
-                - targets: ['keep.__CLUSTER__.__DOMAIN__:443']
+                - targets: ['keep.__DOMAIN__:443']
                   labels:
                     instance: keep-web.__CLUSTER__
                     cluster: __CLUSTER__
@@ -98,7 +98,7 @@ prometheus:
                   'workbench',
                   'shell',
                 ] %}
-                - targets: [ "{{ node }}.__CLUSTER__.__DOMAIN__:9100" ]
+                - targets: [ "{{ node }}.__DOMAIN__:9100" ]
                   labels:
                     instance: "{{ node }}.__CLUSTER__"
                     cluster: __CLUSTER__
diff --git a/tools/salt-install/installer.sh b/tools/salt-install/installer.sh
index 5a55e337da3248e22c335f5ac7d9be3e020d9ae6..104ce3a60269015611a440b891f7bea27045420e 100755 (executable)
@@ -338,7 +338,7 @@ case "$subcmd" in
            exit 1
        fi
 
-       export ARVADOS_API_HOST="${CLUSTER}.${DOMAIN}:${CONTROLLER_EXT_SSL_PORT}"
+       export ARVADOS_API_HOST="${DOMAIN}:${CONTROLLER_EXT_SSL_PORT}"
        export ARVADOS_API_TOKEN="$SYSTEM_ROOT_TOKEN"
 
        arvados-client diagnostics $LOCATION
diff --git a/tools/salt-install/local.params.example.multiple_hosts b/tools/salt-install/local.params.example.multiple_hosts
index fd1919e0cc12e9b5912c5dc180d3336a8f2b20e5..463ee4c10168e28cd71da79aa8a9aac375df5c10 100644 (file)
@@ -8,8 +8,8 @@
 # The Arvados cluster ID, needs to be 5 lowercase alphanumeric characters.
 CLUSTER="cluster_fixme_or_this_wont_work"
 
-# The domain name you want to give to your cluster's hosts
-# the end result hostnames will be $SERVICE.$CLUSTER.$DOMAIN
+# The domain name you want to give to your cluster's hosts;
+# the end result hostnames will be $SERVICE.$DOMAIN
 DOMAIN="domain_fixme_or_this_wont_work"
 
 # For multi-node installs, the ssh log in for each node
@@ -19,7 +19,7 @@ DEPLOY_USER=admin
 INITIAL_USER=admin
 
 # If not specified, the initial user email will be composed as
-# INITIAL_USER@CLUSTER.DOMAIN
+# INITIAL_USER@DOMAIN
 INITIAL_USER_EMAIL="admin@cluster_fixme_or_this_wont_work.domain_fixme_or_this_wont_work"
 INITIAL_USER_PASSWORD="fixmepassword"
 
@@ -27,7 +27,7 @@ INITIAL_USER_PASSWORD="fixmepassword"
 # installer from the outside of the cluster's local network and still reach
 # the internal servers for configuration deployment.
 # Comment out to disable.
-USE_SSH_JUMPHOST="controller.${CLUSTER}.${DOMAIN}"
+USE_SSH_JUMPHOST="controller.${DOMAIN}"
 
 # YOU SHOULD CHANGE THESE TO SOME RANDOM STRINGS
 BLOB_SIGNING_KEY=fixmeblobsigningkeymushaveatleast32characters
@@ -96,10 +96,10 @@ MONITORING_EMAIL=${INITIAL_USER_EMAIL}
 # installer.sh will log in to each of these nodes and then provision
 # it for the specified roles.
 NODES=(
-  [controller.${CLUSTER}.${DOMAIN}]=database,api,controller,websocket,dispatcher,keepbalance
-  [workbench.${CLUSTER}.${DOMAIN}]=monitoring,workbench,workbench2,webshell,keepproxy,keepweb
-  [keep0.${CLUSTER}.${DOMAIN}]=keepstore
-  [shell.${CLUSTER}.${DOMAIN}]=shell
+  [controller.${DOMAIN}]=database,api,controller,websocket,dispatcher,keepbalance
+  [workbench.${DOMAIN}]=monitoring,workbench,workbench2,webshell,keepproxy,keepweb
+  [keep0.${DOMAIN}]=keepstore
+  [shell.${DOMAIN}]=shell
 )
 
 # Host SSL port where you want to point your browser to access Arvados
diff --git a/tools/salt-install/provision.sh b/tools/salt-install/provision.sh
index 4f044c42e9e2c323ed0289ed4bd947f9ee46546e..9824b2b827c9a814409e3fd7eba3472d04959619 100755 (executable)
@@ -287,7 +287,7 @@ else
   USE_SINGLE_HOSTNAME="no"
   # We set this variable, anyway, so sed lines do not fail and we don't need to add more
   # conditionals
-  HOSTNAME_EXT="${CLUSTER}.${DOMAIN}"
+  HOSTNAME_EXT="${DOMAIN}"
 fi
 
 if [ "${DUMP_CONFIG}" = "yes" ]; then
@@ -651,7 +651,7 @@ if [ -z "${ROLES}" ]; then
         CERT_NAME=${HOSTNAME_EXT}
       else
         # We are in a multiple-hostnames env
-        CERT_NAME=${c}.${CLUSTER}.${DOMAIN}
+        CERT_NAME=${c}.${DOMAIN}
       fi
 
       # As the pillar differs whether we use LE or custom certs, we need to do a final edition on them
@@ -763,9 +763,9 @@ else
           grep -q "letsencrypt"     ${P_DIR}/top.sls || echo "    - letsencrypt" >> ${P_DIR}/top.sls
           for SVC in grafana prometheus; do
             grep -q "letsencrypt_${SVC}_configuration" ${P_DIR}/top.sls || echo "    - letsencrypt_${SVC}_configuration" >> ${P_DIR}/top.sls
-            sed -i "s/__CERT_REQUIRES__/cmd: create-initial-cert-${SVC}.${CLUSTER}.${DOMAIN}*/g;
-                    s#__CERT_PEM__#/etc/letsencrypt/live/${SVC}.${CLUSTER}.${DOMAIN}/fullchain.pem#g;
-                    s#__CERT_KEY__#/etc/letsencrypt/live/${SVC}.${CLUSTER}.${DOMAIN}/privkey.pem#g" \
+            sed -i "s/__CERT_REQUIRES__/cmd: create-initial-cert-${SVC}.${DOMAIN}*/g;
+                    s#__CERT_PEM__#/etc/letsencrypt/live/${SVC}.${DOMAIN}/fullchain.pem#g;
+                    s#__CERT_KEY__#/etc/letsencrypt/live/${SVC}.${DOMAIN}/privkey.pem#g" \
             ${P_DIR}/nginx_${SVC}_configuration.sls
           done
           if [ "${USE_LETSENCRYPT_ROUTE53}" = "yes" ]; then
@@ -875,15 +875,15 @@ else
           # Special case for keepweb
           if [ ${R} = "keepweb" ]; then
             for kwsub in download collections; do
-              sed -i "s/__CERT_REQUIRES__/cmd: create-initial-cert-${kwsub}.${CLUSTER}.${DOMAIN}*/g;
-                      s#__CERT_PEM__#/etc/letsencrypt/live/${kwsub}.${CLUSTER}.${DOMAIN}/fullchain.pem#g;
-                      s#__CERT_KEY__#/etc/letsencrypt/live/${kwsub}.${CLUSTER}.${DOMAIN}/privkey.pem#g" \
+              sed -i "s/__CERT_REQUIRES__/cmd: create-initial-cert-${kwsub}.${DOMAIN}*/g;
+                      s#__CERT_PEM__#/etc/letsencrypt/live/${kwsub}.${DOMAIN}/fullchain.pem#g;
+                      s#__CERT_KEY__#/etc/letsencrypt/live/${kwsub}.${DOMAIN}/privkey.pem#g" \
               ${P_DIR}/nginx_${kwsub}_configuration.sls
             done
           else
-            sed -i "s/__CERT_REQUIRES__/cmd: create-initial-cert-${R}.${CLUSTER}.${DOMAIN}*/g;
-                    s#__CERT_PEM__#/etc/letsencrypt/live/${R}.${CLUSTER}.${DOMAIN}/fullchain.pem#g;
-                    s#__CERT_KEY__#/etc/letsencrypt/live/${R}.${CLUSTER}.${DOMAIN}/privkey.pem#g" \
+            sed -i "s/__CERT_REQUIRES__/cmd: create-initial-cert-${R}.${DOMAIN}*/g;
+                    s#__CERT_PEM__#/etc/letsencrypt/live/${R}.${DOMAIN}/fullchain.pem#g;
+                    s#__CERT_KEY__#/etc/letsencrypt/live/${R}.${DOMAIN}/privkey.pem#g" \
             ${P_DIR}/nginx_${R}_configuration.sls
           fi
         else
@@ -948,11 +948,11 @@ fi
 
 # Leave a copy of the Arvados CA so the user can copy it where it's required
 if [ "${SSL_MODE}" = "self-signed" ]; then
-  echo "Copying the Arvados CA certificate '${CLUSTER}.${DOMAIN}-arvados-snakeoil-ca.crt' to the installer dir, so you can import it"
+  echo "Copying the Arvados CA certificate '${DOMAIN}-arvados-snakeoil-ca.crt' to the installer dir, so you can import it"
   if [ "x${VAGRANT}" = "xyes" ]; then
-    cp /etc/ssl/certs/arvados-snakeoil-ca.pem /vagrant/${CLUSTER}.${DOMAIN}-arvados-snakeoil-ca.pem
+    cp /etc/ssl/certs/arvados-snakeoil-ca.pem /vagrant/${DOMAIN}-arvados-snakeoil-ca.pem
   else
-    cp /etc/ssl/certs/arvados-snakeoil-ca.pem ${SCRIPT_DIR}/${CLUSTER}.${DOMAIN}-arvados-snakeoil-ca.crt
+    cp /etc/ssl/certs/arvados-snakeoil-ca.pem ${SCRIPT_DIR}/${DOMAIN}-arvados-snakeoil-ca.crt
   fi
 fi