Merge branch '21606-keep-web-output-buffer'
author		Tom Clegg <tom@curii.com>
		Fri, 19 Apr 2024 18:11:30 +0000 (14:11 -0400)
committer	Tom Clegg <tom@curii.com>
		Fri, 19 Apr 2024 18:11:30 +0000 (14:11 -0400)
closes #21606

Arvados-DCO-1.1-Signed-off-by: Tom Clegg <tom@curii.com>

24 files changed:
build/package-testing/rpm-common-test-packages.sh
build/rails-package-scripts/postinst.sh
build/run-library.sh
doc/admin/config-urls.html.textile.liquid
doc/install/install-api-server.html.textile.liquid
services/keep-web/server_test.go
services/workbench2/cypress/e2e/collection.cy.js
services/workbench2/cypress/e2e/sharing.cy.js
services/workbench2/package.json
services/workbench2/src/services/collection-service/collection-service.ts
services/workbench2/src/services/services.ts
services/workbench2/src/views-components/login-form/login-form.tsx
services/workbench2/src/views/login-panel/login-panel.tsx
services/workbench2/src/views/process-panel/process-io-card.tsx
services/workbench2/yarn.lock
tools/compute-images/scripts/base.sh
tools/salt-install/config_examples/multi_host/aws/pillars/logrotate.sls [new file with mode: 0644]
tools/salt-install/config_examples/multi_host/aws/pillars/nginx_api_configuration.sls
tools/salt-install/config_examples/single_host/multiple_hostnames/pillars/logrotate.sls [new file with mode: 0644]
tools/salt-install/config_examples/single_host/multiple_hostnames/pillars/nginx_api_configuration.sls
tools/salt-install/config_examples/single_host/single_hostname/pillars/logrotate.sls [new file with mode: 0644]
tools/salt-install/config_examples/single_host/single_hostname/pillars/nginx_api_configuration.sls
tools/salt-install/config_examples/single_host/single_hostname/pillars/nginx_workbench_configuration.sls
tools/salt-install/provision.sh

diff --git a/build/package-testing/rpm-common-test-packages.sh b/build/package-testing/rpm-common-test-packages.sh
index cd41f1d920f9e787a3b5bd75fdfea5e6c83fd8ea..b6d7fec46876cd027ef1d34926d6563dd364cefc 100755
@@ -30,6 +30,10 @@ diff "$ARV_PACKAGES_DIR/$1".{before,after} >"$ARV_PACKAGES_DIR/$1.diff" || true
 mkdir -p /tmp/opts
 cd /tmp/opts
 
+# Install other packages alongside to test for build id conflicts.
+# This line can be removed after we have test-provision-rocky8, #21426.
+microdnf --assumeyes install arvados-client arvados-server python3-arvados-python-client
+
 rpm2cpio $(ls -t "$ARV_PACKAGES_DIR/$1"-*.rpm | head -n1) | cpio -idm 2>/dev/null
 
 if [[ "$DEBUG" != "0" ]]; then
diff --git a/build/rails-package-scripts/postinst.sh b/build/rails-package-scripts/postinst.sh
index e317f85aaff27ac246885c76263ed5365d75cbc2..17802a7b52b54f8e2614afb2059ebd2dbeef9855 100644
@@ -208,6 +208,18 @@ configure_version() {
   run_and_report "Running bundle install" \
       $COMMAND_PREFIX bin/bundle install --local --quiet
 
+  # As of April 2024/Bundler 2.4, for some reason `bundle install` skips
+  # zlib if it's already installed as a system-wide gem, which it often will
+  # be because arvados gems pull it in. If this happened, install it in the
+  # bundle manually as a workaround.
+  if ! $COMMAND_PREFIX bin/bundle info zlib >/dev/null 2>&1; then
+      local RUBY_VERSION="$($COMMAND_PREFIX ruby -e 'puts RUBY_VERSION')"
+      run_and_report "Adding zlib to bundle" \
+                     $COMMAND_PREFIX gem install \
+                     --install-dir="$SHARED_PATH/vendor_bundle/ruby/$RUBY_VERSION" \
+                     vendor/cache/zlib-*.gem
+  fi
+
   echo -n "Ensuring directory and file permissions ..."
   # Ensure correct ownership of a few files
   chown "$WWW_OWNER:" $RELEASE_PATH/config/environment.rb
diff --git a/build/run-library.sh b/build/run-library.sh
index 6380e1cb19fafe18e2ea5034f88d9fc938abc07c..03d99b13274d233e8d3548a8631ded2566c3be6b 100755
@@ -262,6 +262,13 @@ package_go_binary_worker() {
       binpath="$GOPATH/bin/linux_${target_arch}/${basename}"
     fi
 
+    case "$package_format" in
+        # As of April 2024 we package identical Go binaries under different
+        # packages and names. This upsets the build id database, so don't
+        # register ourselves there.
+        rpm) switches+=(--rpm-rpmbuild-define="_build_id_links none") ;;
+    esac
+
     systemd_unit="$WORKSPACE/${src_path}/${prog}.service"
     if [[ -e "${systemd_unit}" ]]; then
         switches+=(
diff --git a/doc/admin/config-urls.html.textile.liquid b/doc/admin/config-urls.html.textile.liquid
index 3cf6e79722a4ae03b9f55b4b6fd8fd891fc34c03..7c998061fe20d7ae252e7463c39d87d641251ae8 100644
@@ -174,6 +174,7 @@ server {
   index  index.html index.htm index.php;
 
   passenger_enabled on;
+  passenger_preload_bundler on;
 
   # If you are using RVM, uncomment the line below.
   # If you're using system ruby, leave it commented out.
diff --git a/doc/install/install-api-server.html.textile.liquid b/doc/install/install-api-server.html.textile.liquid
index 06f94a8a5f11f26329151fbe64f63a2d6d4c0589..8e4f347d49eed47acd51b18a3dcf617734290341 100644
@@ -30,7 +30,7 @@ h2(#dependencies). Install dependencies
 # "Install PostgreSQL":install-postgresql.html
 # "Install Ruby and Bundler":ruby.html
 # "Install nginx":nginx.html
-# "Install Phusion Passenger":https://www.phusionpassenger.com/library/walkthroughs/deploy/ruby/ownserver/nginx/oss/install_passenger_main.html
+# "Install Phusion Passenger":https://www.phusionpassenger.com/docs/tutorials/deploy_to_production/installations/oss/ownserver/ruby/nginx/
 
 h2(#database-setup). Set up database
 
@@ -178,6 +178,7 @@ server {
   index  index.html index.htm index.php;
 
   passenger_enabled on;
+  passenger_preload_bundler on;
 
   # <span class="userinput">If you are using RVM, uncomment the line below.</span>
   # <span class="userinput">If you're using system ruby, leave it commented out.</span>
diff --git a/services/keep-web/server_test.go b/services/keep-web/server_test.go
index f79df2021213310f72ec3e2da37eada86eaab283..0308f949f4cbd0c4d3b47e6ab6e599100a0f03aa 100644
@@ -518,7 +518,7 @@ func (s *IntegrationSuite) TestMetrics(c *check.C) {
        allmetrics, err := ioutil.ReadAll(resp.Body)
        c.Check(err, check.IsNil)
 
-       c.Check(string(allmetrics), check.Matches, `(?ms).*\narvados_keepweb_download_apparent_backend_speed_bucket{size_range="0",le="1e\+06"} 4\n.*`)
+       c.Check(string(allmetrics), check.Matches, `(?ms).*\narvados_keepweb_download_apparent_backend_speed_bucket{size_range="0",le="\+Inf"} 4\n.*`)
        c.Check(string(allmetrics), check.Matches, `(?ms).*\narvados_keepweb_download_speed_bucket{size_range="0",le="\+Inf"} 4\n.*`)
        c.Check(string(allmetrics), check.Matches, `(?ms).*\narvados_keepweb_upload_speed_bucket{size_range="0",le="\+Inf"} 2\n.*`)
        c.Check(string(allmetrics), check.Matches, `(?ms).*\narvados_keepweb_upload_sync_delay_seconds_bucket{size_range="0",le="10"} 2\n.*`)
diff --git a/services/workbench2/cypress/e2e/collection.cy.js b/services/workbench2/cypress/e2e/collection.cy.js
index 38a0c973fd87ee076ed67aac5d8d238afcbaac1d..20ecf11c09f5a57e27615b7aabfe73105c9a88a4 100644
@@ -143,7 +143,7 @@ describe("Collection panel tests", function () {
                 cy.get("[data-cy=name-field]").within(() => {
                     cy.get("input").type(" renamed");
                 });
-                cy.get("[data-cy=form-submit-btn]").click();
+                cy.get("[data-cy=form-submit-btn]").click({timeout: 10000});
             });
         cy.get("[data-cy=form-dialog]").should("not.exist");
         // Attempt to rename the collection with the duplicate name
diff --git a/services/workbench2/cypress/e2e/sharing.cy.js b/services/workbench2/cypress/e2e/sharing.cy.js
index 05a7d470bf6e1b1d7ed048dc27c46c60d27cd9aa..4cb7e487853941773cd7494282430777bd4fdc52 100644
@@ -31,7 +31,7 @@ describe('Sharing tests', function () {
 
             cy.get('main').contains(sharedCollection.name).rightclick();
             cy.get('[data-cy=context-menu]').within(() => {
-                cy.contains('Share').click();
+                cy.contains('Share').click({ waitForAnimations: false });
             });
             cy.get('.sharing-dialog').within(() => {
                 cy.contains('Sharing URLs').click();
@@ -63,7 +63,7 @@ describe('Sharing tests', function () {
             cy.contains('Refresh').click();
             cy.get('main').contains(mySharedWritableProject.name).rightclick();
             cy.get('[data-cy=context-menu]').within(() => {
-                cy.contains('Share').click();
+                cy.contains('Share').click({ waitForAnimations: false });
             });
             cy.get('[id="select-permissions"]').as('selectPermissions');
             cy.get('@selectPermissions').click();
@@ -73,7 +73,7 @@ describe('Sharing tests', function () {
             cy.get('[role=tooltip]').click();
             cy.get('@sharingDialog').within(() => {
                 cy.get('[data-cy=add-invited-people]').click();
-                cy.contains('Close').click();
+                cy.contains('Close').click({ waitForAnimations: false });
             });
         });
 
@@ -84,14 +84,14 @@ describe('Sharing tests', function () {
             cy.contains('Refresh').click();
             cy.get('main').contains(mySharedReadonlyProject.name).rightclick();
             cy.get('[data-cy=context-menu]').within(() => {
-                cy.contains('Share').click();
+                cy.contains('Share').click({ waitForAnimations: false });
             });
             cy.get('.sharing-dialog').as('sharingDialog');
             cy.get('[data-cy=invite-people-field]').find('input').type(activeUser.user.email);
             cy.get('[role=tooltip]').click();
             cy.get('@sharingDialog').within(() => {
                 cy.get('[data-cy=add-invited-people]').click();
-                cy.contains('Close').click();
+                cy.contains('Close').click({ waitForAnimations: false });
             });
         });
 
@@ -117,7 +117,7 @@ describe('Sharing tests', function () {
                 // Test move to trash
                 cy.get('main').contains(mySharedWritableProject.name).rightclick();
                 cy.get('[data-cy=context-menu]').should('contain', 'Move to trash');
-                cy.get('[data-cy=context-menu]').contains('Move to trash').click();
+                cy.get('[data-cy=context-menu]').contains('Move to trash').click({ waitForAnimations: false });
 
                 // GUARD: Let's wait for the above removed project to disappear
                 // before continuing, to avoid intermittent failures.
@@ -161,7 +161,7 @@ describe('Sharing tests', function () {
             .then(function ([]) {
                 cy.loginAs(adminUser);
                 cy.get('[data-cy=project-panel]').contains(collName).rightclick();
-                cy.get('[data-cy=context-menu]').contains('Share').click();
+                cy.get('[data-cy=context-menu]').contains('Share').click({ waitForAnimations: false });
                 cy.get('button').get('[data-cy=add-invited-people]').should('be.disabled');
                 cy.get('[data-cy=invite-people-field] input').type('Anonymous');
                 cy.get('div[role=tooltip]').contains('anonymous').click();
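Both Cypress tweaks above use standard per-command click() options rather than new APIs: `timeout` stretches the actionability retry window past the 4-second default, and `waitForAnimations: false` lets the click proceed while the Material-UI dialog/menu transition is still animating. A minimal sketch of the same options (the selectors are illustrative, not tied to a particular spec):

    /// <reference types="cypress" />

    // Give a slow dialog submit button up to 10s to become actionable.
    cy.get("[data-cy=form-submit-btn]").click({ timeout: 10000 });

    // Don't wait for the context-menu animation to settle before clicking Share.
    cy.get("[data-cy=context-menu]").contains("Share").click({ waitForAnimations: false });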
diff --git a/services/workbench2/package.json b/services/workbench2/package.json
index 94e35029c4d77778943ff08a8f43edd4c7dc9420..71dc4d7cee3aa1e55977ef67e62e4334b5c07166 100644
@@ -28,7 +28,7 @@
     "@types/react-window": "1.8.2",
     "@types/redux-form": "7.4.12",
     "@types/shell-escape": "^0.2.0",
-    "axios": "^0.21.1",
+    "axios": "^0.28.1",
     "bootstrap": "^5.3.2",
     "caniuse-lite": "1.0.30001606",
     "classnames": "2.2.6",
diff --git a/services/workbench2/src/services/collection-service/collection-service.ts b/services/workbench2/src/services/collection-service/collection-service.ts
index e50e5ed35026403c6332865d6b897c32a01f5605..12d31d1678b7de140a2f5cb34c4849927e1a1f32 100644
@@ -3,7 +3,7 @@
 // SPDX-License-Identifier: AGPL-3.0
 
 import { CollectionResource, defaultCollectionSelectedFields } from "models/collection";
-import { AxiosInstance } from "axios";
+import { AxiosInstance, AxiosResponse } from "axios";
 import { CollectionFile, CollectionDirectory } from "models/collection-file";
 import { WebDAV } from "common/webdav";
 import { AuthService } from "../auth-service/auth-service";
@@ -20,6 +20,11 @@ type CollectionPartialUpdateOrCreate =
     | (Partial<CollectionResource> & Pick<CollectionResource, "uuid">)
     | (Partial<CollectionResource> & Pick<CollectionResource, "ownerUuid">);
 
+type ReplaceFilesPayload = {
+    collection: Partial<CollectionResource>;
+    replace_files: {[key: string]: string};
+}
+
 export const emptyCollectionPdh = "d41d8cd98f00b204e9800998ecf8427e+0";
 export const SOURCE_DESTINATION_EQUAL_ERROR_MESSAGE = "Source and destination cannot be the same";
 
@@ -78,7 +83,7 @@ export class CollectionService extends TrashableResourceService<CollectionResour
     }
 
     private replaceFiles(data: CollectionPartialUpdateOrCreate, fileMap: {}, showErrors?: boolean) {
-        const payload = {
+        const payload: ReplaceFilesPayload = {
             collection: {
                 preserve_version: true,
                 ...CommonService.mapKeys(snakeCase)(data),
@@ -89,14 +94,14 @@ export class CollectionService extends TrashableResourceService<CollectionResour
         };
         if (data.uuid) {
             return CommonService.defaultResponse(
-                this.serverApi.put<CollectionResource>(`/${this.resourceType}/${data.uuid}`, payload),
+                this.serverApi.put<ReplaceFilesPayload, AxiosResponse<CollectionResource>>(`/${this.resourceType}/${data.uuid}`, payload),
                 this.actions,
                 true, // mapKeys
                 showErrors
             );
         } else {
             return CommonService.defaultResponse(
-                this.serverApi.post<CollectionResource>(`/${this.resourceType}`, payload),
+                this.serverApi.post<ReplaceFilesPayload, AxiosResponse<CollectionResource>>(`/${this.resourceType}`, payload),
                 this.actions,
                 true, // mapKeys
                 showErrors
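The explicit type arguments above follow axios's request typings, which since roughly 0.27 have the shape put<T = any, R = AxiosResponse<T>, D = any>(url, data?: D, config?): Promise<R>; pinning the second argument keeps each call resolving to AxiosResponse<CollectionResource> across the 0.21 → 0.28 bump. A rough, self-contained sketch of the same pattern (the types, base URL, and helper name below are simplified stand-ins, not the real workbench2 code):

    import axios, { AxiosResponse } from "axios";

    // Simplified stand-ins for the workbench2 types.
    type ReplaceFilesPayload = {
        collection: { preserve_version?: boolean };
        replace_files: { [key: string]: string };
    };
    type CollectionResource = { uuid: string; name?: string };

    const serverApi = axios.create({ baseURL: "https://xxxxx.example.com/arvados/v1" });

    async function replaceFiles(uuid: string, payload: ReplaceFilesPayload): Promise<CollectionResource> {
        // The second type argument fixes the resolved value; the payload type is inferred.
        const resp = await serverApi.put<ReplaceFilesPayload, AxiosResponse<CollectionResource>>(
            `/collections/${uuid}`,
            payload,
        );
        return resp.data;
    }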
diff --git a/services/workbench2/src/services/services.ts b/services/workbench2/src/services/services.ts
index 48cd931127d995b094f441250bf8baf14c525ad9..12938e82d6f351cf12d6fd0dd6b70e49c24d7c6c 100644
@@ -44,7 +44,8 @@ export function setAuthorizationHeader(services: ServiceRepository, token: strin
 }
 
 export function removeAuthorizationHeader(services: ServiceRepository) {
-    delete services.apiClient.defaults.headers.common;
+    services.apiClient.defaults.headers.common = {};
+
     services.keepWebdavClient.setAuthorization(undefined);
     services.apiWebdavClient.setAuthorization(undefined);
 }
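Assigning an empty object clears the stored Authorization header while keeping defaults.headers.common itself around for axios's header merging and for the next setAuthorizationHeader call. A small sketch of the before/after pattern (the client name and header format are illustrative):

    import axios from "axios";

    const apiClient = axios.create();

    // Remember the token for every request made through this client.
    function setAuthorizationHeader(token: string) {
        apiClient.defaults.headers.common = { Authorization: `Bearer ${token}` };
    }

    // Forget the token but keep the `common` bucket in place,
    // instead of `delete apiClient.defaults.headers.common`.
    function removeAuthorizationHeader() {
        apiClient.defaults.headers.common = {};
    }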
diff --git a/services/workbench2/src/views-components/login-form/login-form.tsx b/services/workbench2/src/views-components/login-form/login-form.tsx
index 6c5902653bb3cdf21b1416430dfcdd4af6881ad3..7d71078c31d1b69af4e3e636f52e9193acd5d15e 100644
@@ -13,6 +13,7 @@ import { DispatchProp } from 'react-redux';
 import { saveApiToken } from 'store/auth/auth-action';
 import { navigateToRootProject } from 'store/navigation/navigation-action';
 import { replace } from 'react-router-redux';
+import { PasswordLoginResponse } from 'views/login-panel/login-panel';
 
 type CssRules = 'root' | 'loginBtn' | 'card' | 'wrapper' | 'progress';
 
@@ -46,7 +47,7 @@ const styles: StyleRulesCallback<CssRules> = theme => ({
 });
 
 type LoginFormProps = DispatchProp<any> & WithStyles<CssRules> & {
-    handleSubmit: (username: string, password: string) => AxiosPromise;
+    handleSubmit: (username: string, password: string) => AxiosPromise<PasswordLoginResponse>;
     loginLabel?: string,
 };
 
diff --git a/services/workbench2/src/views/login-panel/login-panel.tsx b/services/workbench2/src/views/login-panel/login-panel.tsx
index f834b3b6dfcaf2346890fd9d38da848a20f60ad4..452a66672a327bd9fd67db950efb295136d96efe 100644
@@ -10,7 +10,7 @@ import { login, authActions } from 'store/auth/auth-action';
 import { ArvadosTheme } from 'common/custom-theme';
 import { RootState } from 'store/store';
 import { LoginForm } from 'views-components/login-form/login-form';
-import Axios from 'axios';
+import Axios, { AxiosResponse } from 'axios';
 import { Config } from 'common/config';
 import { sanitizeHTML } from 'common/html-sanitize';
 
@@ -51,11 +51,17 @@ const styles: StyleRulesCallback<CssRules> = (theme: ArvadosTheme) => ({
     }
 });
 
+export type PasswordLoginResponse = {
+    uuid?: string;
+    api_token?: string;
+    message?: string;
+};
+
 const doPasswordLogin = (url: string) => (username: string, password: string) => {
     const formData: string[] = [];
     formData.push('username='+encodeURIComponent(username));
     formData.push('password='+encodeURIComponent(password));
-    return Axios.post(`${url}/arvados/v1/users/authenticate`, formData.join('&'), {
+    return Axios.post<string, AxiosResponse<PasswordLoginResponse>>(`${url}/arvados/v1/users/authenticate`, formData.join('&'), {
         headers: {
             'Content-Type': 'application/x-www-form-urlencoded'
         },
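With the response body typed, doPasswordLogin now returns an AxiosPromise<PasswordLoginResponse> (AxiosPromise<T> is axios's alias for Promise<AxiosResponse<T>>), so callers get typed uuid / api_token / message fields. A hypothetical consumer, just to show the shape (the handler below is not the real form code):

    import { AxiosPromise, AxiosResponse } from "axios";

    type PasswordLoginResponse = {
        uuid?: string;
        api_token?: string;
        message?: string;
    };

    // Hypothetical wrapper; the real LoginForm dispatches saveApiToken and navigation actions.
    async function submitLogin(
        handleSubmit: (username: string, password: string) => AxiosPromise<PasswordLoginResponse>,
        username: string,
        password: string,
    ): Promise<string> {
        const response: AxiosResponse<PasswordLoginResponse> = await handleSubmit(username, password);
        if (response.data.api_token) {
            return response.data.api_token;
        }
        throw new Error(response.data.message || "Authentication failed");
    }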
diff --git a/services/workbench2/src/views/process-panel/process-io-card.tsx b/services/workbench2/src/views/process-panel/process-io-card.tsx
index 5851b145d4337611b8e0802df5435dffad467336..9fce7e83d4bd516cb5f9e1247fd320a7b0b09404 100644
@@ -123,7 +123,7 @@ const styles: StyleRulesCallback<CssRules> = (theme: ArvadosTheme) => ({
         color: theme.customs.colors.greyD,
         fontSize: "1.875rem",
     },
-    // Applies to table tab's content
+    // Applies to table tab and collection table content
     tableWrapper: {
         height: "auto",
         maxHeight: `calc(100% - ${theme.spacing.unit * 6}px)`,
@@ -131,7 +131,8 @@ const styles: StyleRulesCallback<CssRules> = (theme: ArvadosTheme) => ({
         // Use flexbox to keep scrolling at the virtual list level
         display: "flex",
         flexDirection: "column",
-        alignItems: "start", // Prevents scroll bars at different levels in json tab
+        alignItems: "stretch", // Stretches output collection to full width
+
     },
 
     // Param table virtual list styles
@@ -486,9 +487,9 @@ export const ProcessIOCard = withStyles(styles)(
                                             {hasOutputCollecton && <Tab label="Collection" />}
                                             {isRawLoaded && <Tab label="JSON" />}
                                         </Tabs>
-                                        <div className={classes.tableWrapper}>
-                                            {subProcTabState === 0 && hasInputMounts && <ProcessInputMounts mounts={mounts || []} />}
-                                            {subProcTabState === 0 && hasOutputCollecton && (
+                                        {subProcTabState === 0 && hasInputMounts && <ProcessInputMounts mounts={mounts || []} />}
+                                        {subProcTabState === 0 && hasOutputCollecton && (
+                                            <div className={classes.tableWrapper}>
                                                 <>
                                                     {outputUuid && (
                                                         <Typography className={classes.collectionLink}>
@@ -508,13 +509,13 @@ export const ProcessIOCard = withStyles(styles)(
                                                         currentItemUuid={outputUuid}
                                                     />
                                                 </>
-                                            )}
-                                            {isRawLoaded && (subProcTabState === 1 || (!hasInputMounts && !hasOutputCollecton)) && (
-                                                <div className={classes.jsonWrapper}>
-                                                    <ProcessIORaw data={raw} />
-                                                </div>
-                                            )}
-                                        </div>
+                                            </div>
+                                        )}
+                                        {isRawLoaded && (subProcTabState === 1 || (!hasInputMounts && !hasOutputCollecton)) && (
+                                            <div className={classes.jsonWrapper}>
+                                                <ProcessIORaw data={raw} />
+                                            </div>
+                                        )}
                                     </>
                                 ) : (
                                     <Grid
diff --git a/services/workbench2/yarn.lock b/services/workbench2/yarn.lock
index 21fcc817c53887db5fffaede272a2fa1a85ce70e..de1da9a8583022f71920588771cfd0160a64ce07 100644
@@ -4162,7 +4162,7 @@ __metadata:
     "@types/shell-escape": ^0.2.0
     "@types/sinon": 7.5
     "@types/uuid": 3.4.4
-    axios: ^0.21.1
+    axios: ^0.28.1
     axios-mock-adapter: 1.17.0
     bootstrap: ^5.3.2
     caniuse-lite: 1.0.30001606
@@ -4439,12 +4439,14 @@ __metadata:
   languageName: node
   linkType: hard
 
-"axios@npm:^0.21.1":
-  version: 0.21.4
-  resolution: "axios@npm:0.21.4"
+"axios@npm:^0.28.1":
+  version: 0.28.1
+  resolution: "axios@npm:0.28.1"
   dependencies:
-    follow-redirects: ^1.14.0
-  checksum: 44245f24ac971e7458f3120c92f9d66d1fc695e8b97019139de5b0cc65d9b8104647db01e5f46917728edfc0cfd88eb30fc4c55e6053eef4ace76768ce95ff3c
+    follow-redirects: ^1.15.0
+    form-data: ^4.0.0
+    proxy-from-env: ^1.1.0
+  checksum: 5115a38d79064d07437c5a28f15841e3607634040e3120ec06a2c4367a7d07cf213b16496eab53b6f58ebc5fb377a440ba9ed4782529b14449a1e285734bfb54
   languageName: node
   linkType: hard
 
@@ -5851,7 +5853,7 @@ __metadata:
   languageName: node
   linkType: hard
 
-"combined-stream@npm:^1.0.6, combined-stream@npm:~1.0.6":
+"combined-stream@npm:^1.0.6, combined-stream@npm:^1.0.8, combined-stream@npm:~1.0.6":
   version: 1.0.8
   resolution: "combined-stream@npm:1.0.8"
   dependencies:
@@ -8712,7 +8714,7 @@ __metadata:
   languageName: node
   linkType: hard
 
-"follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.14.0":
+"follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.15.0":
   version: 1.15.6
   resolution: "follow-redirects@npm:1.15.6"
   peerDependenciesMeta:
@@ -8777,6 +8779,17 @@ __metadata:
   languageName: node
   linkType: hard
 
+"form-data@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "form-data@npm:4.0.0"
+  dependencies:
+    asynckit: ^0.4.0
+    combined-stream: ^1.0.8
+    mime-types: ^2.1.12
+  checksum: 01135bf8675f9d5c61ff18e2e2932f719ca4de964e3be90ef4c36aacfc7b9cb2fceb5eca0b7e0190e3383fe51c5b37f4cb80b62ca06a99aaabfcfd6ac7c9328c
+  languageName: node
+  linkType: hard
+
 "form-data@npm:~2.3.2":
   version: 2.3.3
   resolution: "form-data@npm:2.3.3"
@@ -15335,6 +15348,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"proxy-from-env@npm:^1.1.0":
+  version: 1.1.0
+  resolution: "proxy-from-env@npm:1.1.0"
+  checksum: ed7fcc2ba0a33404958e34d95d18638249a68c430e30fcb6c478497d72739ba64ce9810a24f53a7d921d0c065e5b78e3822759800698167256b04659366ca4d4
+  languageName: node
+  linkType: hard
+
 "prr@npm:~1.0.1":
   version: 1.0.1
   resolution: "prr@npm:1.0.1"
@@ -18361,8 +18381,8 @@ __metadata:
   linkType: hard
 
 "tar@npm:^6.0.2, tar@npm:^6.1.11, tar@npm:^6.1.2":
-  version: 6.2.0
-  resolution: "tar@npm:6.2.0"
+  version: 6.2.1
+  resolution: "tar@npm:6.2.1"
   dependencies:
     chownr: ^2.0.0
     fs-minipass: ^2.0.0
@@ -18370,7 +18390,7 @@ __metadata:
     minizlib: ^2.1.1
     mkdirp: ^1.0.3
     yallist: ^4.0.0
-  checksum: db4d9fe74a2082c3a5016630092c54c8375ff3b280186938cfd104f2e089c4fd9bad58688ef6be9cf186a889671bf355c7cda38f09bbf60604b281715ca57f5c
+  checksum: f1322768c9741a25356c11373bce918483f40fa9a25c69c59410c8a1247632487edef5fe76c5f12ac51a6356d2f1829e96d2bc34098668a2fc34d76050ac2b6c
   languageName: node
   linkType: hard
 
diff --git a/tools/compute-images/scripts/base.sh b/tools/compute-images/scripts/base.sh
index 370c3f3a3a2794b4889adc545db556a56958d3e6..c19febdc0136a3f2cda3c2c36820caf70f51ee76 100644
@@ -15,8 +15,8 @@ wait_for_apt_locks() {
   done
 }
 
-# $DIST should not have a dot if there is one in /etc/os-release (e.g. 18.04)
-DIST=$(. /etc/os-release; echo $ID$VERSION_ID | tr -d '.')
+. /etc/os-release
+DISTRO_ID="$ID"
 
 # Run apt-get update
 $SUDO DEBIAN_FRONTEND=noninteractive apt-get --yes update
@@ -36,9 +36,6 @@ if [[ ! -d /var/lib/cloud/scripts/per-boot ]]; then
   mkdir -p /var/lib/cloud/scripts/per-boot
 fi
 
-TMP_LSB=`/usr/bin/lsb_release -c -s`
-LSB_RELEASE_CODENAME=${TMP_LSB//[$'\t\r\n ']}
-
 SET_RESOLVER=
 if [ -n "$RESOLVER" ]; then
   SET_RESOLVER="--dns ${RESOLVER}"
@@ -46,7 +43,7 @@ fi
 
 # Add the arvados apt repository
 echo "# apt.arvados.org" |$SUDO tee --append /etc/apt/sources.list.d/apt.arvados.org.list
-echo "deb http://apt.arvados.org/$LSB_RELEASE_CODENAME $LSB_RELEASE_CODENAME${REPOSUFFIX} main" |$SUDO tee --append /etc/apt/sources.list.d/apt.arvados.org.list
+echo "deb http://apt.arvados.org/$VERSION_CODENAME $VERSION_CODENAME${REPOSUFFIX} main" |$SUDO tee --append /etc/apt/sources.list.d/apt.arvados.org.list
 
 # Add the arvados signing key
 cat /tmp/1078ECD7.asc | $SUDO apt-key add -
@@ -75,32 +72,12 @@ wait_for_apt_locks && $SUDO DEBIAN_FRONTEND=noninteractive apt-get -qq --yes ins
   python3-arvados-fuse \
   arvados-docker-cleaner
 
-# We want Docker 20.10 or later so that we support glibc 2.33 and up in the container, cf.
-# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1005906
-dockerversion=5:20.10.13~3-0
-if [[ "$DIST" =~ ^debian ]]; then
-  family="debian"
-  if [ "$DIST" == "debian11" ]; then
-    distro="bullseye"
-  elif [ "$DIST" == "debian12" ]; then
-    distro="bookworm"
-  fi
-elif [[ "$DIST" =~ ^ubuntu ]]; then
-  family="ubuntu"
-  if [ "$DIST" == "ubuntu2004" ]; then
-    distro="focal"
-  elif [ "$DIST" == "ubuntu2204" ]; then
-    distro="jammy"
-  fi
-else
-  echo "Unsupported distribution $DIST"
-  exit 1
-fi
-curl -fsSL https://download.docker.com/linux/$family/gpg | $SUDO gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
-echo deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/$family/ $distro stable | \
+DOCKER_URL="https://download.docker.com/linux/$DISTRO_ID"
+curl -fsSL "$DOCKER_URL/gpg" | $SUDO gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] $DOCKER_URL/ $VERSION_CODENAME stable" | \
     $SUDO tee /etc/apt/sources.list.d/docker.list
 $SUDO apt-get update
-$SUDO apt-get -yq --no-install-recommends install docker-ce=${dockerversion}~${family}-${distro}
+$SUDO apt-get -yq --no-install-recommends install docker-ce
 
 # Set a higher ulimit and the resolver (if set) for docker
 $SUDO sed "s/ExecStart=\(.*\)/ExecStart=\1 --default-ulimit nofile=10000:10000 ${SET_RESOLVER}/g" \
@@ -173,7 +150,7 @@ $SUDO chown root:root /etc/cloud/cloud.cfg.d/07_compute_arvados_dispatch_cloud.c
 
 if [ "$NVIDIA_GPU_SUPPORT" == "1" ]; then
   # We need a kernel and matching headers
-  if [[ "$DIST" =~ ^debian ]]; then
+  if [[ "$DISTRO_ID" == debian ]]; then
     $SUDO apt-get -y install linux-image-cloud-amd64 linux-headers-cloud-amd64
   elif [ "$CLOUD" == "azure" ]; then
     $SUDO apt-get -y install linux-image-azure linux-headers-azure
@@ -182,10 +159,11 @@ if [ "$NVIDIA_GPU_SUPPORT" == "1" ]; then
   fi
 
   # Install CUDA
-  $SUDO apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/$DIST/x86_64/7fa2af80.pub
-  $SUDO apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/$DIST/x86_64/3bf863cc.pub
+  NVIDIA_URL="https://developer.download.nvidia.com/compute/cuda/repos/$(echo "$DISTRO_ID$VERSION_ID" | tr -d .)/x86_64"
+  $SUDO apt-key adv --fetch-keys "$NVIDIA_URL/7fa2af80.pub"
+  $SUDO apt-key adv --fetch-keys "$NVIDIA_URL/3bf863cc.pub"
   $SUDO apt-get -y install software-properties-common
-  $SUDO add-apt-repository "deb https://developer.download.nvidia.com/compute/cuda/repos/$DIST/x86_64/ /"
+  $SUDO add-apt-repository "deb $NVIDIA_URL/ /"
   $SUDO add-apt-repository contrib
   $SUDO apt-get update
   $SUDO apt-get -y install cuda
@@ -193,7 +171,7 @@ if [ "$NVIDIA_GPU_SUPPORT" == "1" ]; then
   # Install libnvidia-container, the tooling for Docker/Singularity
   curl -s -L https://nvidia.github.io/libnvidia-container/gpgkey | \
     $SUDO apt-key add -
-  if [ "$DIST" == "debian11" ]; then
+  if [[ "$VERSION_CODENAME" == bullseye ]]; then
     # As of 2021-12-16 libnvidia-container and friends are only available for
     # Debian 10, not yet Debian 11. Install experimental rc1 package as per this
     # workaround:
@@ -202,9 +180,7 @@ if [ "$NVIDIA_GPU_SUPPORT" == "1" ]; then
       $SUDO tee /etc/apt/sources.list.d/libnvidia-container.list
     $SUDO sed -i -e '/experimental/ s/^#//g' /etc/apt/sources.list.d/libnvidia-container.list
   else
-    # here, $DIST should have a dot if there is one in /etc/os-release (e.g. 18.04)...
-    DIST=$(. /etc/os-release; echo $ID$VERSION_ID)
-    curl -s -L https://nvidia.github.io/libnvidia-container/$DIST/libnvidia-container.list | \
+    curl -s -L "https://nvidia.github.io/libnvidia-container/$DISTRO_ID$VERSION_ID/libnvidia-container.list" | \
       $SUDO tee /etc/apt/sources.list.d/libnvidia-container.list
   fi
 
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/logrotate.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/logrotate.sls
new file mode 100644
index 0000000..8c455e9
--- /dev/null
+++ b/tools/salt-install/config_examples/multi_host/aws/pillars/logrotate.sls
@@ -0,0 +1,14 @@
+---
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# The logrotate formula checks that an associated service is running.
+# The default it checks is cron.
+# All the distributions Arvados supports (Debian 11+/Ubuntu 20.04+)
+# have switched to a systemd timer, so check that instead.
+# Refer to logrotate-formula's documentation for details
+# https://github.com/salt-formulas/salt-formula-logrotate/blob/master/README.rst
+
+logrotate:
+  service: logrotate.timer
diff --git a/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_api_configuration.sls b/tools/salt-install/config_examples/multi_host/aws/pillars/nginx_api_configuration.sls
index bfe0386e9316fe848bccf5e775d452c1462e653c..d27552f6fbecada020d4d9aac8d093d567b32b59 100644
@@ -25,4 +25,5 @@ nginx:
             - access_log: /var/log/nginx/api.__DOMAIN__-upstream.access.log combined
             - error_log: /var/log/nginx/api.__DOMAIN__-upstream.error.log
             - passenger_enabled: 'on'
+            - passenger_preload_bundler: 'on'
             - client_max_body_size: 128m
diff --git a/tools/salt-install/config_examples/single_host/multiple_hostnames/pillars/logrotate.sls b/tools/salt-install/config_examples/single_host/multiple_hostnames/pillars/logrotate.sls
new file mode 100644
index 0000000..8c455e9
--- /dev/null
+++ b/tools/salt-install/config_examples/single_host/multiple_hostnames/pillars/logrotate.sls
@@ -0,0 +1,14 @@
+---
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# The logrotate formula checks that an associated service is running.
+# The default it checks is cron.
+# All the distributions Arvados supports (Debian 11+/Ubuntu 20.04+)
+# have switched to a systemd timer, so check that instead.
+# Refer to logrotate-formula's documentation for details
+# https://github.com/salt-formulas/salt-formula-logrotate/blob/master/README.rst
+
+logrotate:
+  service: logrotate.timer
diff --git a/tools/salt-install/config_examples/single_host/multiple_hostnames/pillars/nginx_api_configuration.sls b/tools/salt-install/config_examples/single_host/multiple_hostnames/pillars/nginx_api_configuration.sls
index 54087f6d6d0fe43ae9c1a12e71ac2604935a2635..b567af90d7fbff5bf6be71d2c8a6e9c4b84fb95c 100644
@@ -31,4 +31,5 @@ nginx:
             - access_log: /var/log/nginx/api.__CLUSTER__.__DOMAIN__-upstream.access.log combined
             - error_log: /var/log/nginx/api.__CLUSTER__.__DOMAIN__-upstream.error.log
             - passenger_enabled: 'on'
+            - passenger_preload_bundler: 'on'
             - client_max_body_size: 128m
diff --git a/tools/salt-install/config_examples/single_host/single_hostname/pillars/logrotate.sls b/tools/salt-install/config_examples/single_host/single_hostname/pillars/logrotate.sls
new file mode 100644
index 0000000..8c455e9
--- /dev/null
+++ b/tools/salt-install/config_examples/single_host/single_hostname/pillars/logrotate.sls
@@ -0,0 +1,14 @@
+---
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# The logrotate formula checks that an associated service is running.
+# The default it checks is cron.
+# All the distributions Arvados supports (Debian 11+/Ubuntu 20.04+)
+# have switched to a systemd timer, so check that instead.
+# Refer to logrotate-formula's documentation for details
+# https://github.com/salt-formulas/salt-formula-logrotate/blob/master/README.rst
+
+logrotate:
+  service: logrotate.timer
diff --git a/tools/salt-install/config_examples/single_host/single_hostname/pillars/nginx_api_configuration.sls b/tools/salt-install/config_examples/single_host/single_hostname/pillars/nginx_api_configuration.sls
index 04195ae5b9b23e25f21ad1703b66c4a2116cfb21..3bf7bf54abbbff0e04c4a64849136f17bcca9de8 100644
@@ -31,4 +31,5 @@ nginx:
             - access_log: /var/log/nginx/api.__CLUSTER__.__DOMAIN__-upstream.access.log combined
             - error_log: /var/log/nginx/api.__CLUSTER__.__DOMAIN__-upstream.error.log
             - passenger_enabled: 'on'
+            - passenger_preload_bundler: 'on'
             - client_max_body_size: 128m
diff --git a/tools/salt-install/config_examples/single_host/single_hostname/pillars/nginx_workbench_configuration.sls b/tools/salt-install/config_examples/single_host/single_hostname/pillars/nginx_workbench_configuration.sls
index 59fb43e57af40d70736dc27822c304bdce76f1c6..5d5d0af6684c272d296f2143045f22d196337880 100644
@@ -78,6 +78,7 @@ nginx:
             - root: /var/www/arvados-workbench/current/public
             - index:  index.html index.htm
             - passenger_enabled: 'on'
+            - passenger_preload_bundler: 'on'
             # yamllint disable-line rule:line-length
             - access_log: /var/log/nginx/workbench.__CLUSTER__.__DOMAIN__-upstream.access.log combined
             - error_log: /var/log/nginx/workbench.__CLUSTER__.__DOMAIN__-upstream.error.log
diff --git a/tools/salt-install/provision.sh b/tools/salt-install/provision.sh
index 8dd07020c349942ff9f3936e8462e8a7b5b44026..c78f65e9ca4c4426a7154a4774ae38d5bb53d04a 100755
@@ -284,17 +284,15 @@ VERSION="latest"
 
 # We pin the salt version to avoid potential incompatibilities when a new
 # stable version is released.
-SALT_VERSION="3004"
+SALT_VERSION="3006"
 
 # Other formula versions we depend on
-#POSTGRES_TAG="v0.44.0"
-#POSTGRES_URL="https://github.com/saltstack-formulas/postgres-formula.git"
-POSTGRES_TAG="0.45.0-bugfix327"
-POSTGRES_URL="https://github.com/arvados/postgres-formula.git"
+POSTGRES_TAG="7529300c287b1c288af0f494ca668c2217bd1c5d"
+POSTGRES_URL="https://github.com/saltstack-formulas/postgres-formula.git"
 NGINX_TAG="v2.8.1"
 DOCKER_TAG="v2.4.2"
-LOCALE_TAG="v0.3.4"
-LETSENCRYPT_TAG="v2.1.0"
+LOCALE_TAG="v0.3.5"
+LETSENCRYPT_TAG="v3.2.0"
 LOGROTATE_TAG="v0.14.0"
 PROMETHEUS_TAG="v5.6.5"
 GRAFANA_TAG="v3.1.3"
@@ -362,23 +360,24 @@ fi
 if [ "${DUMP_CONFIG}" = "yes" ]; then
   echo "The provision installer will just dump a config under ${DUMP_SALT_CONFIG_DIR} and exit"
 else
-  # Install a few dependency packages
-  # First, let's figure out the OS we're working on
   OS_IDS="$(. /etc/os-release && echo "${ID:-} ${ID_LIKE:-}")"
   echo "Detected distro families: $OS_IDS"
 
+  # Several of our formulas use the cron module, which requires the crontab
+  # command. We install systemd-cron to ensure we have that.
+  # The rest of these packages are required by the rest of the script.
   for OS_ID in $OS_IDS; do
     case "$OS_ID" in
       rhel)
         echo "WARNING! Disabling SELinux, see https://dev.arvados.org/issues/18019"
         sed -i 's/SELINUX=enforcing/SELINUX=permissive/g' /etc/sysconfig/selinux
         setenforce permissive
-        yum install -y  curl git jq
+        yum install -y curl git jq systemd-cron
         break
         ;;
       debian)
         DEBIAN_FRONTEND=noninteractive apt -o DPkg::Lock::Timeout=120 update
-        DEBIAN_FRONTEND=noninteractive apt install -y curl git jq
+        DEBIAN_FRONTEND=noninteractive apt install -y curl git jq systemd-cron
         break
         ;;
     esac
@@ -388,7 +387,7 @@ else
     echo "Salt already installed"
   else
     curl -L https://bootstrap.saltstack.com -o /tmp/bootstrap_salt.sh
-    sh /tmp/bootstrap_salt.sh -XdfP -x python3 old-stable ${SALT_VERSION}
+    sh /tmp/bootstrap_salt.sh -XdfP -x python3 stable ${SALT_VERSION}
     /bin/systemctl stop salt-minion.service
     /bin/systemctl disable salt-minion.service
   fi
@@ -431,7 +430,7 @@ test -d nginx && ( cd nginx && git fetch ) \
 echo "...postgres"
 test -d postgres && ( cd postgres && git fetch ) \
   || git clone --quiet ${POSTGRES_URL} ${F_DIR}/postgres
-( cd postgres && git checkout --quiet tags/"${POSTGRES_TAG}" )
+( cd postgres && git checkout --quiet "${POSTGRES_TAG}" )
 
 echo "...prometheus"
 test -d prometheus && ( cd prometheus && git fetch ) \
@@ -620,6 +619,7 @@ if [ -z "${ROLES:-}" ]; then
   # Pillars
   echo "    - docker" >> ${PILLARS_TOP}
   echo "    - nginx_api_configuration" >> ${PILLARS_TOP}
+  echo "    - logrotate" >> ${PILLARS_TOP}
   echo "    - logrotate_api" >> ${PILLARS_TOP}
   echo "    - nginx_controller_configuration" >> ${PILLARS_TOP}
   echo "    - nginx_keepproxy_configuration" >> ${PILLARS_TOP}
@@ -855,6 +855,7 @@ else
         grep -q "arvados.controller" ${STATES_TOP} || echo "    - arvados.controller" >> ${STATES_TOP}
 
         ### Pillars ###
+        grep -q "logrotate" ${PILLARS_TOP}                || echo "    - logrotate" >> ${PILLARS_TOP}
         grep -q "logrotate_api" ${PILLARS_TOP}            || echo "    - logrotate_api" >> ${PILLARS_TOP}
         grep -q "aws_credentials" ${PILLARS_TOP}          || echo "    - aws_credentials" >> ${PILLARS_TOP}
         grep -q "postgresql" ${PILLARS_TOP}               || echo "    - postgresql" >> ${PILLARS_TOP}