From d76aa6ba31074f3d41cef48628b235aab4906ce5 Mon Sep 17 00:00:00 2001
From: Stephen Smith
Date: Fri, 28 Jul 2023 11:18:20 -0400
Subject: [PATCH] 20219: Replace API & websocket log loading with webdav polling

* Add apiWebdavClient to the log service for making webdav requests to the API, distinguishing it from keepWebdavClient
* Add listLogFiles and getLogFileContents methods to the log service; contents can be fetched with a range request
* Add loadContainerLogFileContents to load logs using webdav range requests
* Files larger than the chunk limit (128k) are loaded as beginning and end in two 64k fragments; a snipline is appended to the last line of the first fragment
* All chunks are requested in parallel
* Add sorting of timestamped log types in combined log views (Main/All)
* Lines without timestamps are merged into the preceding timestamped line
* If no preceding lines of a sortable type exist, the leading lines are removed from combined log views - they can still be seen in the single log type filter
* Non-timestamped log types (node-info/container) are pushed to the bottom with their original ordering in the All view
* Merging & sorting are not applied to the single log type view
* Remove the snipline LogEventType, as all sniplines now belong to specific log event types
* Update the logs panel store and reducer to remember the last log byte requested
* Add useAsyncInterval utility function to poll log file sizes
* Polling awaits the callback before scheduling the next request, preventing race conditions
* Update the process log code snippet style to remove the gap between the rendered array of log lines
* Remove websocket handling of log events
* Replace cypress log manipulation commands with webdav log helpers
* Add tests for polling, line sorting, correctly sized/positioned chunks, and sniplines

Arvados-DCO-1.1-Signed-off-by: Stephen Smith
---
 cypress/integration/process.spec.js           | 2353 +++++++++--------
 cypress/support/commands.js                   |  125 +-
 src/common/use-async-interval.ts              |   35 +
 src/models/log.ts                             |    1 -
 .../collection-service.test.ts                |   18 +-
 .../collection-service/collection-service.ts  |   14 +-
 src/services/log-service/log-service.ts       |   43 +-
 src/services/services.ts                      |   19 +-
 .../process-logs-panel-actions.ts             |  316 ++-
 .../process-logs-panel-reducer.ts             |   29 +-
 .../process-logs-panel/process-logs-panel.ts  |    6 +-
 src/views/process-panel/process-log-card.tsx  |    9 +-
 .../process-log-code-snippet.tsx              |    4 +-
 .../process-panel/process-panel-root.tsx      |    2 +
 src/views/process-panel/process-panel.tsx     |    3 +-
 src/websocket/websocket.ts                    |    3 -
 16 files changed, 1692 insertions(+), 1288 deletions(-)
 create mode 100644 src/common/use-async-interval.ts

diff --git a/cypress/integration/process.spec.js b/cypress/integration/process.spec.js
index bdb4fae6..2be9c5c3 100644
--- a/cypress/integration/process.spec.js
+++ b/cypress/integration/process.spec.js
@@ -84,882 +84,1332 @@ describe('Process tests', function() {
         });
     }

-    it('shows process logs', function() {
-        const crName = 'test_container_request';
-        createContainerRequest(
-            activeUser,
-            crName,
-            'arvados/jobs',
-            ['echo', 'hello world'],
-            false, 'Committed')
-        .then(function(containerRequest) {
-            cy.loginAs(activeUser);
-            cy.goToPath(`/processes/${containerRequest.uuid}`);
-            cy.get('[data-cy=process-details]').should('contain', crName);
-            cy.get('[data-cy=process-logs]')
-                .should('contain', 'No logs yet')
-                .and('not.contain', 'hello world');
-            cy.createLog(activeUser.token, {
-                object_uuid: containerRequest.container_uuid,
-                properties: {
-                    text: 'hello world'
-                },
-                event_type: 'stdout'
-            }).then(function(log) {
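The chunking described in the commit message (128k limit, two 64k fragments, parallel requests, snipline appended to the last line of the first fragment) could be sketched roughly as below. loadLogChunks and fetchRange are hypothetical stand-ins for the loadContainerLogFileContents / getLogFileContents behaviour; only the SNIPLINE value is taken from the test added later in this patch.

// Sketch only, under assumed names: a 128 KiB fetch limit split into two 64 KiB fragments.
const maxLogFetchSize = 128 * 1024;
const SNIPLINE = `================ ✀ ================ ✀ ========= Some log(s) were skipped ========= ✀ ================ ✀ ================`;

type FetchRange = (start: number, end: number) => Promise<string>;

export const loadLogChunks = async (fileSize: number, fetchRange: FetchRange): Promise<string[]> => {
    if (fileSize === 0) {
        return [];
    }
    if (fileSize <= maxLogFetchSize) {
        // Small file: a single range request covers the whole file.
        return (await fetchRange(0, fileSize - 1)).split("\n");
    }
    // Large file: request the first and last 64 KiB concurrently.
    const [head, tail] = await Promise.all([
        fetchRange(0, maxLogFetchSize / 2 - 1),
        fetchRange(fileSize - maxLogFetchSize / 2, fileSize - 1),
    ]);
    const headLines = head.split("\n");
    // Mark the skipped middle by appending the snipline to the last line of the first fragment.
    headLines[headLines.length - 1] += `\n${SNIPLINE}`;
    return [...headLines, ...tail.split("\n")];
};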
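Similarly, the merge-and-sort behaviour for the combined Main/All views (untimestamped continuation lines folded into the preceding timestamped line, leading untimestamped lines dropped, then a timestamp sort across log types) might look like the following sketch; names are illustrative, and it omits the step that appends non-timestamped log types at the bottom of the All view.

// Lines are expected to start with an RFC 3339 timestamp, e.g.
// "2023-07-18T20:14:48.128642814Z hello world".
const TIMESTAMP_PREFIX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z /;

export const mergeAndSortLogLines = (lines: string[]): string[] => {
    const merged: string[] = [];
    for (const line of lines) {
        if (TIMESTAMP_PREFIX.test(line)) {
            merged.push(line);
        } else if (merged.length > 0) {
            // Continuation line: fold it into the preceding timestamped line.
            merged[merged.length - 1] += `\n${line}`;
        }
        // Leading lines with no timestamped predecessor are dropped here;
        // they remain visible when a single log type is selected.
    }
    // Lexicographic comparison sorts RFC 3339 timestamps chronologically.
    return merged.sort((a, b) => (a < b ? -1 : a > b ? 1 : 0));
};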
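The polling bullets describe an interval that does not schedule its next tick until the previous async callback has finished. A minimal sketch of that pattern, assuming React hooks; details are illustrative rather than the actual src/common/use-async-interval.ts implementation.

import { useEffect, useRef } from "react";

// Sketch of an await-before-reschedule polling hook: the next poll is scheduled
// only after the previous callback has resolved, so requests never overlap.
export const useAsyncInterval = (callback: () => Promise<void>, delay: number | null) => {
    const savedCallback = useRef(callback);

    // Keep the latest callback without restarting the timer.
    useEffect(() => {
        savedCallback.current = callback;
    }, [callback]);

    useEffect(() => {
        if (delay === null) {
            return;
        }
        let cancelled = false;
        let timer: ReturnType<typeof setTimeout>;

        const tick = async () => {
            await savedCallback.current();
            if (!cancelled) {
                timer = setTimeout(tick, delay);
            }
        };
        timer = setTimeout(tick, delay);

        return () => {
            cancelled = true;
            clearTimeout(timer);
        };
    }, [delay]);
};

A caller would pass the poll itself (for example, fetching log file sizes) as the callback, and a null delay to pause polling.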
cy.get('[data-cy=process-logs]', {timeout: 7000}) - .should('not.contain', 'No logs yet') - .and('contain', 'hello world'); - }) + describe('Details panel', function() { + it('shows process details', function() { + createContainerRequest( + activeUser, + `test_container_request ${Math.floor(Math.random() * 999999)}`, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + cy.loginAs(activeUser); + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-details]').should('contain', containerRequest.name); + cy.get('[data-cy=process-details-attributes-modifiedby-user]').contains(`Active User (${activeUser.user.uuid})`); + cy.get('[data-cy=process-details-attributes-runtime-user]').should('not.exist'); + }); + + // Fake submitted by another user + cy.intercept({method: 'GET', url: '**/arvados/v1/container_requests/*'}, (req) => { + req.reply((res) => { + res.body.modified_by_user_uuid = 'zzzzz-tpzed-000000000000000'; + }); + }); + + createContainerRequest( + activeUser, + `test_container_request ${Math.floor(Math.random() * 999999)}`, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + cy.loginAs(activeUser); + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-details]').should('contain', containerRequest.name); + cy.get('[data-cy=process-details-attributes-modifiedby-user]').contains(`zzzzz-tpzed-000000000000000`); + cy.get('[data-cy=process-details-attributes-runtime-user]').contains(`Active User (${activeUser.user.uuid})`); + }); }); - }); - it('shows process details', function() { - createContainerRequest( - activeUser, - `test_container_request ${Math.floor(Math.random() * 999999)}`, - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .then(function(containerRequest) { - cy.loginAs(activeUser); - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-details]').should('contain', containerRequest.name); - cy.get('[data-cy=process-details-attributes-modifiedby-user]').contains(`Active User (${activeUser.user.uuid})`); - cy.get('[data-cy=process-details-attributes-runtime-user]').should('not.exist'); + it('should show runtime status indicators', function() { + // Setup running container with runtime_status error & warning messages + createContainerRequest( + activeUser, + 'test_container_request', + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .as('containerRequest') + .then(function(containerRequest) { + expect(containerRequest.state).to.equal('Committed'); + expect(containerRequest.container_uuid).not.to.be.equal(''); + + cy.getContainer(activeUser.token, containerRequest.container_uuid) + .then(function(queuedContainer) { + expect(queuedContainer.state).to.be.equal('Queued'); + }); + cy.updateContainer(adminUser.token, containerRequest.container_uuid, { + state: 'Locked' + }).then(function(lockedContainer) { + expect(lockedContainer.state).to.be.equal('Locked'); + + cy.updateContainer(adminUser.token, lockedContainer.uuid, { + state: 'Running', + runtime_status: { + error: 'Something went wrong', + errorDetail: 'Process exited with status 1', + warning: 'Free disk space is low', + } + }) + .as('runningContainer') + .then(function(runningContainer) { + expect(runningContainer.state).to.be.equal('Running'); + expect(runningContainer.runtime_status).to.be.deep.equal({ + 'error': 'Something went wrong', + 'errorDetail': 'Process exited with status 1', + 'warning': 'Free disk 
space is low', + }); + }); + }) + }); + // Test that the UI shows the error and warning messages + cy.getAll('@containerRequest', '@runningContainer').then(function([containerRequest]) { + cy.loginAs(activeUser); + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-runtime-status-error]') + .should('contain', 'Something went wrong') + .and('contain', 'Process exited with status 1'); + cy.get('[data-cy=process-runtime-status-warning]') + .should('contain', 'Free disk space is low') + .and('contain', 'No additional warning details available'); + }); + + + // Force container_count for testing + let containerCount = 2; + cy.intercept({method: 'GET', url: '**/arvados/v1/container_requests/*'}, (req) => { + req.reply((res) => { + res.body.container_count = containerCount; + }); + }); + + cy.getAll('@containerRequest').then(function([containerRequest]) { + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-runtime-status-retry-warning]', {timeout: 7000}) + .should('contain', 'Process retried 1 time'); + }); + + cy.getAll('@containerRequest').then(function([containerRequest]) { + containerCount = 3; + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-runtime-status-retry-warning]', {timeout: 7000}) + .should('contain', 'Process retried 2 times'); + }); }); - // Fake submitted by another user - cy.intercept({method: 'GET', url: '**/arvados/v1/container_requests/*'}, (req) => { - req.reply((res) => { - res.body.modified_by_user_uuid = 'zzzzz-tpzed-000000000000000'; + it('allows copying processes', function() { + const crName = 'first_container_request'; + const copiedCrName = 'copied_container_request'; + createContainerRequest( + activeUser, + crName, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + cy.loginAs(activeUser); + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-details]').should('contain', crName); + + cy.get('[data-cy=process-details]').find('button[title="More options"]').click(); + cy.get('ul[data-cy=context-menu]').contains("Copy and re-run process").click(); + }); + + cy.get('[data-cy=form-dialog]').within(() => { + cy.get('input[name=name]').clear().type(copiedCrName); + cy.get('[data-cy=projects-tree-home-tree-picker]').click(); + cy.get('[data-cy=form-submit-btn]').click(); }); + + cy.get('[data-cy=process-details]').should('contain', copiedCrName); + cy.get('[data-cy=process-details]').find('button').contains('Run'); }); - createContainerRequest( - activeUser, - `test_container_request ${Math.floor(Math.random() * 999999)}`, - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .then(function(containerRequest) { - cy.loginAs(activeUser); - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-details]').should('contain', containerRequest.name); - cy.get('[data-cy=process-details-attributes-modifiedby-user]').contains(`zzzzz-tpzed-000000000000000`); - cy.get('[data-cy=process-details-attributes-runtime-user]').contains(`Active User (${activeUser.user.uuid})`); + const getFakeContainer = (fakeContainerUuid) => ({ + href: `/containers/${fakeContainerUuid}`, + kind: "arvados#container", + etag: "ecfosljpnxfari9a8m7e4yv06", + uuid: fakeContainerUuid, + owner_uuid: "zzzzz-tpzed-000000000000000", + created_at: "2023-02-13T15:55:47.308915000Z", + modified_by_client_uuid: "zzzzz-ozdt8-q6dzdi1lcc03155", + modified_by_user_uuid: "zzzzz-tpzed-000000000000000", + modified_at: 
"2023-02-15T19:12:45.987086000Z", + command: [ + "arvados-cwl-runner", + "--api=containers", + "--local", + "--project-uuid=zzzzz-j7d0g-yr18k784zplfeza", + "/var/lib/cwl/workflow.json#main", + "/var/lib/cwl/cwl.input.json", + ], + container_image: "4ad7d11381df349e464694762db14e04+303", + cwd: "/var/spool/cwl", + environment: {}, + exit_code: null, + finished_at: null, + locked_by_uuid: null, + log: null, + output: null, + output_path: "/var/spool/cwl", + progress: null, + runtime_constraints: { + API: true, + cuda: { + device_count: 0, + driver_version: "", + hardware_capability: "", + }, + keep_cache_disk: 2147483648, + keep_cache_ram: 0, + ram: 1342177280, + vcpus: 1, + }, + runtime_status: {}, + started_at: null, + auth_uuid: null, + scheduling_parameters: { + max_run_time: 0, + partitions: [], + preemptible: false, + }, + runtime_user_uuid: "zzzzz-tpzed-vllbpebicy84rd5", + runtime_auth_scopes: ["all"], + lock_count: 2, + gateway_address: null, + interactive_session_started: false, + output_storage_classes: ["default"], + output_properties: {}, + cost: 0.0, + subrequests_cost: 0.0, }); + + it('shows cancel button when appropriate', function() { + // Ignore collection requests + cy.intercept({method: 'GET', url: `**/arvados/v1/collections/*`}, { + statusCode: 200, + body: {} + }); + + // Uncommitted container + const crUncommitted = `Test process ${Math.floor(Math.random() * 999999)}`; + createContainerRequest( + activeUser, + crUncommitted, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Uncommitted') + .then(function(containerRequest) { + // Navigate to process and verify run / cancel button + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.waitForDom(); + cy.get('[data-cy=process-details]').should('contain', crUncommitted); + cy.get('[data-cy=process-run-button]').should('exist'); + cy.get('[data-cy=process-cancel-button]').should('not.exist'); + }); + + // Queued container + const crQueued = `Test process ${Math.floor(Math.random() * 999999)}`; + const fakeCrUuid = 'zzzzz-dz642-000000000000001'; + createContainerRequest( + activeUser, + crQueued, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + // Fake container uuid + cy.intercept({method: 'GET', url: `**/arvados/v1/container_requests/${containerRequest.uuid}`}, (req) => { + req.reply((res) => { + res.body.output_uuid = fakeCrUuid; + res.body.priority = 500; + res.body.state = "Committed"; + }); + }); + + // Fake container + const container = getFakeContainer(fakeCrUuid); + cy.intercept({method: 'GET', url: `**/arvados/v1/container/${fakeCrUuid}`}, { + statusCode: 200, + body: {...container, state: "Queued", priority: 500} + }); + + // Navigate to process and verify cancel button + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.waitForDom(); + cy.get('[data-cy=process-details]').should('contain', crQueued); + cy.get('[data-cy=process-cancel-button]').contains('Cancel'); + }); + + // Locked container + const crLocked = `Test process ${Math.floor(Math.random() * 999999)}`; + const fakeCrLockedUuid = 'zzzzz-dz642-000000000000002'; + createContainerRequest( + activeUser, + crLocked, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + // Fake container uuid + cy.intercept({method: 'GET', url: `**/arvados/v1/container_requests/${containerRequest.uuid}`}, (req) => { + req.reply((res) => { + res.body.output_uuid = fakeCrLockedUuid; + res.body.priority = 500; + res.body.state = "Committed"; + }); + }); + + 
// Fake container + const container = getFakeContainer(fakeCrLockedUuid); + cy.intercept({method: 'GET', url: `**/arvados/v1/container/${fakeCrLockedUuid}`}, { + statusCode: 200, + body: {...container, state: "Locked", priority: 500} + }); + + // Navigate to process and verify cancel button + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.waitForDom(); + cy.get('[data-cy=process-details]').should('contain', crLocked); + cy.get('[data-cy=process-cancel-button]').contains('Cancel'); + }); + + // On Hold container + const crOnHold = `Test process ${Math.floor(Math.random() * 999999)}`; + const fakeCrOnHoldUuid = 'zzzzz-dz642-000000000000003'; + createContainerRequest( + activeUser, + crOnHold, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + // Fake container uuid + cy.intercept({method: 'GET', url: `**/arvados/v1/container_requests/${containerRequest.uuid}`}, (req) => { + req.reply((res) => { + res.body.output_uuid = fakeCrOnHoldUuid; + res.body.priority = 0; + res.body.state = "Committed"; + }); + }); + + // Fake container + const container = getFakeContainer(fakeCrOnHoldUuid); + cy.intercept({method: 'GET', url: `**/arvados/v1/container/${fakeCrOnHoldUuid}`}, { + statusCode: 200, + body: {...container, state: "Queued", priority: 0} + }); + + // Navigate to process and verify cancel button + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.waitForDom(); + cy.get('[data-cy=process-details]').should('contain', crOnHold); + cy.get('[data-cy=process-run-button]').should('exist'); + cy.get('[data-cy=process-cancel-button]').should('not.exist'); + }); + }); + }); - it('filters process logs by event type', function() { - const nodeInfoLogs = [ - 'Host Information', - 'Linux compute-99cb150b26149780de44b929577e1aed-19rgca8vobuvc4p 5.4.0-1059-azure #62~18.04.1-Ubuntu SMP Tue Sep 14 17:53:18 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux', - 'CPU Information', - 'processor : 0', - 'vendor_id : GenuineIntel', - 'cpu family : 6', - 'model : 79', - 'model name : Intel(R) Xeon(R) CPU E5-2673 v4 @ 2.30GHz' - ]; - const crunchRunLogs = [ - '2022-03-22T13:56:22.542417997Z using local keepstore process (pid 3733) at http://localhost:46837, writing logs to keepstore.txt in log collection', - '2022-03-22T13:56:26.237571754Z crunch-run 2.4.0~dev20220321141729 (go1.17.1) started', - '2022-03-22T13:56:26.244704134Z crunch-run process has uid=0(root) gid=0(root) groups=0(root)', - '2022-03-22T13:56:26.244862836Z Executing container \'zzzzz-dz642-1wokwvcct9s9du3\' using docker runtime', - '2022-03-22T13:56:26.245037738Z Executing on host \'compute-99cb150b26149780de44b929577e1aed-19rgca8vobuvc4p\'', - ]; - const stdoutLogs = [ - 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec dui nisi, hendrerit porta sapien a, pretium dignissim purus.', - 'Integer viverra, mauris finibus aliquet ultricies, dui mauris cursus justo, ut venenatis nibh ex eget neque.', - 'In hac habitasse platea dictumst.', - 'Fusce fringilla turpis id accumsan faucibus. Donec congue congue ex non posuere. In semper mi quis tristique rhoncus.', - 'Interdum et malesuada fames ac ante ipsum primis in faucibus.', - 'Quisque fermentum tortor ex, ut suscipit velit feugiat faucibus.', - 'Donec vitae porta risus, at luctus nulla. Mauris gravida iaculis ipsum, id sagittis tortor egestas ac.', - 'Maecenas condimentum volutpat nulla. 
Integer lacinia maximus risus eu posuere.', - 'Donec vitae leo id augue gravida bibendum.', - 'Nam libero libero, pretium ac faucibus elementum, mattis nec ex.', - 'Nullam id laoreet nibh. Vivamus tellus metus, pretium quis justo ut, bibendum varius metus. Pellentesque vitae accumsan lorem, quis tincidunt augue.', - 'Aliquam viverra nisi nulla, et efficitur dolor mattis in.', - 'Sed at enim sit amet nulla tincidunt mattis. Aenean eget aliquet ex, non ultrices ex. Nulla ex tortor, vestibulum aliquam tempor ac, aliquam vel est.', - 'Fusce auctor faucibus libero id venenatis. Etiam sodales, odio eu cursus efficitur, quam sem blandit ex, quis porttitor enim dui quis lectus. In id tincidunt felis.', - 'Phasellus non ex quis arcu tempus faucibus molestie in sapien.', - 'Duis tristique semper dolor, vitae pulvinar risus.', - 'Aliquam tortor elit, luctus nec tortor eget, porta tristique nulla.', - 'Nulla eget mollis ipsum.', - ]; - createContainerRequest( - activeUser, - 'test_container_request', - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .then(function(containerRequest) { - cy.logsForContainer(activeUser.token, containerRequest.container_uuid, - 'node-info', nodeInfoLogs).as('nodeInfoLogs'); - cy.logsForContainer(activeUser.token, containerRequest.container_uuid, - 'crunch-run', crunchRunLogs).as('crunchRunLogs'); - cy.logsForContainer(activeUser.token, containerRequest.container_uuid, - 'stdout', stdoutLogs).as('stdoutLogs'); - cy.getAll('@stdoutLogs', '@nodeInfoLogs', '@crunchRunLogs').then(function() { + describe('Logs panel', function() { + it('shows live process logs', function() { + const crName = 'test_container_request'; + createContainerRequest( + activeUser, + crName, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { cy.loginAs(activeUser); cy.goToPath(`/processes/${containerRequest.uuid}`); - // Should show main logs by default - cy.get('[data-cy=process-logs-filter]', {timeout: 7000}).should('contain', 'Main logs'); + cy.get('[data-cy=process-details]').should('contain', crName); cy.get('[data-cy=process-logs]') - .should('contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) - .and('not.contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) - .and('contain', crunchRunLogs[Math.floor(Math.random() * crunchRunLogs.length)]); - // Select 'All logs' - cy.get('[data-cy=process-logs-filter]').click(); - cy.get('body').contains('li', 'All logs').click(); - cy.get('[data-cy=process-logs]') - .should('contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) - .and('contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) - .and('contain', crunchRunLogs[Math.floor(Math.random() * crunchRunLogs.length)]); - // Select 'node-info' logs - cy.get('[data-cy=process-logs-filter]').click(); - cy.get('body').contains('li', 'node-info').click(); - cy.get('[data-cy=process-logs]') - .should('not.contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) - .and('contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) - .and('not.contain', crunchRunLogs[Math.floor(Math.random() * crunchRunLogs.length)]); - // Select 'stdout' logs - cy.get('[data-cy=process-logs-filter]').click(); - cy.get('body').contains('li', 'stdout').click(); - cy.get('[data-cy=process-logs]') - .should('contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) - .and('not.contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) - 
.and('not.contain', crunchRunLogs[Math.floor(Math.random() * crunchRunLogs.length)]); + .should('contain', 'No logs yet') + .and('not.contain', 'hello world'); + + cy.appendLog(adminUser.token, containerRequest.uuid, "stdout.txt", [ + "2023-07-18T20:14:48.128642814Z hello world" + ]).then(() => { + cy.get('[data-cy=process-logs]', {timeout: 7000}) + .should('not.contain', 'No logs yet') + .and('contain', 'hello world'); + }); + + cy.appendLog(adminUser.token, containerRequest.uuid, "stderr.txt", [ + "2023-07-18T20:14:49.128642814Z hello new line" + ]).then(() => { + cy.get('[data-cy=process-logs]', {timeout: 7000}) + .should('not.contain', 'No logs yet') + .and('contain', 'hello new line'); + }); }); }); - }); - it('should show runtime status indicators', function() { - // Setup running container with runtime_status error & warning messages - createContainerRequest( - activeUser, - 'test_container_request', - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .as('containerRequest') - .then(function(containerRequest) { - expect(containerRequest.state).to.equal('Committed'); - expect(containerRequest.container_uuid).not.to.be.equal(''); - - cy.getContainer(activeUser.token, containerRequest.container_uuid) - .then(function(queuedContainer) { - expect(queuedContainer.state).to.be.equal('Queued'); - }); - cy.updateContainer(adminUser.token, containerRequest.container_uuid, { - state: 'Locked' - }).then(function(lockedContainer) { - expect(lockedContainer.state).to.be.equal('Locked'); - - cy.updateContainer(adminUser.token, lockedContainer.uuid, { - state: 'Running', - runtime_status: { - error: 'Something went wrong', - errorDetail: 'Process exited with status 1', - warning: 'Free disk space is low', - } - }) - .as('runningContainer') - .then(function(runningContainer) { - expect(runningContainer.state).to.be.equal('Running'); - expect(runningContainer.runtime_status).to.be.deep.equal({ - 'error': 'Something went wrong', - 'errorDetail': 'Process exited with status 1', - 'warning': 'Free disk space is low', - }); + it('filters process logs by event type', function() { + const nodeInfoLogs = [ + 'Host Information', + 'Linux compute-99cb150b26149780de44b929577e1aed-19rgca8vobuvc4p 5.4.0-1059-azure #62~18.04.1-Ubuntu SMP Tue Sep 14 17:53:18 UTC 2021 x86_64 x86_64 x86_64 GNU/Linux', + 'CPU Information', + 'processor : 0', + 'vendor_id : GenuineIntel', + 'cpu family : 6', + 'model : 79', + 'model name : Intel(R) Xeon(R) CPU E5-2673 v4 @ 2.30GHz' + ]; + const crunchRunLogs = [ + '2022-03-22T13:56:22.542417997Z using local keepstore process (pid 3733) at http://localhost:46837, writing logs to keepstore.txt in log collection', + '2022-03-22T13:56:26.237571754Z crunch-run 2.4.0~dev20220321141729 (go1.17.1) started', + '2022-03-22T13:56:26.244704134Z crunch-run process has uid=0(root) gid=0(root) groups=0(root)', + '2022-03-22T13:56:26.244862836Z Executing container \'zzzzz-dz642-1wokwvcct9s9du3\' using docker runtime', + '2022-03-22T13:56:26.245037738Z Executing on host \'compute-99cb150b26149780de44b929577e1aed-19rgca8vobuvc4p\'', + ]; + const stdoutLogs = [ + '2022-03-22T13:56:22.542417987Z Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Donec dui nisi, hendrerit porta sapien a, pretium dignissim purus.', + '2022-03-22T13:56:22.542417997Z Integer viverra, mauris finibus aliquet ultricies, dui mauris cursus justo, ut venenatis nibh ex eget neque.', + '2022-03-22T13:56:22.542418007Z In hac habitasse platea dictumst.', + '2022-03-22T13:56:22.542418027Z Fusce fringilla turpis id accumsan faucibus. Donec congue congue ex non posuere. In semper mi quis tristique rhoncus.', + '2022-03-22T13:56:22.542418037Z Interdum et malesuada fames ac ante ipsum primis in faucibus.', + '2022-03-22T13:56:22.542418047Z Quisque fermentum tortor ex, ut suscipit velit feugiat faucibus.', + '2022-03-22T13:56:22.542418057Z Donec vitae porta risus, at luctus nulla. Mauris gravida iaculis ipsum, id sagittis tortor egestas ac.', + '2022-03-22T13:56:22.542418067Z Maecenas condimentum volutpat nulla. Integer lacinia maximus risus eu posuere.', + '2022-03-22T13:56:22.542418077Z Donec vitae leo id augue gravida bibendum.', + '2022-03-22T13:56:22.542418087Z Nam libero libero, pretium ac faucibus elementum, mattis nec ex.', + '2022-03-22T13:56:22.542418097Z Nullam id laoreet nibh. Vivamus tellus metus, pretium quis justo ut, bibendum varius metus. Pellentesque vitae accumsan lorem, quis tincidunt augue.', + '2022-03-22T13:56:22.542418107Z Aliquam viverra nisi nulla, et efficitur dolor mattis in.', + '2022-03-22T13:56:22.542418117Z Sed at enim sit amet nulla tincidunt mattis. Aenean eget aliquet ex, non ultrices ex. Nulla ex tortor, vestibulum aliquam tempor ac, aliquam vel est.', + '2022-03-22T13:56:22.542418127Z Fusce auctor faucibus libero id venenatis. Etiam sodales, odio eu cursus efficitur, quam sem blandit ex, quis porttitor enim dui quis lectus. In id tincidunt felis.', + '2022-03-22T13:56:22.542418137Z Phasellus non ex quis arcu tempus faucibus molestie in sapien.', + '2022-03-22T13:56:22.542418147Z Duis tristique semper dolor, vitae pulvinar risus.', + '2022-03-22T13:56:22.542418157Z Aliquam tortor elit, luctus nec tortor eget, porta tristique nulla.', + '2022-03-22T13:56:22.542418167Z Nulla eget mollis ipsum.', + ]; + + createContainerRequest( + activeUser, + 'test_container_request', + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + cy.appendLog(adminUser.token, containerRequest.uuid, "node-info.txt", nodeInfoLogs).as('nodeInfoLogs'); + cy.appendLog(adminUser.token, containerRequest.uuid, "crunch-run.txt", crunchRunLogs).as('crunchRunLogs'); + cy.appendLog(adminUser.token, containerRequest.uuid, "stdout.txt", stdoutLogs).as('stdoutLogs'); + + cy.getAll('@stdoutLogs', '@nodeInfoLogs', '@crunchRunLogs').then(function() { + cy.loginAs(activeUser); + cy.goToPath(`/processes/${containerRequest.uuid}`); + // Should show main logs by default + cy.get('[data-cy=process-logs-filter]', {timeout: 7000}).should('contain', 'Main logs'); + cy.get('[data-cy=process-logs]') + .should('contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) + .and('not.contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) + .and('contain', crunchRunLogs[Math.floor(Math.random() * crunchRunLogs.length)]); + // Select 'All logs' + cy.get('[data-cy=process-logs-filter]').click(); + cy.get('body').contains('li', 'All logs').click(); + cy.get('[data-cy=process-logs]') + .should('contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) + .and('contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) + .and('contain', crunchRunLogs[Math.floor(Math.random() * 
crunchRunLogs.length)]); + // Select 'node-info' logs + cy.get('[data-cy=process-logs-filter]').click(); + cy.get('body').contains('li', 'node-info').click(); + cy.get('[data-cy=process-logs]') + .should('not.contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) + .and('contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) + .and('not.contain', crunchRunLogs[Math.floor(Math.random() * crunchRunLogs.length)]); + // Select 'stdout' logs + cy.get('[data-cy=process-logs-filter]').click(); + cy.get('body').contains('li', 'stdout').click(); + cy.get('[data-cy=process-logs]') + .should('contain', stdoutLogs[Math.floor(Math.random() * stdoutLogs.length)]) + .and('not.contain', nodeInfoLogs[Math.floor(Math.random() * nodeInfoLogs.length)]) + .and('not.contain', crunchRunLogs[Math.floor(Math.random() * crunchRunLogs.length)]); }); - }) - }); - // Test that the UI shows the error and warning messages - cy.getAll('@containerRequest', '@runningContainer').then(function([containerRequest]) { - cy.loginAs(activeUser); - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-runtime-status-error]') - .should('contain', 'Something went wrong') - .and('contain', 'Process exited with status 1'); - cy.get('[data-cy=process-runtime-status-warning]') - .should('contain', 'Free disk space is low') - .and('contain', 'No additional warning details available'); + }); }); + it('sorts combined logs', function() { + const crName = 'test_container_request'; + createContainerRequest( + activeUser, + crName, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + cy.appendLog(adminUser.token, containerRequest.uuid, "node-info.txt", [ + "3: nodeinfo 1", + "2: nodeinfo 2", + "1: nodeinfo 3", + "2: nodeinfo 4", + "3: nodeinfo 5", + ]).as('node-info'); + + cy.appendLog(adminUser.token, containerRequest.uuid, "stdout.txt", [ + "2023-07-18T20:14:48.128642814Z first", + "2023-07-18T20:14:49.128642814Z third" + ]).as('stdout'); + + cy.appendLog(adminUser.token, containerRequest.uuid, "stderr.txt", [ + "2023-07-18T20:14:48.528642814Z second" + ]).as('stderr'); - // Force container_count for testing - let containerCount = 2; - cy.intercept({method: 'GET', url: '**/arvados/v1/container_requests/*'}, (req) => { - req.reply((res) => { - res.body.container_count = containerCount; + cy.loginAs(activeUser); + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-details]').should('contain', crName); + cy.get('[data-cy=process-logs]') + .should('contain', 'No logs yet'); + + cy.getAll('@node-info', '@stdout', '@stderr').then(() => { + // Verify sorted main logs + cy.get('[data-cy=process-logs] pre', {timeout: 7000}) + .eq(0).should('contain', '2023-07-18T20:14:48.128642814Z first'); + cy.get('[data-cy=process-logs] pre') + .eq(1).should('contain', '2023-07-18T20:14:48.528642814Z second'); + cy.get('[data-cy=process-logs] pre') + .eq(2).should('contain', '2023-07-18T20:14:49.128642814Z third'); + + // Switch to All logs + cy.get('[data-cy=process-logs-filter]').click(); + cy.get('body').contains('li', 'All logs').click(); + // Verify sorted logs + cy.get('[data-cy=process-logs] pre') + .eq(0).should('contain', '2023-07-18T20:14:48.128642814Z first'); + cy.get('[data-cy=process-logs] pre') + .eq(1).should('contain', '2023-07-18T20:14:48.528642814Z second'); + cy.get('[data-cy=process-logs] pre') + .eq(2).should('contain', '2023-07-18T20:14:49.128642814Z third'); + // Verify non-sorted lines were preserved + 
cy.get('[data-cy=process-logs] pre') + .eq(3).should('contain', '3: nodeinfo 1'); + cy.get('[data-cy=process-logs] pre') + .eq(4).should('contain', '2: nodeinfo 2'); + cy.get('[data-cy=process-logs] pre') + .eq(5).should('contain', '1: nodeinfo 3'); + cy.get('[data-cy=process-logs] pre') + .eq(6).should('contain', '2: nodeinfo 4'); + cy.get('[data-cy=process-logs] pre') + .eq(7).should('contain', '3: nodeinfo 5'); + }); }); }); - cy.getAll('@containerRequest').then(function([containerRequest]) { - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-runtime-status-retry-warning]', {timeout: 7000}) - .should('contain', 'Process retried 1 time'); - }); + it('correctly generates sniplines', function() { + const SNIPLINE = `================ ✀ ================ ✀ ========= Some log(s) were skipped ========= ✀ ================ ✀ ================`; + const crName = 'test_container_request'; + createContainerRequest( + activeUser, + crName, + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .then(function(containerRequest) { + + cy.appendLog(adminUser.token, containerRequest.uuid, "stdout.txt", [ + 'X'.repeat(63999) + '_' + + 'O'.repeat(100) + + '_' + 'X'.repeat(63999) + ]).as('stdout'); - cy.getAll('@containerRequest').then(function([containerRequest]) { - containerCount = 3; - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-runtime-status-retry-warning]', {timeout: 7000}) - .should('contain', 'Process retried 2 times'); + cy.loginAs(activeUser); + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-details]').should('contain', crName); + cy.get('[data-cy=process-logs]') + .should('contain', 'No logs yet'); + + // Switch to stdout since lines are unsortable (no timestamp) + cy.get('[data-cy=process-logs-filter]').click(); + cy.get('body').contains('li', 'stdout').click(); + + cy.getAll('@stdout').then(() => { + // Verify first 64KB and snipline + cy.get('[data-cy=process-logs] pre', {timeout: 7000}) + .eq(0).should('contain', 'X'.repeat(63999) + '_\n' + SNIPLINE); + // Verify last 64KB + cy.get('[data-cy=process-logs] pre') + .eq(1).should('contain', '_' + 'X'.repeat(63999)); + // Verify none of the Os got through + cy.get('[data-cy=process-logs] pre') + .should('not.contain', 'O'); + }); + }); }); - }); + }); - const testInputs = [ - { - definition: { - "id": "#main/input_file", - "label": "Label Description", - "type": "File" + describe('I/O panel', function() { + const testInputs = [ + { + definition: { + "id": "#main/input_file", + "label": "Label Description", + "type": "File" + }, + input: { + "input_file": { + "basename": "input1.tar", + "class": "File", + "location": "keep:00000000000000000000000000000000+01/input1.tar", + "secondaryFiles": [ + { + "basename": "input1-2.txt", + "class": "File", + "location": "keep:00000000000000000000000000000000+01/input1-2.txt" + }, + { + "basename": "input1-3.txt", + "class": "File", + "location": "keep:00000000000000000000000000000000+01/input1-3.txt" + }, + { + "basename": "input1-4.txt", + "class": "File", + "location": "keep:00000000000000000000000000000000+01/input1-4.txt" + } + ] + } + } + }, + { + definition: { + "id": "#main/input_dir", + "doc": "Doc Description", + "type": "Directory" + }, + input: { + "input_dir": { + "basename": "11111111111111111111111111111111+01", + "class": "Directory", + "location": "keep:11111111111111111111111111111111+01" + } + } + }, + { + definition: { + "id": "#main/input_bool", + "doc": ["Doc desc 1", "Doc desc 
2"], + "type": "boolean" + }, + input: { + "input_bool": true, + } + }, + { + definition: { + "id": "#main/input_int", + "type": "int" + }, + input: { + "input_int": 1, + } + }, + { + definition: { + "id": "#main/input_long", + "type": "long" + }, + input: { + "input_long" : 1, + } + }, + { + definition: { + "id": "#main/input_float", + "type": "float" + }, + input: { + "input_float": 1.5, + } + }, + { + definition: { + "id": "#main/input_double", + "type": "double" + }, + input: { + "input_double": 1.3, + } + }, + { + definition: { + "id": "#main/input_string", + "type": "string" + }, + input: { + "input_string": "Hello World", + } }, - input: { - "input_file": { - "basename": "input1.tar", - "class": "File", - "location": "keep:00000000000000000000000000000000+01/input1.tar", - "secondaryFiles": [ + { + definition: { + "id": "#main/input_file_array", + "type": { + "items": "File", + "type": "array" + } + }, + input: { + "input_file_array": [ { - "basename": "input1-2.txt", + "basename": "input2.tar", "class": "File", - "location": "keep:00000000000000000000000000000000+01/input1-2.txt" + "location": "keep:00000000000000000000000000000000+02/input2.tar" }, { - "basename": "input1-3.txt", + "basename": "input3.tar", "class": "File", - "location": "keep:00000000000000000000000000000000+01/input1-3.txt" + "location": "keep:00000000000000000000000000000000+03/input3.tar", + "secondaryFiles": [ + { + "basename": "input3-2.txt", + "class": "File", + "location": "keep:00000000000000000000000000000000+03/input3-2.txt" + } + ] }, { - "basename": "input1-4.txt", - "class": "File", - "location": "keep:00000000000000000000000000000000+01/input1-4.txt" + "$import": "import_path" } ] } - } - }, - { - definition: { - "id": "#main/input_dir", - "doc": "Doc Description", - "type": "Directory" }, - input: { - "input_dir": { - "basename": "11111111111111111111111111111111+01", - "class": "Directory", - "location": "keep:11111111111111111111111111111111+01" + { + definition: { + "id": "#main/input_dir_array", + "type": { + "items": "Directory", + "type": "array" + } + }, + input: { + "input_dir_array": [ + { + "basename": "11111111111111111111111111111111+02", + "class": "Directory", + "location": "keep:11111111111111111111111111111111+02" + }, + { + "basename": "11111111111111111111111111111111+03", + "class": "Directory", + "location": "keep:11111111111111111111111111111111+03" + }, + { + "$import": "import_path" + } + ] } - } - }, - { - definition: { - "id": "#main/input_bool", - "doc": ["Doc desc 1", "Doc desc 2"], - "type": "boolean" - }, - input: { - "input_bool": true, - } - }, - { - definition: { - "id": "#main/input_int", - "type": "int" }, - input: { - "input_int": 1, - } - }, - { - definition: { - "id": "#main/input_long", - "type": "long" - }, - input: { - "input_long" : 1, - } - }, - { - definition: { - "id": "#main/input_float", - "type": "float" + { + definition: { + "id": "#main/input_int_array", + "type": { + "items": "int", + "type": "array" + } + }, + input: { + "input_int_array": [ + 1, + 3, + 5, + { + "$import": "import_path" + } + ] + } }, - input: { - "input_float": 1.5, - } - }, - { - definition: { - "id": "#main/input_double", - "type": "double" + { + definition: { + "id": "#main/input_long_array", + "type": { + "items": "long", + "type": "array" + } + }, + input: { + "input_long_array": [ + 10, + 20, + { + "$import": "import_path" + } + ] + } }, - input: { - "input_double": 1.3, - } - }, - { - definition: { - "id": "#main/input_string", - "type": "string" + { + definition: { + "id": 
"#main/input_float_array", + "type": { + "items": "float", + "type": "array" + } + }, + input: { + "input_float_array": [ + 10.2, + 10.4, + 10.6, + { + "$import": "import_path" + } + ] + } }, - input: { - "input_string": "Hello World", - } - }, - { - definition: { - "id": "#main/input_file_array", - "type": { - "items": "File", - "type": "array" + { + definition: { + "id": "#main/input_double_array", + "type": { + "items": "double", + "type": "array" + } + }, + input: { + "input_double_array": [ + 20.1, + 20.2, + 20.3, + { + "$import": "import_path" + } + ] } }, - input: { - "input_file_array": [ - { - "basename": "input2.tar", - "class": "File", - "location": "keep:00000000000000000000000000000000+02/input2.tar" - }, - { - "basename": "input3.tar", - "class": "File", - "location": "keep:00000000000000000000000000000000+03/input3.tar", - "secondaryFiles": [ - { - "basename": "input3-2.txt", - "class": "File", - "location": "keep:00000000000000000000000000000000+03/input3-2.txt" - } - ] - }, - { - "$import": "import_path" + { + definition: { + "id": "#main/input_string_array", + "type": { + "items": "string", + "type": "array" } - ] - } - }, - { - definition: { - "id": "#main/input_dir_array", - "type": { - "items": "Directory", - "type": "array" + }, + input: { + "input_string_array": [ + "Hello", + "World", + "!", + { + "$import": "import_path" + } + ] } }, - input: { - "input_dir_array": [ - { - "basename": "11111111111111111111111111111111+02", - "class": "Directory", - "location": "keep:11111111111111111111111111111111+02" - }, - { - "basename": "11111111111111111111111111111111+03", - "class": "Directory", - "location": "keep:11111111111111111111111111111111+03" - }, - { - "$import": "import_path" + { + definition: { + "id": "#main/input_bool_include", + "type": "boolean" + }, + input: { + "input_bool_include": { + "$include": "include_path" } - ] - } - }, - { - definition: { - "id": "#main/input_int_array", - "type": { - "items": "int", - "type": "array" } }, - input: { - "input_int_array": [ - 1, - 3, - 5, - { - "$import": "import_path" + { + definition: { + "id": "#main/input_int_include", + "type": "int" + }, + input: { + "input_int_include": { + "$include": "include_path" } - ] - } - }, - { - definition: { - "id": "#main/input_long_array", - "type": { - "items": "long", - "type": "array" } }, - input: { - "input_long_array": [ - 10, - 20, - { - "$import": "import_path" + { + definition: { + "id": "#main/input_float_include", + "type": "float" + }, + input: { + "input_float_include": { + "$include": "include_path" } - ] - } - }, - { - definition: { - "id": "#main/input_float_array", - "type": { - "items": "float", - "type": "array" } }, - input: { - "input_float_array": [ - 10.2, - 10.4, - 10.6, - { - "$import": "import_path" + { + definition: { + "id": "#main/input_string_include", + "type": "string" + }, + input: { + "input_string_include": { + "$include": "include_path" } - ] - } - }, - { - definition: { - "id": "#main/input_double_array", - "type": { - "items": "double", - "type": "array" } }, - input: { - "input_double_array": [ - 20.1, - 20.2, - 20.3, - { - "$import": "import_path" + { + definition: { + "id": "#main/input_file_include", + "type": "File" + }, + input: { + "input_file_include": { + "$include": "include_path" } - ] - } - }, - { - definition: { - "id": "#main/input_string_array", - "type": { - "items": "string", - "type": "array" } }, - input: { - "input_string_array": [ - "Hello", - "World", - "!", - { - "$import": "import_path" + { + definition: { + "id": 
"#main/input_directory_include", + "type": "Directory" + }, + input: { + "input_directory_include": { + "$include": "include_path" } - ] - } - }, - { - definition: { - "id": "#main/input_bool_include", - "type": "boolean" + } }, - input: { - "input_bool_include": { - "$include": "include_path" + { + definition: { + "id": "#main/input_file_url", + "type": "File" + }, + input: { + "input_file_url": { + "basename": "index.html", + "class": "File", + "location": "http://example.com/index.html" + } } } - }, - { - definition: { - "id": "#main/input_int_include", - "type": "int" + ]; + + const testOutputs = [ + { + definition: { + "id": "#main/output_file", + "label": "Label Description", + "type": "File" + }, + output: { + "output_file": { + "basename": "cat.png", + "class": "File", + "location": "cat.png" + } + } }, - input: { - "input_int_include": { - "$include": "include_path" + { + definition: { + "id": "#main/output_file_with_secondary", + "doc": "Doc Description", + "type": "File" + }, + output: { + "output_file_with_secondary": { + "basename": "main.dat", + "class": "File", + "location": "main.dat", + "secondaryFiles": [ + { + "basename": "secondary.dat", + "class": "File", + "location": "secondary.dat" + }, + { + "basename": "secondary2.dat", + "class": "File", + "location": "secondary2.dat" + } + ] + } } - } - }, - { - definition: { - "id": "#main/input_float_include", - "type": "float" }, - input: { - "input_float_include": { - "$include": "include_path" + { + definition: { + "id": "#main/output_dir", + "doc": ["Doc desc 1", "Doc desc 2"], + "type": "Directory" + }, + output: { + "output_dir": { + "basename": "outdir1", + "class": "Directory", + "location": "outdir1" + } } - } - }, - { - definition: { - "id": "#main/input_string_include", - "type": "string" }, - input: { - "input_string_include": { - "$include": "include_path" + { + definition: { + "id": "#main/output_bool", + "type": "boolean" + }, + output: { + "output_bool": true } - } - }, - { - definition: { - "id": "#main/input_file_include", - "type": "File" }, - input: { - "input_file_include": { - "$include": "include_path" + { + definition: { + "id": "#main/output_int", + "type": "int" + }, + output: { + "output_int": 1 } - } - }, - { - definition: { - "id": "#main/input_directory_include", - "type": "Directory" }, - input: { - "input_directory_include": { - "$include": "include_path" + { + definition: { + "id": "#main/output_long", + "type": "long" + }, + output: { + "output_long": 1 } - } - }, - { - definition: { - "id": "#main/input_file_url", - "type": "File" }, - input: { - "input_file_url": { - "basename": "index.html", - "class": "File", - "location": "http://example.com/index.html" - } - } - } - ]; - - const testOutputs = [ - { - definition: { - "id": "#main/output_file", - "label": "Label Description", - "type": "File" + { + definition: { + "id": "#main/output_float", + "type": "float" + }, + output: { + "output_float": 100.5 + } }, - output: { - "output_file": { - "basename": "cat.png", - "class": "File", - "location": "cat.png" + { + definition: { + "id": "#main/output_double", + "type": "double" + }, + output: { + "output_double": 100.3 + } + }, + { + definition: { + "id": "#main/output_string", + "type": "string" + }, + output: { + "output_string": "Hello output" } - } - }, - { - definition: { - "id": "#main/output_file_with_secondary", - "doc": "Doc Description", - "type": "File" }, - output: { - "output_file_with_secondary": { - "basename": "main.dat", - "class": "File", - "location": "main.dat", - 
"secondaryFiles": [ + { + definition: { + "id": "#main/output_file_array", + "type": { + "items": "File", + "type": "array" + } + }, + output: { + "output_file_array": [ { - "basename": "secondary.dat", + "basename": "output2.tar", "class": "File", - "location": "secondary.dat" + "location": "output2.tar" }, { - "basename": "secondary2.dat", + "basename": "output3.tar", "class": "File", - "location": "secondary2.dat" + "location": "output3.tar" } ] } - } - }, - { - definition: { - "id": "#main/output_dir", - "doc": ["Doc desc 1", "Doc desc 2"], - "type": "Directory" - }, - output: { - "output_dir": { - "basename": "outdir1", - "class": "Directory", - "location": "outdir1" - } - } - }, - { - definition: { - "id": "#main/output_bool", - "type": "boolean" - }, - output: { - "output_bool": true - } - }, - { - definition: { - "id": "#main/output_int", - "type": "int" - }, - output: { - "output_int": 1 - } - }, - { - definition: { - "id": "#main/output_long", - "type": "long" - }, - output: { - "output_long": 1 - } - }, - { - definition: { - "id": "#main/output_float", - "type": "float" - }, - output: { - "output_float": 100.5 - } - }, - { - definition: { - "id": "#main/output_double", - "type": "double" - }, - output: { - "output_double": 100.3 - } - }, - { - definition: { - "id": "#main/output_string", - "type": "string" - }, - output: { - "output_string": "Hello output" - } - }, - { - definition: { - "id": "#main/output_file_array", - "type": { - "items": "File", - "type": "array" - } }, - output: { - "output_file_array": [ - { - "basename": "output2.tar", - "class": "File", - "location": "output2.tar" - }, - { - "basename": "output3.tar", - "class": "File", - "location": "output3.tar" + { + definition: { + "id": "#main/output_dir_array", + "type": { + "items": "Directory", + "type": "array" } - ] - } - }, - { - definition: { - "id": "#main/output_dir_array", - "type": { - "items": "Directory", - "type": "array" + }, + output: { + "output_dir_array": [ + { + "basename": "outdir2", + "class": "Directory", + "location": "outdir2" + }, + { + "basename": "outdir3", + "class": "Directory", + "location": "outdir3" + } + ] } }, - output: { - "output_dir_array": [ - { - "basename": "outdir2", - "class": "Directory", - "location": "outdir2" - }, - { - "basename": "outdir3", - "class": "Directory", - "location": "outdir3" + { + definition: { + "id": "#main/output_int_array", + "type": { + "items": "int", + "type": "array" } - ] - } - }, - { - definition: { - "id": "#main/output_int_array", - "type": { - "items": "int", - "type": "array" + }, + output: { + "output_int_array": [ + 10, + 11, + 12 + ] } }, - output: { - "output_int_array": [ - 10, - 11, - 12 - ] - } - }, - { - definition: { - "id": "#main/output_long_array", - "type": { - "items": "long", - "type": "array" + { + definition: { + "id": "#main/output_long_array", + "type": { + "items": "long", + "type": "array" + } + }, + output: { + "output_long_array": [ + 51, + 52 + ] } }, - output: { - "output_long_array": [ - 51, - 52 - ] - } - }, - { - definition: { - "id": "#main/output_float_array", - "type": { - "items": "float", - "type": "array" + { + definition: { + "id": "#main/output_float_array", + "type": { + "items": "float", + "type": "array" + } + }, + output: { + "output_float_array": [ + 100.2, + 100.4, + 100.6 + ] } }, - output: { - "output_float_array": [ - 100.2, - 100.4, - 100.6 - ] - } - }, - { - definition: { - "id": "#main/output_double_array", - "type": { - "items": "double", - "type": "array" + { + definition: { + "id": 
"#main/output_double_array", + "type": { + "items": "double", + "type": "array" + } + }, + output: { + "output_double_array": [ + 100.1, + 100.2, + 100.3 + ] } }, - output: { - "output_double_array": [ - 100.1, - 100.2, - 100.3 - ] - } - }, - { - definition: { - "id": "#main/output_string_array", - "type": { - "items": "string", - "type": "array" + { + definition: { + "id": "#main/output_string_array", + "type": { + "items": "string", + "type": "array" + } + }, + output: { + "output_string_array": [ + "Hello", + "Output", + "!" + ] } - }, - output: { - "output_string_array": [ - "Hello", - "Output", - "!" - ] } - } - ]; - - const verifyIOParameter = (name, label, doc, val, collection, multipleRows) => { - cy.get('table tr').contains(name).parents('tr').within(($mainRow) => { - label && cy.contains(label); - - if (multipleRows) { - cy.get($mainRow).nextUntil('[data-cy="process-io-param"]').as('secondaryRows'); - if (val) { - if (Array.isArray(val)) { - val.forEach(v => cy.get('@secondaryRows').contains(v)); - } else { - cy.get('@secondaryRows').contains(val); + ]; + + const verifyIOParameter = (name, label, doc, val, collection, multipleRows) => { + cy.get('table tr').contains(name).parents('tr').within(($mainRow) => { + label && cy.contains(label); + + if (multipleRows) { + cy.get($mainRow).nextUntil('[data-cy="process-io-param"]').as('secondaryRows'); + if (val) { + if (Array.isArray(val)) { + val.forEach(v => cy.get('@secondaryRows').contains(v)); + } else { + cy.get('@secondaryRows').contains(val); + } } - } - if (collection) { - cy.get('@secondaryRows').contains(collection); - } - } else { - if (val) { - if (Array.isArray(val)) { - val.forEach(v => cy.contains(v)); - } else { - cy.contains(val); + if (collection) { + cy.get('@secondaryRows').contains(collection); + } + } else { + if (val) { + if (Array.isArray(val)) { + val.forEach(v => cy.contains(v)); + } else { + cy.contains(val); + } + } + if (collection) { + cy.contains(collection); } } - if (collection) { - cy.contains(collection); - } - } - }); - }; - - const verifyIOParameterImage = (name, url) => { - cy.get('table tr').contains(name).parents('tr').within(() => { - cy.get('[alt="Inline Preview"]') - .should('be.visible') - .and(($img) => { - expect($img[0].naturalWidth).to.be.greaterThan(0); - expect($img[0].src).contains(url); - }) - }); - }; + }); + }; + + const verifyIOParameterImage = (name, url) => { + cy.get('table tr').contains(name).parents('tr').within(() => { + cy.get('[alt="Inline Preview"]') + .should('be.visible') + .and(($img) => { + expect($img[0].naturalWidth).to.be.greaterThan(0); + expect($img[0].src).contains(url); + }) + }); + }; + + it('displays IO parameters with keep links and previews', function() { + // Create output collection for real files + cy.createCollection(adminUser.token, { + name: `Test collection ${Math.floor(Math.random() * 999999)}`, + owner_uuid: activeUser.user.uuid, + }).then((testOutputCollection) => { + cy.loginAs(activeUser); + + cy.goToPath(`/collections/${testOutputCollection.uuid}`); + + cy.get('[data-cy=upload-button]').click(); + + cy.fixture('files/cat.png', 'base64').then(content => { + cy.get('[data-cy=drag-and-drop]').upload(content, 'cat.png'); + cy.get('[data-cy=form-submit-btn]').click(); + cy.waitForDom().get('[data-cy=form-submit-btn]').should('not.exist'); + // Confirm final collection state. 
+ cy.get('[data-cy=collection-files-panel]') + .contains('cat.png').should('exist'); + }); + + cy.getCollection(activeUser.token, testOutputCollection.uuid).as('testOutputCollection'); + }); - it('displays IO parameters with keep links and previews', function() { - // Create output collection for real files - cy.createCollection(adminUser.token, { - name: `Test collection ${Math.floor(Math.random() * 999999)}`, - owner_uuid: activeUser.user.uuid, - }).then((testOutputCollection) => { - cy.loginAs(activeUser); + // Get updated collection pdh + cy.getAll('@testOutputCollection').then(([testOutputCollection]) => { + // Add output uuid and inputs to container request + cy.intercept({method: 'GET', url: '**/arvados/v1/container_requests/*'}, (req) => { + req.reply((res) => { + res.body.output_uuid = testOutputCollection.uuid; + res.body.mounts["/var/lib/cwl/cwl.input.json"] = { + content: testInputs.map((param) => (param.input)).reduce((acc, val) => (Object.assign(acc, val)), {}) + }; + res.body.mounts["/var/lib/cwl/workflow.json"] = { + content: { + $graph: [{ + id: "#main", + inputs: testInputs.map((input) => (input.definition)), + outputs: testOutputs.map((output) => (output.definition)) + }] + } + }; + }); + }); - cy.goToPath(`/collections/${testOutputCollection.uuid}`); + // Stub fake output collection + cy.intercept({method: 'GET', url: `**/arvados/v1/collections/${testOutputCollection.uuid}*`}, { + statusCode: 200, + body: { + uuid: testOutputCollection.uuid, + portable_data_hash: testOutputCollection.portable_data_hash, + } + }); - cy.get('[data-cy=upload-button]').click(); + // Stub fake output json + cy.intercept({method: 'GET', url: '**/c%3Dzzzzz-4zz18-zzzzzzzzzzzzzzz/cwl.output.json'}, { + statusCode: 200, + body: testOutputs.map((param) => (param.output)).reduce((acc, val) => (Object.assign(acc, val)), {}) + }); + + // Stub webdav response, points to output json + cy.intercept({method: 'PROPFIND', url: '*'}, { + fixture: 'webdav-propfind-outputs.xml', + }); + }); - cy.fixture('files/cat.png', 'base64').then(content => { - cy.get('[data-cy=drag-and-drop]').upload(content, 'cat.png'); - cy.get('[data-cy=form-submit-btn]').click(); - cy.waitForDom().get('[data-cy=form-submit-btn]').should('not.exist'); - // Confirm final collection state. 
- cy.get('[data-cy=collection-files-panel]') - .contains('cat.png').should('exist'); + createContainerRequest( + activeUser, + 'test_container_request', + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .as('containerRequest'); + + cy.getAll('@containerRequest', '@testOutputCollection').then(function([containerRequest, testOutputCollection]) { + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-io-card] h6').contains('Inputs') + .parents('[data-cy=process-io-card]').within(() => { + verifyIOParameter('input_file', null, "Label Description", 'input1.tar', '00000000000000000000000000000000+01'); + verifyIOParameter('input_file', null, "Label Description", 'input1-2.txt', undefined, true); + verifyIOParameter('input_file', null, "Label Description", 'input1-3.txt', undefined, true); + verifyIOParameter('input_file', null, "Label Description", 'input1-4.txt', undefined, true); + verifyIOParameter('input_dir', null, "Doc Description", '/', '11111111111111111111111111111111+01'); + verifyIOParameter('input_bool', null, "Doc desc 1, Doc desc 2", 'true'); + verifyIOParameter('input_int', null, null, '1'); + verifyIOParameter('input_long', null, null, '1'); + verifyIOParameter('input_float', null, null, '1.5'); + verifyIOParameter('input_double', null, null, '1.3'); + verifyIOParameter('input_string', null, null, 'Hello World'); + verifyIOParameter('input_file_array', null, null, 'input2.tar', '00000000000000000000000000000000+02'); + verifyIOParameter('input_file_array', null, null, 'input3.tar', undefined, true); + verifyIOParameter('input_file_array', null, null, 'input3-2.txt', undefined, true); + verifyIOParameter('input_file_array', null, null, 'Cannot display value', undefined, true); + verifyIOParameter('input_dir_array', null, null, '/', '11111111111111111111111111111111+02'); + verifyIOParameter('input_dir_array', null, null, '/', '11111111111111111111111111111111+03', true); + verifyIOParameter('input_dir_array', null, null, 'Cannot display value', undefined, true); + verifyIOParameter('input_int_array', null, null, ["1", "3", "5", "Cannot display value"]); + verifyIOParameter('input_long_array', null, null, ["10", "20", "Cannot display value"]); + verifyIOParameter('input_float_array', null, null, ["10.2", "10.4", "10.6", "Cannot display value"]); + verifyIOParameter('input_double_array', null, null, ["20.1", "20.2", "20.3", "Cannot display value"]); + verifyIOParameter('input_string_array', null, null, ["Hello", "World", "!", "Cannot display value"]); + verifyIOParameter('input_bool_include', null, null, "Cannot display value"); + verifyIOParameter('input_int_include', null, null, "Cannot display value"); + verifyIOParameter('input_float_include', null, null, "Cannot display value"); + verifyIOParameter('input_string_include', null, null, "Cannot display value"); + verifyIOParameter('input_file_include', null, null, "Cannot display value"); + verifyIOParameter('input_directory_include', null, null, "Cannot display value"); + verifyIOParameter('input_file_url', null, null, "http://example.com/index.html"); + }); + cy.get('[data-cy=process-io-card] h6').contains('Outputs') + .parents('[data-cy=process-io-card]').within((ctx) => { + cy.get(ctx).scrollIntoView(); + cy.get('[data-cy="io-preview-image-toggle"]').click({waitForAnimations: false}); + const outPdh = testOutputCollection.portable_data_hash; + + verifyIOParameter('output_file', null, "Label Description", 'cat.png', `${outPdh}`); + verifyIOParameterImage('output_file', 
`/c=${outPdh}/cat.png`); + verifyIOParameter('output_file_with_secondary', null, "Doc Description", 'main.dat', `${outPdh}`); + verifyIOParameter('output_file_with_secondary', null, "Doc Description", 'secondary.dat', undefined, true); + verifyIOParameter('output_file_with_secondary', null, "Doc Description", 'secondary2.dat', undefined, true); + verifyIOParameter('output_dir', null, "Doc desc 1, Doc desc 2", 'outdir1', `${outPdh}`); + verifyIOParameter('output_bool', null, null, 'true'); + verifyIOParameter('output_int', null, null, '1'); + verifyIOParameter('output_long', null, null, '1'); + verifyIOParameter('output_float', null, null, '100.5'); + verifyIOParameter('output_double', null, null, '100.3'); + verifyIOParameter('output_string', null, null, 'Hello output'); + verifyIOParameter('output_file_array', null, null, 'output2.tar', `${outPdh}`); + verifyIOParameter('output_file_array', null, null, 'output3.tar', undefined, true); + verifyIOParameter('output_dir_array', null, null, 'outdir2', `${outPdh}`); + verifyIOParameter('output_dir_array', null, null, 'outdir3', undefined, true); + verifyIOParameter('output_int_array', null, null, ["10", "11", "12"]); + verifyIOParameter('output_long_array', null, null, ["51", "52"]); + verifyIOParameter('output_float_array', null, null, ["100.2", "100.4", "100.6"]); + verifyIOParameter('output_double_array', null, null, ["100.1", "100.2", "100.3"]); + verifyIOParameter('output_string_array', null, null, ["Hello", "Output", "!"]); }); + }); + }); - cy.getCollection(activeUser.token, testOutputCollection.uuid).as('testOutputCollection'); - }); + it('displays IO parameters with no value', function() { + + const fakeOutputUUID = 'zzzzz-4zz18-abcdefghijklmno'; + const fakeOutputPDH = '11111111111111111111111111111111+99/'; + + cy.loginAs(activeUser); - // Get updated collection pdh - cy.getAll('@testOutputCollection').then(([testOutputCollection]) => { // Add output uuid and inputs to container request cy.intercept({method: 'GET', url: '**/arvados/v1/container_requests/*'}, (req) => { req.reply((res) => { - res.body.output_uuid = testOutputCollection.uuid; + res.body.output_uuid = fakeOutputUUID; res.body.mounts["/var/lib/cwl/cwl.input.json"] = { - content: testInputs.map((param) => (param.input)).reduce((acc, val) => (Object.assign(acc, val)), {}) + content: {} }; res.body.mounts["/var/lib/cwl/workflow.json"] = { content: { @@ -974,386 +1424,53 @@ describe('Process tests', function() { }); // Stub fake output collection - cy.intercept({method: 'GET', url: `**/arvados/v1/collections/${testOutputCollection.uuid}*`}, { + cy.intercept({method: 'GET', url: `**/arvados/v1/collections/${fakeOutputUUID}*`}, { statusCode: 200, body: { - uuid: testOutputCollection.uuid, - portable_data_hash: testOutputCollection.portable_data_hash, + uuid: fakeOutputUUID, + portable_data_hash: fakeOutputPDH, } }); // Stub fake output json - cy.intercept({method: 'GET', url: '**/c%3Dzzzzz-4zz18-zzzzzzzzzzzzzzz/cwl.output.json'}, { + cy.intercept({method: 'GET', url: `**/c%3D${fakeOutputUUID}/cwl.output.json`}, { statusCode: 200, - body: testOutputs.map((param) => (param.output)).reduce((acc, val) => (Object.assign(acc, val)), {}) + body: {} }); - // Stub webdav response, points to output json - cy.intercept({method: 'PROPFIND', url: '*'}, { - fixture: 'webdav-propfind-outputs.xml', - }); - }); - - createContainerRequest( - activeUser, - 'test_container_request', - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .as('containerRequest'); - - 
cy.getAll('@containerRequest', '@testOutputCollection').then(function([containerRequest, testOutputCollection]) { - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-io-card] h6').contains('Inputs') - .parents('[data-cy=process-io-card]').within(() => { - verifyIOParameter('input_file', null, "Label Description", 'input1.tar', '00000000000000000000000000000000+01'); - verifyIOParameter('input_file', null, "Label Description", 'input1-2.txt', undefined, true); - verifyIOParameter('input_file', null, "Label Description", 'input1-3.txt', undefined, true); - verifyIOParameter('input_file', null, "Label Description", 'input1-4.txt', undefined, true); - verifyIOParameter('input_dir', null, "Doc Description", '/', '11111111111111111111111111111111+01'); - verifyIOParameter('input_bool', null, "Doc desc 1, Doc desc 2", 'true'); - verifyIOParameter('input_int', null, null, '1'); - verifyIOParameter('input_long', null, null, '1'); - verifyIOParameter('input_float', null, null, '1.5'); - verifyIOParameter('input_double', null, null, '1.3'); - verifyIOParameter('input_string', null, null, 'Hello World'); - verifyIOParameter('input_file_array', null, null, 'input2.tar', '00000000000000000000000000000000+02'); - verifyIOParameter('input_file_array', null, null, 'input3.tar', undefined, true); - verifyIOParameter('input_file_array', null, null, 'input3-2.txt', undefined, true); - verifyIOParameter('input_file_array', null, null, 'Cannot display value', undefined, true); - verifyIOParameter('input_dir_array', null, null, '/', '11111111111111111111111111111111+02'); - verifyIOParameter('input_dir_array', null, null, '/', '11111111111111111111111111111111+03', true); - verifyIOParameter('input_dir_array', null, null, 'Cannot display value', undefined, true); - verifyIOParameter('input_int_array', null, null, ["1", "3", "5", "Cannot display value"]); - verifyIOParameter('input_long_array', null, null, ["10", "20", "Cannot display value"]); - verifyIOParameter('input_float_array', null, null, ["10.2", "10.4", "10.6", "Cannot display value"]); - verifyIOParameter('input_double_array', null, null, ["20.1", "20.2", "20.3", "Cannot display value"]); - verifyIOParameter('input_string_array', null, null, ["Hello", "World", "!", "Cannot display value"]); - verifyIOParameter('input_bool_include', null, null, "Cannot display value"); - verifyIOParameter('input_int_include', null, null, "Cannot display value"); - verifyIOParameter('input_float_include', null, null, "Cannot display value"); - verifyIOParameter('input_string_include', null, null, "Cannot display value"); - verifyIOParameter('input_file_include', null, null, "Cannot display value"); - verifyIOParameter('input_directory_include', null, null, "Cannot display value"); - verifyIOParameter('input_file_url', null, null, "http://example.com/index.html"); + cy.readFile('cypress/fixtures/webdav-propfind-outputs.xml').then((data) => { + // Stub webdav response, points to output json + cy.intercept({method: 'PROPFIND', url: '*'}, { + statusCode: 200, + body: data.replace(/zzzzz-4zz18-zzzzzzzzzzzzzzz/g, fakeOutputUUID) }); - cy.get('[data-cy=process-io-card] h6').contains('Outputs') - .parents('[data-cy=process-io-card]').within((ctx) => { - cy.get(ctx).scrollIntoView(); - cy.get('[data-cy="io-preview-image-toggle"]').click({waitForAnimations: false}); - const outPdh = testOutputCollection.portable_data_hash; - - verifyIOParameter('output_file', null, "Label Description", 'cat.png', `${outPdh}`); - verifyIOParameterImage('output_file', 
`/c=${outPdh}/cat.png`); - verifyIOParameter('output_file_with_secondary', null, "Doc Description", 'main.dat', `${outPdh}`); - verifyIOParameter('output_file_with_secondary', null, "Doc Description", 'secondary.dat', undefined, true); - verifyIOParameter('output_file_with_secondary', null, "Doc Description", 'secondary2.dat', undefined, true); - verifyIOParameter('output_dir', null, "Doc desc 1, Doc desc 2", 'outdir1', `${outPdh}`); - verifyIOParameter('output_bool', null, null, 'true'); - verifyIOParameter('output_int', null, null, '1'); - verifyIOParameter('output_long', null, null, '1'); - verifyIOParameter('output_float', null, null, '100.5'); - verifyIOParameter('output_double', null, null, '100.3'); - verifyIOParameter('output_string', null, null, 'Hello output'); - verifyIOParameter('output_file_array', null, null, 'output2.tar', `${outPdh}`); - verifyIOParameter('output_file_array', null, null, 'output3.tar', undefined, true); - verifyIOParameter('output_dir_array', null, null, 'outdir2', `${outPdh}`); - verifyIOParameter('output_dir_array', null, null, 'outdir3', undefined, true); - verifyIOParameter('output_int_array', null, null, ["10", "11", "12"]); - verifyIOParameter('output_long_array', null, null, ["51", "52"]); - verifyIOParameter('output_float_array', null, null, ["100.2", "100.4", "100.6"]); - verifyIOParameter('output_double_array', null, null, ["100.1", "100.2", "100.3"]); - verifyIOParameter('output_string_array', null, null, ["Hello", "Output", "!"]); - }); - }); - }); - - it('displays IO parameters with no value', function() { - - const fakeOutputUUID = 'zzzzz-4zz18-abcdefghijklmno'; - const fakeOutputPDH = '11111111111111111111111111111111+99/'; - - cy.loginAs(activeUser); - - // Add output uuid and inputs to container request - cy.intercept({method: 'GET', url: '**/arvados/v1/container_requests/*'}, (req) => { - req.reply((res) => { - res.body.output_uuid = fakeOutputUUID; - res.body.mounts["/var/lib/cwl/cwl.input.json"] = { - content: {} - }; - res.body.mounts["/var/lib/cwl/workflow.json"] = { - content: { - $graph: [{ - id: "#main", - inputs: testInputs.map((input) => (input.definition)), - outputs: testOutputs.map((output) => (output.definition)) - }] - } - }; }); - }); - // Stub fake output collection - cy.intercept({method: 'GET', url: `**/arvados/v1/collections/${fakeOutputUUID}*`}, { - statusCode: 200, - body: { - uuid: fakeOutputUUID, - portable_data_hash: fakeOutputPDH, - } - }); - - // Stub fake output json - cy.intercept({method: 'GET', url: `**/c%3D${fakeOutputUUID}/cwl.output.json`}, { - statusCode: 200, - body: {} - }); + createContainerRequest( + activeUser, + 'test_container_request', + 'arvados/jobs', + ['echo', 'hello world'], + false, 'Committed') + .as('containerRequest'); - cy.readFile('cypress/fixtures/webdav-propfind-outputs.xml').then((data) => { - // Stub webdav response, points to output json - cy.intercept({method: 'PROPFIND', url: '*'}, { - statusCode: 200, - body: data.replace(/zzzzz-4zz18-zzzzzzzzzzzzzzz/g, fakeOutputUUID) - }); - }); - - createContainerRequest( - activeUser, - 'test_container_request', - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .as('containerRequest'); - - cy.getAll('@containerRequest').then(function([containerRequest]) { - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-io-card] h6').contains('Inputs') - .parents('[data-cy=process-io-card]').within(() => { - cy.wait(2000); - cy.waitForDom(); - cy.get('tbody tr').each((item) => { - cy.wrap(item).contains('No 
value'); + cy.getAll('@containerRequest').then(function([containerRequest]) { + cy.goToPath(`/processes/${containerRequest.uuid}`); + cy.get('[data-cy=process-io-card] h6').contains('Inputs') + .parents('[data-cy=process-io-card]').within(() => { + cy.wait(2000); + cy.waitForDom(); + cy.get('tbody tr').each((item) => { + cy.wrap(item).contains('No value'); + }); }); - }); - cy.get('[data-cy=process-io-card] h6').contains('Outputs') - .parents('[data-cy=process-io-card]').within(() => { - cy.get('tbody tr').each((item) => { - cy.wrap(item).contains('No value'); + cy.get('[data-cy=process-io-card] h6').contains('Outputs') + .parents('[data-cy=process-io-card]').within(() => { + cy.get('tbody tr').each((item) => { + cy.wrap(item).contains('No value'); + }); }); - }); - }); - }); - - - it('allows copying processes', function() { - const crName = 'first_container_request'; - const copiedCrName = 'copied_container_request'; - createContainerRequest( - activeUser, - crName, - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .then(function(containerRequest) { - cy.loginAs(activeUser); - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.get('[data-cy=process-details]').should('contain', crName); - - cy.get('[data-cy=process-details]').find('button[title="More options"]').click(); - cy.get('ul[data-cy=context-menu]').contains("Copy and re-run process").click(); - }); - - cy.get('[data-cy=form-dialog]').within(() => { - cy.get('input[name=name]').clear().type(copiedCrName); - cy.get('[data-cy=projects-tree-home-tree-picker]').click(); - cy.get('[data-cy=form-submit-btn]').click(); - }); - - cy.get('[data-cy=process-details]').should('contain', copiedCrName); - cy.get('[data-cy=process-details]').find('button').contains('Run'); - }); - - const getFakeContainer = (fakeContainerUuid) => ({ - href: `/containers/${fakeContainerUuid}`, - kind: "arvados#container", - etag: "ecfosljpnxfari9a8m7e4yv06", - uuid: fakeContainerUuid, - owner_uuid: "zzzzz-tpzed-000000000000000", - created_at: "2023-02-13T15:55:47.308915000Z", - modified_by_client_uuid: "zzzzz-ozdt8-q6dzdi1lcc03155", - modified_by_user_uuid: "zzzzz-tpzed-000000000000000", - modified_at: "2023-02-15T19:12:45.987086000Z", - command: [ - "arvados-cwl-runner", - "--api=containers", - "--local", - "--project-uuid=zzzzz-j7d0g-yr18k784zplfeza", - "/var/lib/cwl/workflow.json#main", - "/var/lib/cwl/cwl.input.json", - ], - container_image: "4ad7d11381df349e464694762db14e04+303", - cwd: "/var/spool/cwl", - environment: {}, - exit_code: null, - finished_at: null, - locked_by_uuid: null, - log: null, - output: null, - output_path: "/var/spool/cwl", - progress: null, - runtime_constraints: { - API: true, - cuda: { - device_count: 0, - driver_version: "", - hardware_capability: "", - }, - keep_cache_disk: 2147483648, - keep_cache_ram: 0, - ram: 1342177280, - vcpus: 1, - }, - runtime_status: {}, - started_at: null, - auth_uuid: null, - scheduling_parameters: { - max_run_time: 0, - partitions: [], - preemptible: false, - }, - runtime_user_uuid: "zzzzz-tpzed-vllbpebicy84rd5", - runtime_auth_scopes: ["all"], - lock_count: 2, - gateway_address: null, - interactive_session_started: false, - output_storage_classes: ["default"], - output_properties: {}, - cost: 0.0, - subrequests_cost: 0.0, - }); - - it('shows cancel button when appropriate', function() { - // Ignore collection requests - cy.intercept({method: 'GET', url: `**/arvados/v1/collections/*`}, { - statusCode: 200, - body: {} - }); - - // Uncommitted container - const crUncommitted = 
`Test process ${Math.floor(Math.random() * 999999)}`; - createContainerRequest( - activeUser, - crUncommitted, - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Uncommitted') - .then(function(containerRequest) { - // Navigate to process and verify run / cancel button - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.waitForDom(); - cy.get('[data-cy=process-details]').should('contain', crUncommitted); - cy.get('[data-cy=process-run-button]').should('exist'); - cy.get('[data-cy=process-cancel-button]').should('not.exist'); - }); - - // Queued container - const crQueued = `Test process ${Math.floor(Math.random() * 999999)}`; - const fakeCrUuid = 'zzzzz-dz642-000000000000001'; - createContainerRequest( - activeUser, - crQueued, - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .then(function(containerRequest) { - // Fake container uuid - cy.intercept({method: 'GET', url: `**/arvados/v1/container_requests/${containerRequest.uuid}`}, (req) => { - req.reply((res) => { - res.body.output_uuid = fakeCrUuid; - res.body.priority = 500; - res.body.state = "Committed"; - }); }); - - // Fake container - const container = getFakeContainer(fakeCrUuid); - cy.intercept({method: 'GET', url: `**/arvados/v1/container/${fakeCrUuid}`}, { - statusCode: 200, - body: {...container, state: "Queued", priority: 500} - }); - - // Navigate to process and verify cancel button - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.waitForDom(); - cy.get('[data-cy=process-details]').should('contain', crQueued); - cy.get('[data-cy=process-cancel-button]').contains('Cancel'); - }); - - // Locked container - const crLocked = `Test process ${Math.floor(Math.random() * 999999)}`; - const fakeCrLockedUuid = 'zzzzz-dz642-000000000000002'; - createContainerRequest( - activeUser, - crLocked, - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .then(function(containerRequest) { - // Fake container uuid - cy.intercept({method: 'GET', url: `**/arvados/v1/container_requests/${containerRequest.uuid}`}, (req) => { - req.reply((res) => { - res.body.output_uuid = fakeCrLockedUuid; - res.body.priority = 500; - res.body.state = "Committed"; - }); - }); - - // Fake container - const container = getFakeContainer(fakeCrLockedUuid); - cy.intercept({method: 'GET', url: `**/arvados/v1/container/${fakeCrLockedUuid}`}, { - statusCode: 200, - body: {...container, state: "Locked", priority: 500} - }); - - // Navigate to process and verify cancel button - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.waitForDom(); - cy.get('[data-cy=process-details]').should('contain', crLocked); - cy.get('[data-cy=process-cancel-button]').contains('Cancel'); - }); - - // On Hold container - const crOnHold = `Test process ${Math.floor(Math.random() * 999999)}`; - const fakeCrOnHoldUuid = 'zzzzz-dz642-000000000000003'; - createContainerRequest( - activeUser, - crOnHold, - 'arvados/jobs', - ['echo', 'hello world'], - false, 'Committed') - .then(function(containerRequest) { - // Fake container uuid - cy.intercept({method: 'GET', url: `**/arvados/v1/container_requests/${containerRequest.uuid}`}, (req) => { - req.reply((res) => { - res.body.output_uuid = fakeCrOnHoldUuid; - res.body.priority = 0; - res.body.state = "Committed"; - }); - }); - - // Fake container - const container = getFakeContainer(fakeCrOnHoldUuid); - cy.intercept({method: 'GET', url: `**/arvados/v1/container/${fakeCrOnHoldUuid}`}, { - statusCode: 200, - body: {...container, state: "Queued", priority: 0} - }); - - // Navigate to process and 
verify cancel button - cy.goToPath(`/processes/${containerRequest.uuid}`); - cy.waitForDom(); - cy.get('[data-cy=process-details]').should('contain', crOnHold); - cy.get('[data-cy=process-run-button]').should('exist'); - cy.get('[data-cy=process-cancel-button]').should('not.exist'); }); }); diff --git a/cypress/support/commands.js b/cypress/support/commands.js index f09d959b..e4f6fd60 100644 --- a/cypress/support/commands.js +++ b/cypress/support/commands.js @@ -28,6 +28,8 @@ // -- This will overwrite an existing command -- // Cypress.Commands.overwrite("visit", (originalFn, url, options) => { ... }) +import { extractFilesData } from "services/collection-service/collection-service-files-response"; + const controllerURL = Cypress.env('controller_url'); const systemToken = Cypress.env('system_token'); let createdResources = []; @@ -60,6 +62,22 @@ Cypress.Commands.add( }); }); +Cypress.Commands.add( + "doKeepRequest", (method = 'GET', path = '', data = null, qs = null, + token = systemToken, auth = false, followRedirect = true, failOnStatusCode = true) => { + return cy.doRequest('GET', '/arvados/v1/config', null, null).then(({body: config}) => { + return cy.request({ + method: method, + url: `${config.Services.WebDAVDownload.ExternalURL.replace(/\/+$/, '')}/${path.replace(/^\/+/, '')}`, + body: data, + qs: auth ? qs : Object.assign({ api_token: token }, qs), + auth: auth ? { bearer: `${token}` } : undefined, + followRedirect: followRedirect, + failOnStatusCode: failOnStatusCode + }); + }); +}); + Cypress.Commands.add( "getUser", (username, first_name = '', last_name = '', is_admin = false, is_active = true) => { // Create user if not already created @@ -151,12 +169,6 @@ Cypress.Commands.add( } ) -Cypress.Commands.add( - "getCollection", (token, uuid) => { - return cy.getResource(token, 'collections', uuid) - } -) - Cypress.Commands.add( "createCollection", (token, data) => { return cy.createResource(token, 'collections', { @@ -166,6 +178,12 @@ Cypress.Commands.add( } ) +Cypress.Commands.add( + "getCollection", (token, uuid) => { + return cy.getResource(token, 'collections', uuid) + } +) + Cypress.Commands.add( "updateCollection", (token, uuid, data) => { return cy.updateResource(token, 'collections', uuid, { @@ -188,6 +206,12 @@ Cypress.Commands.add( } ) +Cypress.Commands.add( + "getContainerRequest", (token, uuid) => { + return cy.getResource(token, 'container_requests', uuid) + } +) + Cypress.Commands.add( 'createContainerRequest', (token, data) => { return cy.createResource(token, 'container_requests', { @@ -205,31 +229,80 @@ Cypress.Commands.add( } ) +/** + * Requires an admin token for log_uuid modification to succeed + */ Cypress.Commands.add( - "createLog", (token, data) => { - return cy.createResource(token, 'logs', { - log: JSON.stringify(data) + "appendLog", (token, crUuid, fileName, lines = []) => ( + cy.getContainerRequest(token, crUuid).then((containerRequest) => { + if (containerRequest.log_uuid) { + cy.listContainerRequestLogs(token, crUuid).then((logFiles) => { + if (logFiles.find((file) => (file.name === fileName))) { + // File exists, fetch and append + return cy.doKeepRequest( + "GET", + `c=${containerRequest.log_uuid}/${fileName}`, + null, + null, + token + ) + .then(({ body: contents }) => cy.doKeepRequest( + "PUT", + `c=${containerRequest.log_uuid}/${fileName}`, + contents.split("\n").concat(lines).join("\n"), + null, + token + )); + } else { + // File not exists, put new file + cy.doKeepRequest( + "PUT", + `c=${containerRequest.log_uuid}/${fileName}`, + 
lines.join("\n"), + null, + token + ) + } + }); + // Fetch current log contents and append new line + // let newLines = [...lines]; + // return cy.doKeepRequest('GET', `c=${containerRequest.log_uuid}/${fileName}`, null, null, token) + // .then(({body: contents}) => { + // newLines = [contents.split('\n'), ...newLines]; + // }) + // .then(() => ( + // cy.doKeepRequest('PUT', `c=${containerRequest.log_uuid}/${fileName}`, newLines.join('\n'), null, token) + // )); + } else { + // Create log collection + return cy.createCollection(token, { + name: `Test log collection ${Math.floor(Math.random() * 999999)}`, + owner_uuid: containerRequest.owner_uuid, + manifest_text: "" + }).then((collection) => ( + // Update CR log_uuid to fake log collection + cy.updateContainerRequest(token, containerRequest.uuid, { + log_uuid: collection.uuid, + }).then(() => ( + // Put new log file with contents into fake log collection + cy.doKeepRequest('PUT', `c=${collection.uuid}/${fileName}`, lines.join('\n'), null, token) + )) + )); + } }) - } + ) ) Cypress.Commands.add( - "logsForContainer", (token, uuid, logType, logTextArray = []) => { - let logs = []; - for (const logText of logTextArray) { - logs.push(cy.createLog(token, { - object_uuid: uuid, - event_type: logType, - properties: { - text: logText - } - }).as('lastLogRecord')) - } - cy.getAll('@lastLogRecord').then(function () { - return logs; - }) - } -) + "listContainerRequestLogs", (token, crUuid) => ( + cy.getContainerRequest(token, crUuid).then((containerRequest) => ( + cy.doKeepRequest('PROPFIND', `c=${containerRequest.log_uuid}`, null, null, token) + .then(({body: data}) => { + return extractFilesData(new DOMParser().parseFromString(data, "text/xml")); + }) + )) + ) +); Cypress.Commands.add( "createVirtualMachine", (token, data) => { diff --git a/src/common/use-async-interval.ts b/src/common/use-async-interval.ts new file mode 100644 index 00000000..8951a9b0 --- /dev/null +++ b/src/common/use-async-interval.ts @@ -0,0 +1,35 @@ +// Copyright (C) The Arvados Authors. All rights reserved. +// +// SPDX-License-Identifier: AGPL-3.0 + +var react = require("react"); + +export const useAsyncInterval = function (callback, delay) { + const savedCallback = react.useRef(); + const active = react.useRef(false); + + // Remember the latest callback. + react.useEffect(() => { + savedCallback.current = callback; + }, [callback]); + // Set up the interval. + react.useEffect(() => { + // useEffect doesn't like async callbacks (https://github.com/facebook/react/issues/14326) so create nested async callback + (async () => { + // Make tick() async + async function tick() { + if (active.current) { + // If savedCallback is not set yet, no-op until it is + savedCallback.current && await savedCallback.current(); + setTimeout(tick, delay); + } + } + if (delay !== null) { + active.current = true; + setTimeout(tick, delay); + } + })(); // Call nested async function + // We return the teardown function here since we can't from inside the nested async callback + return () => {active.current = false;}; + }, [delay]); +}; diff --git a/src/models/log.ts b/src/models/log.ts index 0109ad61..f5d351ac 100644 --- a/src/models/log.ts +++ b/src/models/log.ts @@ -18,7 +18,6 @@ export enum LogEventType { STDERR = 'stderr', CONTAINER = 'container', KEEPSTORE = 'keepstore', - SNIP = 'snip-line', // This type is for internal use only. 
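[Illustrative sketch, not part of the patch] The useAsyncInterval hook above is meant to be consumed like this minimal example; the component name and fetchLogs prop are hypothetical, and the 2000 ms delay mirrors the polling interval used later in the process log card. Because each tick awaits the callback before re-arming setTimeout, a slow request delays the next poll instead of overlapping it.

    import { useAsyncInterval } from 'common/use-async-interval';

    // Hypothetical consumer: polls an async callback every 2 seconds.
    // The hook awaits the callback, so ticks never overlap, and polling
    // stops on unmount via the effect teardown clearing the active flag.
    export const LogPoller = (props: { fetchLogs: () => Promise<void> }) => {
        useAsyncInterval(async () => {
            await props.fetchLogs();
        }, 2000);
        return null;
    };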
See #19851 } export interface LogResource extends Resource, ResourceWithProperties { diff --git a/src/services/collection-service/collection-service.test.ts b/src/services/collection-service/collection-service.test.ts index c8e907d3..3b4f423a 100644 --- a/src/services/collection-service/collection-service.test.ts +++ b/src/services/collection-service/collection-service.test.ts @@ -13,14 +13,14 @@ describe('collection-service', () => { let collectionService: CollectionService; let serverApi: AxiosInstance; let axiosMock: MockAdapter; - let webdavClient: any; + let keepWebdavClient: any; let authService; let actions; beforeEach(() => { serverApi = axios.create(); axiosMock = new MockAdapter(serverApi); - webdavClient = { + keepWebdavClient = { delete: jest.fn(), upload: jest.fn(), mkdir: jest.fn(), @@ -30,7 +30,7 @@ describe('collection-service', () => { progressFn: jest.fn(), errorFn: jest.fn(), } as any; - collectionService = new CollectionService(serverApi, webdavClient, authService, actions); + collectionService = new CollectionService(serverApi, keepWebdavClient, authService, actions); collectionService.update = jest.fn(); }); @@ -79,7 +79,7 @@ describe('collection-service', () => { }, select: ['uuid', 'name', 'version', 'modified_at'], } - collectionService = new CollectionService(serverApi, webdavClient, authService, actions); + collectionService = new CollectionService(serverApi, keepWebdavClient, authService, actions); await collectionService.update('uuid', data); expect(serverApi.put).toHaveBeenCalledWith('/collections/uuid', expected); }); @@ -95,7 +95,7 @@ describe('collection-service', () => { await collectionService.uploadFiles(collectionUUID, files); // then - expect(webdavClient.upload).not.toHaveBeenCalled(); + expect(keepWebdavClient.upload).not.toHaveBeenCalled(); }); it('should upload files', async () => { @@ -107,8 +107,8 @@ describe('collection-service', () => { await collectionService.uploadFiles(collectionUUID, files); // then - expect(webdavClient.upload).toHaveBeenCalledTimes(1); - expect(webdavClient.upload.mock.calls[0][0]).toEqual("c=zzzzz-4zz18-0123456789abcde/test-file1"); + expect(keepWebdavClient.upload).toHaveBeenCalledTimes(1); + expect(keepWebdavClient.upload.mock.calls[0][0]).toEqual("c=zzzzz-4zz18-0123456789abcde/test-file1"); }); it('should upload files with custom uplaod target', async () => { @@ -121,8 +121,8 @@ describe('collection-service', () => { await collectionService.uploadFiles(collectionUUID, files, undefined, customTarget); // then - expect(webdavClient.upload).toHaveBeenCalledTimes(1); - expect(webdavClient.upload.mock.calls[0][0]).toEqual("c=zzzzz-4zz18-0123456789adddd/test-path/test-file1"); + expect(keepWebdavClient.upload).toHaveBeenCalledTimes(1); + expect(keepWebdavClient.upload.mock.calls[0][0]).toEqual("c=zzzzz-4zz18-0123456789adddd/test-path/test-file1"); }); }); diff --git a/src/services/collection-service/collection-service.ts b/src/services/collection-service/collection-service.ts index b6687985..7e28c37b 100644 --- a/src/services/collection-service/collection-service.ts +++ b/src/services/collection-service/collection-service.ts @@ -23,7 +23,7 @@ export const emptyCollectionPdh = 'd41d8cd98f00b204e9800998ecf8427e+0'; export const SOURCE_DESTINATION_EQUAL_ERROR_MESSAGE = 'Source and destination cannot be the same'; export class CollectionService extends TrashableResourceService { - constructor(serverApi: AxiosInstance, private webdavClient: WebDAV, private authService: AuthService, actions: ApiActions) { + constructor(serverApi: 
AxiosInstance, private keepWebdavClient: WebDAV, private authService: AuthService, actions: ApiActions) { super(serverApi, "collections", actions, [ 'fileCount', 'fileSizeTotal', @@ -52,7 +52,7 @@ export class CollectionService extends TrashableResourceService { - const baseUrl = this.webdavClient.getBaseUrl().endsWith('/') - ? this.webdavClient.getBaseUrl().slice(0, -1) - : this.webdavClient.getBaseUrl(); + const baseUrl = this.keepWebdavClient.getBaseUrl().endsWith('/') + ? this.keepWebdavClient.getBaseUrl().slice(0, -1) + : this.keepWebdavClient.getBaseUrl(); const apiToken = this.authService.getApiToken(); const encodedApiToken = apiToken ? encodeURI(apiToken) : ''; const userApiToken = `/t=${encodedApiToken}/`; @@ -133,7 +133,7 @@ export class CollectionService extends TrashableResourceService { return; }, targetLocation: string = '') { @@ -146,7 +146,7 @@ export class CollectionService extends TrashableResourceService { - constructor(serverApi: AxiosInstance, actions: ApiActions) { + constructor(serverApi: AxiosInstance, private apiWebdavClient: WebDAV, actions: ApiActions) { super(serverApi, "logs", actions); } + + async listLogFiles(containerRequestUuid: string) { + const request = await this.apiWebdavClient.propfind(`container_requests/${containerRequestUuid}/log`); + if (request.responseXML != null) { + return extractFilesData(request.responseXML); + } + return Promise.reject(); + } + + async getLogFileContents(containerRequestUuid: string, fileRecord: CollectionFile, startByte: number, endByte: number): Promise { + try { + const request = await this.apiWebdavClient.get( + `container_requests/${containerRequestUuid}/log/${fileRecord.name}`, + {headers: {Range: `bytes=${startByte}-${endByte}`}} + ); + const logFileType = logFileToLogType(fileRecord); + + if (request.responseText && logFileType) { + return { + logType: logFileType, + contents: request.responseText.split(/\r?\n/), + }; + } else { + return undefined; + } + } catch(e) { + return undefined; + } + } } + +export const logFileToLogType = (file: CollectionFile | CollectionDirectory) => (file.name.replace(/\.(txt|json)$/, '') as LogEventType); diff --git a/src/services/services.ts b/src/services/services.ts index be6f16b6..cd04a65f 100644 --- a/src/services/services.ts +++ b/src/services/services.ts @@ -39,12 +39,14 @@ export function setAuthorizationHeader(services: ServiceRepository, token: strin services.apiClient.defaults.headers.common = { Authorization: `Bearer ${token}` }; - services.webdavClient.setAuthorization(`Bearer ${token}`); + services.keepWebdavClient.setAuthorization(`Bearer ${token}`); + services.apiWebdavClient.setAuthorization(`Bearer ${token}`); } export function removeAuthorizationHeader(services: ServiceRepository) { delete services.apiClient.defaults.headers.common; - services.webdavClient.setAuthorization(undefined); + services.keepWebdavClient.setAuthorization(undefined); + services.apiWebdavClient.setAuthorization(undefined); } export const createServices = (config: Config, actions: ApiActions, useApiClient?: AxiosInstance) => { @@ -55,10 +57,14 @@ export const createServices = (config: Config, actions: ApiActions, useApiClient const apiClient = useApiClient || Axios.create({ headers: {} }); apiClient.defaults.baseURL = config.baseUrl; - const webdavClient = new WebDAV({ + const keepWebdavClient = new WebDAV({ baseURL: config.keepWebServiceUrl }); + const apiWebdavClient = new WebDAV({ + baseURL: config.baseUrl + }); + const apiClientAuthorizationService = new 
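[Illustrative sketch, not part of the patch] A rough usage sketch of the new log service methods above; the uuid, file name, and byte window are invented. listLogFiles enumerates the log collection through the API server's WebDAV endpoint, and getLogFileContents issues a ranged GET so only the requested byte window is transferred and split into lines.

    import { LogService } from 'services/log-service/log-service';
    import { CollectionFile, CollectionFileType } from 'models/collection-file';

    // Hypothetical call site: peek at the first 64,000 bytes of stderr.txt.
    async function peekAtStderr(logService: LogService, crUuid: string) {
        const files = await logService.listLogFiles(crUuid);
        const stderr = files
            .filter(file => file.type === CollectionFileType.FILE)
            .find(file => file.name === 'stderr.txt') as CollectionFile | undefined;
        if (!stderr) { return undefined; }
        // Sends "Range: bytes=0-63999"; resolves to {logType, contents} or undefined.
        return logService.getLogFileContents(crUuid, stderr, 0, 63999);
    }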
ApiClientAuthorizationService(apiClient, actions); const authorizedKeysService = new AuthorizedKeysService(apiClient, actions); const containerRequestService = new ContainerRequestService(apiClient, actions); @@ -66,7 +72,7 @@ export const createServices = (config: Config, actions: ApiActions, useApiClient const groupsService = new GroupsService(apiClient, actions); const keepService = new KeepService(apiClient, actions); const linkService = new LinkService(apiClient, actions); - const logService = new LogService(apiClient, actions); + const logService = new LogService(apiClient, apiWebdavClient, actions); const permissionService = new PermissionService(apiClient, actions); const projectService = new ProjectService(apiClient, actions); const repositoriesService = new RepositoriesService(apiClient, actions); @@ -79,7 +85,7 @@ export const createServices = (config: Config, actions: ApiActions, useApiClient const authService = new AuthService(apiClient, config.rootUrl, actions, (parse(idleTimeout, 's') || 0) > 0); - const collectionService = new CollectionService(apiClient, webdavClient, authService, actions); + const collectionService = new CollectionService(apiClient, keepWebdavClient, authService, actions); const ancestorsService = new AncestorService(groupsService, userService, collectionService); const favoriteService = new FavoriteService(linkService, groupsService); const tagService = new TagService(linkService); @@ -109,7 +115,8 @@ export const createServices = (config: Config, actions: ApiActions, useApiClient tagService, userService, virtualMachineService, - webdavClient, + keepWebdavClient, + apiWebdavClient, workflowService, vocabularyService, linkAccountService diff --git a/src/store/process-logs-panel/process-logs-panel-actions.ts b/src/store/process-logs-panel/process-logs-panel-actions.ts index 16177f18..a0d071fd 100644 --- a/src/store/process-logs-panel/process-logs-panel-actions.ts +++ b/src/store/process-logs-panel/process-logs-panel-actions.ts @@ -3,28 +3,35 @@ // SPDX-License-Identifier: AGPL-3.0 import { unionize, ofType, UnionOf } from "common/unionize"; -import { ProcessLogs, getProcessLogsPanelCurrentUuid } from './process-logs-panel'; +import { ProcessLogs } from './process-logs-panel'; import { LogEventType } from 'models/log'; import { RootState } from 'store/store'; import { ServiceRepository } from 'services/services'; import { Dispatch } from 'redux'; -import { groupBy, min, reverse } from 'lodash'; -import { LogResource } from 'models/log'; -import { LogService } from 'services/log-service/log-service'; -import { ResourceEventMessage } from 'websocket/resource-event-message'; -import { getProcess } from 'store/processes/process'; -import { FilterBuilder } from "services/api/filter-builder"; -import { OrderBuilder } from "services/api/order-builder"; +import { LogFragment, LogService, logFileToLogType } from 'services/log-service/log-service'; +import { Process, getProcess } from 'store/processes/process'; import { navigateTo } from 'store/navigation/navigation-action'; import { snackbarActions, SnackbarKind } from 'store/snackbar/snackbar-actions'; +import { CollectionFile, CollectionFileType } from "models/collection-file"; + +const SNIPLINE = `================ ✀ ================ ✀ ========= Some log(s) were skipped ========= ✀ ================ ✀ ================`; +const LOG_TIMESTAMP_PATTERN = /^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{9}Z/; export const processLogsPanelActions = unionize({ RESET_PROCESS_LOGS_PANEL: ofType<{}>(), 
INIT_PROCESS_LOGS_PANEL: ofType<{ filters: string[], logs: ProcessLogs }>(), SET_PROCESS_LOGS_PANEL_FILTER: ofType(), - ADD_PROCESS_LOGS_PANEL_ITEM: ofType<{ logType: string, log: string }>(), + ADD_PROCESS_LOGS_PANEL_ITEM: ofType(), }); +// Max size of logs to fetch in bytes +const maxLogFetchSize: number = 128 * 1000; + +type FileWithProgress = { + file: CollectionFile; + lastByte: number; +} + export type ProcessLogsPanelAction = UnionOf; export const setProcessLogsPanelFilter = (filter: string) => @@ -34,110 +41,218 @@ export const initProcessLogsPanel = (processUuid: string) => async (dispatch: Dispatch, getState: () => RootState, { logService }: ServiceRepository) => { dispatch(processLogsPanelActions.RESET_PROCESS_LOGS_PANEL()); const process = getProcess(processUuid)(getState().resources); - const maxPageSize = getState().auth.config.clusterConfig.API.MaxItemsPerResponse; - if (process && process.container) { - const logResources = await loadContainerLogs(process.container.uuid, logService, maxPageSize); - const initialState = createInitialLogPanelState(logResources); + if (process?.containerRequest?.uuid) { + // Get log file size info + const logFiles = await loadContainerLogFileList(process.containerRequest.uuid, logService); + + // Populate lastbyte 0 for each file + const filesWithProgress = logFiles.map((file) => ({file, lastByte: 0})); + + // Fetch array of LogFragments + const logLines = await loadContainerLogFileContents(filesWithProgress, logService, process); + + // Populate initial state with filters + const initialState = createInitialLogPanelState(logFiles, logLines); dispatch(processLogsPanelActions.INIT_PROCESS_LOGS_PANEL(initialState)); } }; -export const addProcessLogsPanelItem = (message: ResourceEventMessage<{ text: string }>) => +export const pollProcessLogs = (processUuid: string) => async (dispatch: Dispatch, getState: () => RootState, { logService }: ServiceRepository) => { - if (PROCESS_PANEL_LOG_EVENT_TYPES.indexOf(message.eventType) > -1) { - const uuid = getProcessLogsPanelCurrentUuid(getState().router); - if (!uuid) { return } - const process = getProcess(uuid)(getState().resources); - if (!process) { return } - const { containerRequest, container } = process; - if (message.objectUuid === containerRequest.uuid - || (container && message.objectUuid === container.uuid)) { - dispatch(processLogsPanelActions.ADD_PROCESS_LOGS_PANEL_ITEM({ - logType: ALL_FILTER_TYPE, - log: message.properties.text - })); - dispatch(processLogsPanelActions.ADD_PROCESS_LOGS_PANEL_ITEM({ - logType: message.eventType, - log: message.properties.text - })); - if (MAIN_EVENT_TYPES.indexOf(message.eventType) > -1) { - dispatch(processLogsPanelActions.ADD_PROCESS_LOGS_PANEL_ITEM({ - logType: MAIN_FILTER_TYPE, - log: message.properties.text - })); + try { + // Get log panel state and process from store + const currentState = getState().processLogsPanel; + const process = getProcess(processUuid)(getState().resources); + + // Check if container request is present and initial logs state loaded + if (process?.containerRequest?.uuid && Object.keys(currentState.logs).length > 0) { + const logFiles = await loadContainerLogFileList(process.containerRequest.uuid, logService); + + // Determine byte to fetch from while filtering unchanged files + const filesToUpdateWithProgress = logFiles.reduce((acc, updatedFile) => { + // Fetch last byte or 0 for new log files + const currentStateLogLastByte = currentState.logs[logFileToLogType(updatedFile)]?.lastByte || 0; + + const isNew = 
!Object.keys(currentState.logs).find((currentStateLogName) => (updatedFile.name.startsWith(currentStateLogName))); + const isChanged = !isNew && currentStateLogLastByte < updatedFile.size; + + if (isNew || isChanged) { + return acc.concat({file: updatedFile, lastByte: currentStateLogLastByte}); + } else { + return acc; + } + }, [] as FileWithProgress[]); + + // Perform range request(s) for each file + const logFragments = await loadContainerLogFileContents(filesToUpdateWithProgress, logService, process); + + if (logFragments.length) { + // Convert LogFragments to ProcessLogs with All/Main sorting & line-merging + const groupedLogs = groupLogs(logFiles, logFragments); + await dispatch(processLogsPanelActions.ADD_PROCESS_LOGS_PANEL_ITEM(groupedLogs)); } } + } catch (e) { + // Failed to poll, ignore error } }; -const loadContainerLogs = async (containerUuid: string, logService: LogService, maxPageSize: number) => { - const requestFilters = new FilterBuilder() - .addEqual('object_uuid', containerUuid) - .addIn('event_type', PROCESS_PANEL_LOG_EVENT_TYPES) - .getFilters(); - const requestOrderAsc = new OrderBuilder() - .addAsc('eventAt') - .getOrder(); - const requestOrderDesc = new OrderBuilder() - .addDesc('eventAt') - .getOrder(); - const { items, itemsAvailable } = await logService.list({ - limit: maxPageSize, - filters: requestFilters, - order: requestOrderAsc, - }); - - // Request additional logs if necessary - const remainingLogs = itemsAvailable - items.length; - if (remainingLogs > 0) { - const { items: itemsLast } = await logService.list({ - limit: min([maxPageSize, remainingLogs]), - filters: requestFilters, - order: requestOrderDesc, - count: 'none', - }) - if (remainingLogs - itemsLast.length > 0) { - const snipLine = { - ...items[items.length - 1], - eventType: LogEventType.SNIP, - properties: { - text: `================ 8< ================ 8< ========= Some log(s) were skipped ========= 8< ================ 8< ================` - }, - } - return [...items, snipLine, ...reverse(itemsLast)]; +const loadContainerLogFileList = async (containerUuid: string, logService: LogService) => { + const logCollectionContents = await logService.listLogFiles(containerUuid); + + // Filter only root directory files matching log event types which have bytes + return logCollectionContents.filter((file) => ( + file.path === `/arvados/v1/container_requests/${containerUuid}/log` && + PROCESS_PANEL_LOG_EVENT_TYPES.indexOf(logFileToLogType(file)) > -1 && + file.type === CollectionFileType.FILE && + file.size > 0 + )) as CollectionFile[]; +}; + +/** + * Loads the contents of each file from each file's lastByte simultaneously + * while respecting the maxLogFetchSize by requesting the start and end + * of the desired block and inserting a snipline. 
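[Illustrative sketch, not part of the patch] To make the lastByte bookkeeping above concrete, a small worked example with invented sizes: a file is fetched again only when it is new to the store or has grown since the previous poll, and the next request starts at the previously recorded byte.

    // Hypothetical poll: stderr grew, crunch-run is unchanged, node-info is new.
    const previousLastByte: Record<string, number> = { stderr: 1200, 'crunch-run': 3000 };
    const polledSize: Record<string, number> = { stderr: 4800, 'crunch-run': 3000, 'node-info': 512 };

    const toFetch = Object.entries(polledSize)
        .filter(([name, size]) => (previousLastByte[name] || 0) < size)
        .map(([name, size]) => ({ name, from: previousLastByte[name] || 0, to: size - 1 }));
    // toFetch: stderr bytes 1200-4799 and node-info bytes 0-511; crunch-run is skipped.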
+ * @param logFilesWithProgress CollectionFiles with the last byte previously loaded + * @param logService + * @param process + * @returns LogFragment[] containing a single LogFragment corresponding to each input file + */ +const loadContainerLogFileContents = async (logFilesWithProgress: FileWithProgress[], logService: LogService, process: Process) => ( + (await Promise.all(logFilesWithProgress.filter(({file}) => file.size > 0).map(({file, lastByte}) => { + const requestSize = file.size - lastByte; + if (requestSize > maxLogFetchSize) { + const chunkSize = Math.floor(maxLogFetchSize / 2); + const firstChunkEnd = lastByte+chunkSize-1; + return Promise.all([ + logService.getLogFileContents(process.containerRequest.uuid, file, lastByte, firstChunkEnd), + logService.getLogFileContents(process.containerRequest.uuid, file, file.size-chunkSize, file.size-1) + ] as Promise<(LogFragment | undefined)>[]); + } else { + return Promise.all([logService.getLogFileContents(process.containerRequest.uuid, file, lastByte, file.size-1)]); } - return [...items, ...reverse(itemsLast)]; + }))).filter((logResponseSet) => ( // Top level filter ensures array of LogFragments is not empty and contains 1 or more fragments containing log lines + logResponseSet.length && logResponseSet.some(logFragment => logFragment && logFragment.contents.length) + )).map((logResponseSet)=> { + // Remove fragments from subarrays that are undefined or have no lines + const responseSet = logResponseSet.filter((logFragment): logFragment is LogFragment => ( + !!logFragment && logFragment.contents.length > 0 + )); + + // For any multi fragment response set, modify the last line of non-final chunks to include a line break and snip line + // Don't add snip line as a separate line so that sorting won't reorder it + for (let i = 1; i < responseSet.length; i++) { + const fragment = responseSet[i-1]; + const lastLineIndex = fragment.contents.length-1; + const lastLineContents = fragment.contents[lastLineIndex]; + const newLastLine = `${lastLineContents}\n${SNIPLINE}`; + + responseSet[i-1].contents[lastLineIndex] = newLastLine; + } + + // Merge LogFragment Array (representing multiple log line arrays) into single LogLine[] / LogFragment + return responseSet.reduce((acc, curr: LogFragment) => ({ + logType: curr.logType, + contents: [...(acc.contents || []), ...curr.contents] + }), {} as LogFragment); + }) +); + +const createInitialLogPanelState = (logFiles: CollectionFile[], logFragments: LogFragment[]): {filters: string[], logs: ProcessLogs} => { + const logs = groupLogs(logFiles, logFragments); + const filters = Object.keys(logs); + return { filters, logs }; +} + +/** + * Converts LogFragments into ProcessLogs, grouping and sorting All/Main logs + * @param logFiles + * @param logFragments + * @returns ProcessLogs for the store + */ +const groupLogs = (logFiles: CollectionFile[], logFragments: LogFragment[]): ProcessLogs => { + const sortableLogFragments = mergeMultilineLoglines(logFragments); + + const allLogs = mergeSortLogFragments(sortableLogFragments); + const mainLogs = mergeSortLogFragments(sortableLogFragments.filter((fragment) => (MAIN_EVENT_TYPES.includes(fragment.logType)))); + + const groupedLogs = logFragments.reduce((grouped, fragment) => ({ + ...grouped, + [fragment.logType as string]: {lastByte: fetchLastByteNumber(logFiles, fragment.logType), contents: fragment.contents} + }), {}); + + return { + [MAIN_FILTER_TYPE]: {lastByte: undefined, contents: mainLogs}, + [ALL_FILTER_TYPE]: {lastByte: undefined, contents: allLogs}, + 
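[Illustrative sketch, not part of the patch] A worked example of the chunking arithmetic in loadContainerLogFileContents, with an invented file size: under the 128 kB fetch limit, a 300 kB file that has not been fetched yet is requested as a 64 kB head and a 64 kB tail, and the snipline is appended to the last head line so the gap stays visible after sorting.

    // Hypothetical file: 300,000 bytes, lastByte = 0 (never fetched).
    const maxLogFetchSize = 128 * 1000;
    const fileSize = 300 * 1000;
    const lastByte = 0;

    const chunkSize = Math.floor(maxLogFetchSize / 2);                 // 64000
    const head = { start: lastByte, end: lastByte + chunkSize - 1 };   // bytes=0-63999
    const tail = { start: fileSize - chunkSize, end: fileSize - 1 };   // bytes=236000-299999
    // The 172,000 bytes in between are skipped; SNIPLINE is appended to the
    // last line of the head chunk rather than added as a line of its own.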
...groupedLogs, } - return items; }; -const createInitialLogPanelState = (logResources: LogResource[]) => { - const allLogs = logsToLines(logResources); - const mainLogs = logsToLines(logResources.filter( - e => MAIN_EVENT_TYPES.indexOf(e.eventType) > -1 - )); - const groupedLogResources = groupBy(logResources, log => log.eventType); - const groupedLogs = Object - .keys(groupedLogResources) - .reduce((grouped, key) => ({ - ...grouped, - [key]: logsToLines(groupedLogResources[key]) - }), {}); - const filters = [ - MAIN_FILTER_TYPE, - ALL_FILTER_TYPE, - ...Object.keys(groupedLogs) - ].filter(e => e !== LogEventType.SNIP); - const logs = { - [MAIN_FILTER_TYPE]: mainLogs, - [ALL_FILTER_TYPE]: allLogs, - ...groupedLogs - }; - return { filters, logs }; +/** + * Checks for non-timestamped log lines and merges them with the previous line, assumes they are multi-line logs + * If there is no previous line (first line has no timestamp), the line is deleted. + * Only used for combined logs that need sorting by timestamp after merging + * @param logFragments + * @returns Modified LogFragment[] + */ +const mergeMultilineLoglines = (logFragments: LogFragment[]) => ( + logFragments.map((fragment) => { + // Avoid altering the original fragment copy + let fragmentCopy: LogFragment = { + logType: fragment.logType, + contents: [...fragment.contents], + } + // Merge any non-timestamped lines in sortable log types with previous line + if (fragmentCopy.contents.length && !NON_SORTED_LOG_TYPES.includes(fragmentCopy.logType)) { + for (let i = 0; i < fragmentCopy.contents.length; i++) { + const lineContents = fragmentCopy.contents[i]; + if (!lineContents.match(LOG_TIMESTAMP_PATTERN)) { + // Partial line without timestamp detected + if (i > 0) { + // If not first line, copy line to previous line + const previousLineContents = fragmentCopy.contents[i-1]; + const newPreviousLineContents = `${previousLineContents}\n${lineContents}`; + fragmentCopy.contents[i-1] = newPreviousLineContents; + } + // Delete the current line and prevent iterating + fragmentCopy.contents.splice(i, 1); + i--; + } + } + } + return fragmentCopy; + }) +); + +/** + * Merges log lines of different types and sorts types that contain timestamps (are sortable) + * @param logFragments + * @returns string[] of merged and sorted log lines + */ +const mergeSortLogFragments = (logFragments: LogFragment[]): string[] => { + const sortableLines = fragmentsToLines(logFragments + .filter((fragment) => (!NON_SORTED_LOG_TYPES.includes(fragment.logType)))); + + const nonSortableLines = fragmentsToLines(logFragments + .filter((fragment) => (NON_SORTED_LOG_TYPES.includes(fragment.logType))) + .sort((a, b) => (a.logType.localeCompare(b.logType)))); + + return [...sortableLines.sort(sortLogLines), ...nonSortableLines] }; -const logsToLines = (logs: LogResource[]) => - logs.map(({ properties }) => properties.text); +const sortLogLines = (a: string, b: string) => { + return a.localeCompare(b); +}; + +const fragmentsToLines = (fragments: LogFragment[]): string[] => ( + fragments.reduce((acc, fragment: LogFragment) => ( + acc.concat(...fragment.contents) + ), [] as string[]) +); + +const fetchLastByteNumber = (logFiles: CollectionFile[], key: string) => { + return logFiles.find((file) => (file.name.startsWith(key)))?.size +}; export const navigateToLogCollection = (uuid: string) => async (dispatch: Dispatch, getState: () => RootState, services: ServiceRepository) => { @@ -156,7 +271,6 @@ const MAIN_EVENT_TYPES = [ LogEventType.CRUNCH_RUN, LogEventType.STDERR, 
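[Illustrative sketch, not part of the patch] For the merge-and-sort step above, a short example with invented log lines: a continuation line without a timestamp is folded into the preceding line of its own fragment, after which lines from different sortable log types interleave correctly because the ISO-8601 timestamps compare lexically.

    const stdout = [
        '2023-07-18T12:00:01.000000000Z starting step',
        'stack trace line without timestamp',   // merged into the previous line
    ];
    const stderr = ['2023-07-18T12:00:00.500000000Z warning: low disk space'];

    const mergedStdout = [`${stdout[0]}\n${stdout[1]}`];
    const combined = [...mergedStdout, ...stderr].sort((a, b) => a.localeCompare(b));
    // combined[0] is the stderr warning (earlier timestamp); combined[1] is the
    // stdout line carrying its merged continuation.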
LogEventType.STDOUT, - LogEventType.SNIP, ]; const PROCESS_PANEL_LOG_EVENT_TYPES = [ @@ -170,5 +284,9 @@ const PROCESS_PANEL_LOG_EVENT_TYPES = [ LogEventType.STDOUT, LogEventType.CONTAINER, LogEventType.KEEPSTORE, - LogEventType.SNIP, +]; + +const NON_SORTED_LOG_TYPES = [ + LogEventType.NODE_INFO, + LogEventType.CONTAINER, ]; diff --git a/src/store/process-logs-panel/process-logs-panel-reducer.ts b/src/store/process-logs-panel/process-logs-panel-reducer.ts index c502f1b1..e7dd3562 100644 --- a/src/store/process-logs-panel/process-logs-panel-reducer.ts +++ b/src/store/process-logs-panel/process-logs-panel-reducer.ts @@ -2,13 +2,13 @@ // // SPDX-License-Identifier: AGPL-3.0 -import { ProcessLogsPanel } from './process-logs-panel'; +import { ProcessLogs, ProcessLogsPanel } from './process-logs-panel'; import { ProcessLogsPanelAction, processLogsPanelActions } from './process-logs-panel-actions'; const initialState: ProcessLogsPanel = { filters: [], selectedFilter: '', - logs: { '': [] }, + logs: {}, }; export const processLogsPanelReducer = (state = initialState, action: ProcessLogsPanelAction): ProcessLogsPanel => @@ -23,13 +23,24 @@ export const processLogsPanelReducer = (state = initialState, action: ProcessLog ...state, selectedFilter }), - ADD_PROCESS_LOGS_PANEL_ITEM: ({ logType, log }) => { - const filters = state.filters.indexOf(logType) > -1 - ? state.filters - : [...state.filters, logType]; - const currentLogs = state.logs[logType] || []; - const logsOfType = [...currentLogs, log]; - const logs = { ...state.logs, [logType]: logsOfType }; + ADD_PROCESS_LOGS_PANEL_ITEM: (groupedLogs: ProcessLogs) => { + // Update filters + const newFilters = Object.keys(groupedLogs).filter((logType) => (!state.filters.includes(logType))); + const filters = [...state.filters, ...newFilters]; + + // Append new log lines + const logs = Object.keys(groupedLogs).reduce((acc, logType) => { + if (Object.keys(acc).includes(logType)) { + // If log type exists, append lines and update lastByte + return {...acc, [logType]: { + lastByte: groupedLogs[logType].lastByte, + contents: [...acc[logType].contents, ...groupedLogs[logType].contents] + }}; + } else { + return {...acc, [logType]: groupedLogs[logType]}; + } + }, state.logs); + return { ...state, logs, filters }; }, default: () => state, diff --git a/src/store/process-logs-panel/process-logs-panel.ts b/src/store/process-logs-panel/process-logs-panel.ts index 0ca5d679..531d3723 100644 --- a/src/store/process-logs-panel/process-logs-panel.ts +++ b/src/store/process-logs-panel/process-logs-panel.ts @@ -12,11 +12,11 @@ export interface ProcessLogsPanel { } export interface ProcessLogs { - [logType: string]: string[]; + [logType: string]: {lastByte: number | undefined, contents: string[]}; } -export const getProcessPanelLogs = ({ selectedFilter, logs }: ProcessLogsPanel) => { - return logs[selectedFilter]; +export const getProcessPanelLogs = ({ selectedFilter, logs }: ProcessLogsPanel): string[] => { + return logs[selectedFilter]?.contents || []; }; export const getProcessLogsPanelCurrentUuid = (router: RouterState) => { diff --git a/src/views/process-panel/process-log-card.tsx b/src/views/process-panel/process-log-card.tsx index e14f98f9..bcd4b240 100644 --- a/src/views/process-panel/process-log-card.tsx +++ b/src/views/process-panel/process-log-card.tsx @@ -15,6 +15,7 @@ import { Grid, Typography, } from '@material-ui/core'; +import { useAsyncInterval } from 'common/use-async-interval'; import { ArvadosTheme } from 'common/custom-theme'; import { CloseIcon, @@ 
-84,6 +85,7 @@ export interface ProcessLogsCardActionProps { onLogFilterChange: (filter: FilterOption) => void; navigateToLog: (uuid: string) => void; onCopy: (text: string) => void; + pollProcessLogs: (processUuid: string) => Promise; } type ProcessLogsCardProps = ProcessLogsCardDataProps @@ -94,13 +96,17 @@ type ProcessLogsCardProps = ProcessLogsCardDataProps export const ProcessLogsCard = withStyles(styles)( ({ classes, process, filters, selectedFilter, lines, - onLogFilterChange, navigateToLog, onCopy, + onLogFilterChange, navigateToLog, onCopy, pollProcessLogs, doHidePanel, doMaximizePanel, doUnMaximizePanel, panelMaximized, panelName }: ProcessLogsCardProps) => { const [wordWrap, setWordWrap] = useState(true); const [fontSize, setFontSize] = useState(3); const fontBaseSize = 10; const fontStepSize = 1; + useAsyncInterval(() => ( + pollProcessLogs(process.containerRequest.uuid) + ), 2000); + return }); - diff --git a/src/views/process-panel/process-log-code-snippet.tsx b/src/views/process-panel/process-log-code-snippet.tsx index 2b7391c2..50d343d6 100644 --- a/src/views/process-panel/process-log-code-snippet.tsx +++ b/src/views/process-panel/process-log-code-snippet.tsx @@ -33,7 +33,7 @@ const styles: StyleRulesCallback = (theme: ArvadosTheme) => ({ }, }, logText: { - padding: theme.spacing.unit * 0.5, + padding: `0 ${theme.spacing.unit*0.5}px`, }, wordWrap: { whiteSpace: 'pre-wrap', @@ -126,4 +126,4 @@ export const ProcessLogCodeSnippet = withStyles(styles)(connect(mapStateToProps) ) } - })); \ No newline at end of file + })); diff --git a/src/views/process-panel/process-panel-root.tsx b/src/views/process-panel/process-panel-root.tsx index d99c62ec..c04cf62a 100644 --- a/src/views/process-panel/process-panel-root.tsx +++ b/src/views/process-panel/process-panel-root.tsx @@ -61,6 +61,7 @@ export interface ProcessPanelRootActionProps { loadNodeJson: (containerRequest: ContainerRequestResource) => void; loadOutputDefinitions: (containerRequest: ContainerRequestResource) => void; updateOutputParams: () => void; + pollProcessLogs: (processUuid: string) => Promise; } export type ProcessPanelRootProps = ProcessPanelRootDataProps & ProcessPanelRootActionProps & WithStyles; @@ -147,6 +148,7 @@ export const ProcessPanelRoot = withStyles(styles)( )} onLogFilterChange={props.onLogFilterChange} navigateToLog={props.navigateToLog} + pollProcessLogs={props.pollProcessLogs} /> diff --git a/src/views/process-panel/process-panel.tsx b/src/views/process-panel/process-panel.tsx index 9dcb72cf..575c6591 100644 --- a/src/views/process-panel/process-panel.tsx +++ b/src/views/process-panel/process-panel.tsx @@ -26,7 +26,7 @@ import { loadNodeJson } from 'store/process-panel/process-panel-actions'; import { cancelRunningWorkflow, resumeOnHoldWorkflow, startWorkflow } from 'store/processes/processes-actions'; -import { navigateToLogCollection, setProcessLogsPanelFilter } from 'store/process-logs-panel/process-logs-panel-actions'; +import { navigateToLogCollection, pollProcessLogs, setProcessLogsPanelFilter } from 'store/process-logs-panel/process-logs-panel-actions'; import { snackbarActions, SnackbarKind } from 'store/snackbar/snackbar-actions'; const mapStateToProps = ({ router, auth, resources, processPanel, processLogsPanel }: RootState): ProcessPanelRootDataProps => { @@ -71,6 +71,7 @@ const mapDispatchToProps = (dispatch: Dispatch): ProcessPanelRootActionProps => loadOutputDefinitions: (containerRequest) => dispatch(loadOutputDefinitions(containerRequest)), updateOutputParams: () => 
dispatch(updateOutputParams()), loadNodeJson: (containerRequest) => dispatch(loadNodeJson(containerRequest)), + pollProcessLogs: (processUuid) => dispatch(pollProcessLogs(processUuid)), }); const getFilters = (processPanel: ProcessPanelState, processes: Process[]) => { diff --git a/src/websocket/websocket.ts b/src/websocket/websocket.ts index 6bb505b3..39940ce5 100644 --- a/src/websocket/websocket.ts +++ b/src/websocket/websocket.ts @@ -11,7 +11,6 @@ import { ResourceKind } from 'models/resource'; import { loadProcess } from 'store/processes/processes-actions'; import { getProcess, getSubprocesses } from 'store/processes/process'; import { LogEventType } from 'models/log'; -import { addProcessLogsPanelItem } from 'store/process-logs-panel/process-logs-panel-actions'; import { subprocessPanelActions } from "store/subprocess-panel/subprocess-panel-actions"; import { projectPanelActions } from "store/project-panel/project-panel-action"; import { getProjectPanelCurrentUuid } from 'store/project-panel/project-panel-action'; @@ -73,7 +72,5 @@ const messageListener = (store: RootStore) => (message: ResourceEventMessage) => default: return; } - } else { - return store.dispatch(addProcessLogsPanelItem(message as ResourceEventMessage<{ text: string }>)); } }; -- 2.30.2
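[Illustrative sketch, not part of the patch] Taken together, the polling card, action, and reducer above behave roughly as follows (state values invented): every 2 seconds the card dispatches pollProcessLogs, each returned fragment is appended to the matching log type, and lastByte advances so the next poll requests only the new bytes.

    // Hypothetical stderr state before and after one poll.
    const before = { stderr: { lastByte: 1200, contents: ['2023-07-18T12:00:00.000000000Z err 1'] } };

    // The poll saw stderr.txt grow to 1260 bytes and fetched bytes 1200-1259.
    const fragment = { stderr: { lastByte: 1260, contents: ['2023-07-18T12:00:02.000000000Z err 2'] } };

    const after = {
        stderr: {
            lastByte: fragment.stderr.lastByte,
            contents: [...before.stderr.contents, ...fragment.stderr.contents],
        },
    };
    // after.stderr now holds both lines and records lastByte 1260, so the next
    // poll only requests bytes 1260 and up.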