import { navigateTo } from 'store/navigation/navigation-action';
import { snackbarActions, SnackbarKind } from 'store/snackbar/snackbar-actions';
import { CollectionFile, CollectionFileType } from "models/collection-file";
import { ContainerRequestResource, ContainerRequestState } from "models/container-request";
const SNIPLINE = `================ ✀ ================ ✀ ========= Some log(s) were skipped ========= ✀ ================ ✀ ================`;
const LOG_TIMESTAMP_PATTERN = /^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{9}Z/;
export const initProcessLogsPanel = (processUuid: string) =>
async (dispatch: Dispatch, getState: () => RootState, { logService }: ServiceRepository) => {
+ let process: Process | undefined;
try {
dispatch(processLogsPanelActions.RESET_PROCESS_LOGS_PANEL());
- const process = getProcess(processUuid)(getState().resources);
+ process = getProcess(processUuid)(getState().resources);
if (process?.containerRequest?.uuid) {
// Get log file size info
const logFiles = await loadContainerLogFileList(process.containerRequest, logService);
// Populate lastbyte 0 for each file
- const filesWithProgress = logFiles.map((file) => ({file, lastByte: 0}));
+ const filesWithProgress = logFiles.map((file) => ({ file, lastByte: 0 }));
// Fetch array of LogFragments
const logLines = await loadContainerLogFileContents(filesWithProgress, logService, process);
const initialState = createInitialLogPanelState(logFiles, logLines);
dispatch(processLogsPanelActions.INIT_PROCESS_LOGS_PANEL(initialState));
}
- } catch(e) {
+ } catch (e) {
// On error, populate empty state to allow polling to start
const initialState = createInitialLogPanelState([], []);
dispatch(processLogsPanelActions.INIT_PROCESS_LOGS_PANEL(initialState));
// Only show toast on errors other than 404 since 404 is expected when logs do not exist yet
if (e.status !== 404) {
- dispatch(snackbarActions.OPEN_SNACKBAR({ message: 'Could not load process logs', hideDuration: 2000, kind: SnackbarKind.ERROR }));
+ dispatch(snackbarActions.OPEN_SNACKBAR({ message: 'Error loading process logs', hideDuration: 4000, kind: SnackbarKind.ERROR }));
+ }
+ if (e.status === 404 && process?.containerRequest.state === ContainerRequestState.FINAL) {
+ dispatch(snackbarActions.OPEN_SNACKBAR({ message: 'Log collection was trashed or deleted.', hideDuration: 4000, kind: SnackbarKind.WARNING }));
}
}
};
const isChanged = !isNew && currentStateLogLastByte < updatedFile.size;
if (isNew || isChanged) {
- return acc.concat({file: updatedFile, lastByte: currentStateLogLastByte});
+ return acc.concat({ file: updatedFile, lastByte: currentStateLogLastByte });
} else {
return acc;
}
* @returns LogFragment[] containing a single LogFragment corresponding to each input file
*/
const loadContainerLogFileContents = async (logFilesWithProgress: FileWithProgress[], logService: LogService, process: Process) => (
- (await Promise.allSettled(logFilesWithProgress.filter(({file}) => file.size > 0).map(({file, lastByte}) => {
+ (await Promise.allSettled(logFilesWithProgress.filter(({ file }) => file.size > 0).map(({ file, lastByte }) => {
const requestSize = file.size - lastByte;
if (requestSize > maxLogFetchSize) {
const chunkSize = Math.floor(maxLogFetchSize / 2);
- const firstChunkEnd = lastByte+chunkSize-1;
+ const firstChunkEnd = lastByte + chunkSize - 1;
return Promise.all([
logService.getLogFileContents(process.containerRequest, file, lastByte, firstChunkEnd),
- logService.getLogFileContents(process.containerRequest, file, file.size-chunkSize, file.size-1)
+ logService.getLogFileContents(process.containerRequest, file, file.size - chunkSize, file.size - 1)
] as Promise<(LogFragment)>[]);
} else {
- return Promise.all([logService.getLogFileContents(process.containerRequest, file, lastByte, file.size-1)]);
+ return Promise.all([logService.getLogFileContents(process.containerRequest, file, lastByte, file.size - 1)]);
}
})).then((res) => {
if (res.length && res.every(promiseResult => (promiseResult.status === 'rejected'))) {
// error if every request failed
const error = res.find(
(promiseResult): promiseResult is PromiseRejectedResult => promiseResult.status === 'rejected'
- )?.reason;
+ )?.reason;
return Promise.reject(error);
}
return res.filter((promiseResult): promiseResult is PromiseFulfilledResult<LogFragment[]> => (
// (prevent incorrect snipline generation or an un-resumable situation)
!!promiseResult.value.every(logFragment => logFragment.contents.length)
)).map(one => one.value)
- })).map((logResponseSet)=> {
+ })).map((logResponseSet) => {
// For any multi fragment response set, modify the last line of non-final chunks to include a line break and snip line
// Don't add snip line as a separate line so that sorting won't reorder it
for (let i = 1; i < logResponseSet.length; i++) {
- const fragment = logResponseSet[i-1];
- const lastLineIndex = fragment.contents.length-1;
+ const fragment = logResponseSet[i - 1];
+ const lastLineIndex = fragment.contents.length - 1;
const lastLineContents = fragment.contents[lastLineIndex];
const newLastLine = `${lastLineContents}\n${SNIPLINE}`;
- logResponseSet[i-1].contents[lastLineIndex] = newLastLine;
+ logResponseSet[i - 1].contents[lastLineIndex] = newLastLine;
}
// Merge LogFragment Array (representing multiple log line arrays) into single LogLine[] / LogFragment
})
);
-const createInitialLogPanelState = (logFiles: CollectionFile[], logFragments: LogFragment[]): {filters: string[], logs: ProcessLogs} => {
+const createInitialLogPanelState = (logFiles: CollectionFile[], logFragments: LogFragment[]): { filters: string[], logs: ProcessLogs } => {
const logs = groupLogs(logFiles, logFragments);
const filters = Object.keys(logs);
return { filters, logs };
const groupedLogs = logFragments.reduce((grouped, fragment) => ({
...grouped,
- [fragment.logType as string]: {lastByte: fetchLastByteNumber(logFiles, fragment.logType), contents: fragment.contents}
+ [fragment.logType as string]: { lastByte: fetchLastByteNumber(logFiles, fragment.logType), contents: fragment.contents }
}), {});
return {
- [MAIN_FILTER_TYPE]: {lastByte: undefined, contents: mainLogs},
- [ALL_FILTER_TYPE]: {lastByte: undefined, contents: allLogs},
+ [MAIN_FILTER_TYPE]: { lastByte: undefined, contents: mainLogs },
+ [ALL_FILTER_TYPE]: { lastByte: undefined, contents: allLogs },
...groupedLogs,
}
};
// Partial line without timestamp detected
if (i > 0) {
// If not first line, copy line to previous line
- const previousLineContents = fragmentCopy.contents[i-1];
+ const previousLineContents = fragmentCopy.contents[i - 1];
const newPreviousLineContents = `${previousLineContents}\n${lineContents}`;
- fragmentCopy.contents[i-1] = newPreviousLineContents;
+ fragmentCopy.contents[i - 1] = newPreviousLineContents;
}
// Delete the current line and prevent iterating
fragmentCopy.contents.splice(i, 1);
await services.collectionService.get(uuid);
dispatch<any>(navigateTo(uuid));
} catch {
- dispatch(snackbarActions.OPEN_SNACKBAR({ message: 'Could not request collection', hideDuration: 2000, kind: SnackbarKind.ERROR }));
+ dispatch(snackbarActions.OPEN_SNACKBAR({ message: 'Log collection was trashed or deleted.', hideDuration: 4000, kind: SnackbarKind.WARNING }));
}
};