Commit fad69502 authored by Jose Vargas

Make collapsible job sections infinitely nested

This makes the collapsible sections of the job log
infinitely nestable, fixing an existing bug and
introducing the new functionality through a new
line parser function.
parent 132a369d
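A minimal sketch of how the new parser is intended to be used and what it returns. The function name, import path, and return shape come from the utils.js and spec changes below; the log lines themselves are made up for illustration.

// Illustrative input only; real lines come from the job trace endpoint.
import { logLinesParserNew } from '~/jobs/store/utils';

const { parsedLines, auxiliaryPartialTraceHelpers } = logLinesParserNew([
  { offset: 1, content: [{ text: 'Preparing environment' }] },
  { offset: 2, content: [{ text: 'outer section' }], section: 'outer', section_header: true },
  { offset: 3, content: [{ text: 'inner section' }], section: 'inner', section_header: true },
  { offset: 4, content: [{ text: 'doing work' }], section: 'inner' },
  { offset: 5, content: [], section: 'inner', section_duration: '00:01' },
  { offset: 6, content: [], section: 'outer', section_duration: '00:02' },
]);

// parsedLines[1] is the outer header ({ isHeader, isClosed, line, lines });
// its `lines` array contains the fully parsed inner header with its own
// nested `lines`, which the now self-referencing <collapsible-log-section>
// component renders recursively. auxiliaryPartialTraceHelpers carries
// currentHeader, sectionsQueue and prevLineCount so a later appended chunk
// can be parsed as a continuation (see the mutations.js change below).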
@@ -7,6 +7,7 @@ export default {
components: {
LogLine,
LogLineHeader,
CollapsibleLogSection: () => import('./collapsible_section.vue'),
},
props: {
section: {
@@ -22,6 +23,9 @@ export default {
badgeDuration() {
return this.section.line && this.section.line.section_duration;
},
infinitelyNestedCollapsibleSections() {
return gon.features.infinitelyCollapsibleSections;
},
},
methods: {
handleOnClickCollapsibleLine(section) {
@@ -40,6 +44,19 @@ export default {
@toggleLine="handleOnClickCollapsibleLine(section)"
/>
<template v-if="!section.isClosed">
<template v-if="infinitelyNestedCollapsibleSections">
<template v-for="line in section.lines">
<collapsible-log-section
v-if="line.isHeader"
:key="line.line.offset"
:section="line"
:trace-endpoint="traceEndpoint"
@onClickCollapsibleLine="handleOnClickCollapsibleLine"
/>
<log-line v-else :key="line.offset" :line="line" :path="traceEndpoint" />
</template>
</template>
<template v-else>
<log-line
v-for="line in section.lines"
:key="line.offset"
@@ -47,5 +64,6 @@ export default {
:path="traceEndpoint"
/>
</template>
</template>
</div>
</template>
@@ -14,7 +14,9 @@ export default {
render(h, { props }) {
const { lineNumber, path } = props;
const parsedLineNumber = gon.features?.infinitelyCollapsibleSections
? lineNumber
: lineNumber + 1;
const lineId = `L${parsedLineNumber}`;
const lineHref = `${path}#${lineId}`;
...
import Vue from 'vue';
import * as types from './mutation_types';
import { logLinesParser, logLinesParserNew, updateIncrementalTrace } from './utils';
export default {
[types.SET_JOB_ENDPOINT](state, endpoint) {
@@ -20,12 +20,25 @@ export default {
},
[types.RECEIVE_TRACE_SUCCESS](state, log = {}) {
const infinitelyCollapsibleSectionsFlag = gon.features.infinitelyCollapsibleSections;
if (log.state) {
state.traceState = log.state;
}
if (log.append) {
if (infinitelyCollapsibleSectionsFlag) {
if (log.lines) {
const parsedResult = logLinesParserNew(
log.lines,
state.auxiliaryPartialTraceHelpers,
state.trace,
);
state.trace = parsedResult.parsedLines;
state.auxiliaryPartialTraceHelpers = parsedResult.auxiliaryPartialTraceHelpers;
}
} else {
state.trace = log.lines ? updateIncrementalTrace(log.lines, state.trace) : state.trace;
}
state.traceSize += log.size;
} else {
@@ -33,7 +46,14 @@ export default {
// the trace response will not have a defined
// html or size. We keep the old value otherwise these
// will be set to `null`
if (infinitelyCollapsibleSectionsFlag) {
const parsedResult = logLinesParserNew(log.lines);
state.trace = parsedResult.parsedLines;
state.auxiliaryPartialTraceHelpers = parsedResult.auxiliaryPartialTraceHelpers;
} else {
state.trace = log.lines ? logLinesParser(log.lines) : state.trace;
}
state.traceSize = log.size || state.traceSize;
}
...
@@ -30,4 +30,7 @@ export default () => ({
selectedStage: '',
stages: [],
jobs: [],
// to parse partial logs
auxiliaryPartialTraceHelpers: {},
});
@@ -131,6 +131,75 @@ export const logLinesParser = (lines = [], accumulator = []) =>
[...accumulator],
);
export const logLinesParserNew = (lines = [], previousTraceState = {}, prevParsedLines = []) => {
let currentLine = previousTraceState?.prevLineCount ? previousTraceState.prevLineCount : 0;
let currentHeader = previousTraceState?.currentHeader ? previousTraceState.currentHeader : null;
let isPreviousLineHeader = previousTraceState?.isPreviousLineHeader
? previousTraceState.isPreviousLineHeader
: false;
const parsedLines = prevParsedLines.length > 0 ? prevParsedLines : [];
const sectionsQueue = previousTraceState?.sectionsQueue ? previousTraceState.sectionsQueue : [];
for (let i = 0; i < lines.length; i += 1) {
const line = lines[i];
// First runs we can use the current index, later runs we have to retrieve the last number of lines
currentLine = previousTraceState?.prevLineCount ? currentLine + 1 : i + 1;
if (line.section_header && !isPreviousLineHeader) {
// If there's no previous line header that means we're at the root of the log
isPreviousLineHeader = true;
parsedLines.push(parseHeaderLine(line, currentLine));
currentHeader = { index: parsedLines.length - 1 };
} else if (line.section_header && isPreviousLineHeader) {
// If there's a current section, we can't push to the parsedLines array
sectionsQueue.push(currentHeader);
currentHeader = parseHeaderLine(line, currentLine); // Let's parse the incoming header line
} else if (line.section && !line.section_duration) {
if (currentHeader?.index) {
parsedLines[currentHeader.index].lines.push(parseLine(line, currentLine));
} else {
currentHeader.lines.push(parseLine(line, currentLine));
}
} else if (line.section && line.section_duration) {
// NOTE: This marks the end of a section_header
const previousSection = sectionsQueue.pop();
// Add the duration to section header
// If at the root, just push the end to the current parsedLine,
// otherwise, push it to the previous sections queue
if (currentHeader?.index) {
parsedLines[currentHeader.index].line.section_duration = line.section_duration;
isPreviousLineHeader = false;
currentHeader = null;
} else {
currentHeader.line.section_duration = line.section_duration;
if (previousSection && previousSection?.index) {
// Is the previous section on root?
parsedLines[previousSection.index].lines.push(currentHeader);
} else if (previousSection && !previousSection?.index) {
previousSection.lines.push(currentHeader);
}
currentHeader = previousSection;
}
} else {
parsedLines.push(parseLine(line, currentLine));
}
}
return {
parsedLines,
auxiliaryPartialTraceHelpers: {
isPreviousLineHeader,
currentHeader,
sectionsQueue,
prevLineCount: lines.length,
},
};
};
/**
* Finds the repeated offset, removes the old one
*
...
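As a rough sketch of the incremental flow this parser enables, mirroring what the RECEIVE_TRACE_SUCCESS mutation earlier in this diff does when the flag is enabled and log.append is set. The chunking helper below is hypothetical and not part of this change.

import { logLinesParserNew } from '~/jobs/store/utils';

// Hypothetical helper: parse a job log delivered in several appended chunks,
// threading the helpers object and the already-parsed lines between calls.
export function parseTraceChunks(chunks) {
  let parsedLines = [];
  let helpers;

  chunks.forEach((chunk) => {
    // Previous helpers and parsed lines are fed back in so open sections and
    // line numbers continue seamlessly across appended responses.
    const result = logLinesParserNew(chunk, helpers, parsedLines);
    parsedLines = result.parsedLines;
    helpers = result.auxiliaryPartialTraceHelpers;
  });

  return { parsedLines, helpers };
}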
@@ -17,6 +17,10 @@ class Projects::JobsController < Projects::ApplicationController
before_action :verify_proxy_request!, only: :proxy_websocket_authorize
before_action :push_jobs_table_vue, only: [:index]
before_action do
push_frontend_feature_flag(:infinitely_collapsible_sections, @project, default_enabled: :yaml)
end
layout 'project'
feature_category :continuous_integration
...
---
name: infinitely_collapsible_sections
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/65496
rollout_issue_url:
milestone: '14.1'
type: development
group: group::pipeline execution
default_enabled: false
@@ -24,6 +24,7 @@ describe('Job App', () => {
let store;
let wrapper;
let mock;
let origGon;
const initSettings = {
endpoint: `${TEST_HOST}jobs/123.json`,
@@ -85,11 +86,17 @@ describe('Job App', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
store = createStore();
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: all of these tests also pass with the feature flag enabled
});
afterEach(() => {
wrapper.destroy();
mock.restore();
window.gon = origGon;
});
describe('while loading', () => {
...
@@ -4,6 +4,7 @@ import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data'
describe('Job Log Collapsible Section', () => {
let wrapper;
let origGon;
const traceEndpoint = 'jobs/335';
@@ -18,8 +19,16 @@ describe('Job Log Collapsible Section', () => {
});
};
beforeEach(() => {
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: This also works with true
});
afterEach(() => {
wrapper.destroy();
window.gon = origGon;
});
describe('with closed section', () => {
...
@@ -9,6 +9,7 @@ describe('Job Log', () => {
let actions;
let state;
let store;
let origGon;
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -25,6 +26,10 @@
toggleCollapsibleLine: () => {},
};
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } }; // NOTE: This also passes with the feature flag set to ON
state = {
trace: logLinesParser(jobLog),
traceEndpoint: 'jobs/id',
@@ -40,6 +45,8 @@
afterEach(() => {
wrapper.destroy();
window.gon = origGon;
});
const findCollapsibleLine = () => wrapper.find('.collapsible-line');
...
@@ -58,6 +58,71 @@ export const utilsMockData = [
},
];
export const multipleCollapsibleSectionsMockData = [
{
offset: 1001,
content: [{ text: ' on docker-auto-scale-com 8a6210b8' }],
},
{
offset: 1002,
content: [
{
text: 'Executing "step_script" stage of the job script',
},
],
section: 'step-script',
section_header: true,
},
{
offset: 1003,
content: [{ text: 'sleep 60' }],
section: 'step-script',
},
{
offset: 1004,
content: [
{
text:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
},
],
section: 'step-script',
},
{
offset: 1005,
content: [{ text: 'executing...' }],
section: 'step-script',
},
{
offset: 1006,
content: [{ text: '1st collapsible section' }],
section: 'collapsible-1',
section_header: true,
},
{
offset: 1007,
content: [
{
text:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
},
],
section: 'collapsible-1',
},
{
offset: 1008,
content: [],
section: 'collapsible-1',
section_duration: '01:00',
},
{
offset: 1009,
content: [],
section: 'step-script',
section_duration: '10:00',
},
];
export const originalTrace = [
{
offset: 1,
...
@@ -4,12 +4,21 @@ import state from '~/jobs/store/state';
describe('Jobs Store Mutations', () => {
let stateCopy;
let origGon;
const html =
'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
beforeEach(() => {
stateCopy = state();
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: false } };
});
afterEach(() => {
window.gon = origGon;
});
describe('SET_JOB_ENDPOINT', () => {
@@ -267,3 +276,88 @@ describe('Jobs Store Mutations', () => {
});
});
});
describe('Job Store mutations, feature flag ON', () => {
let stateCopy;
let origGon;
const html =
'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
beforeEach(() => {
stateCopy = state();
origGon = window.gon;
window.gon = { features: { infinitelyCollapsibleSections: true } };
});
afterEach(() => {
window.gon = origGon;
});
describe('RECEIVE_TRACE_SUCCESS', () => {
describe('with new job log', () => {
describe('log.lines', () => {
describe('when append is true', () => {
it('sets the parsed log ', () => {
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
append: true,
size: 511846,
complete: true,
lines: [
{
offset: 1,
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
},
],
});
expect(stateCopy.trace).toEqual([
{
offset: 1,
content: [{ text: 'Running with gitlab-runner 11.12.1 (5a147c92)' }],
lineNumber: 1,
},
]);
});
});
describe('when it is defined', () => {
it('sets the parsed log ', () => {
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
append: false,
size: 511846,
complete: true,
lines: [
{ offset: 0, content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }] },
],
});
expect(stateCopy.trace).toEqual([
{
offset: 0,
content: [{ text: 'Running with gitlab-runner 11.11.1 (5a147c92)' }],
lineNumber: 1,
},
]);
});
});
describe('when it is null', () => {
it('sets the default value', () => {
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
append: true,
html,
size: 511846,
complete: false,
lines: null,
});
expect(stateCopy.trace).toEqual([]);
});
});
});
});
});
});
import {
logLinesParser,
logLinesParserNew,
updateIncrementalTrace,
parseHeaderLine,
parseLine,
@@ -17,6 +18,7 @@ import {
headerTraceIncremental,
collapsibleTrace,
collapsibleTraceIncremental,
multipleCollapsibleSectionsMockData,
} from '../components/log/mock_data';
describe('Jobs Store Utils', () => {
@@ -216,6 +218,87 @@ describe('Jobs Store Utils', () => {
});
});
describe('logLinesParserNew', () => {
let result;
beforeEach(() => {
result = logLinesParserNew(utilsMockData);
});
describe('regular line', () => {
it('adds a lineNumber property with correct index', () => {
expect(result.parsedLines[0].lineNumber).toEqual(1);
expect(result.parsedLines[1].line.lineNumber).toEqual(2);
});
});
describe('collapsible section', () => {
it('adds a `isClosed` property', () => {
expect(result.parsedLines[1].isClosed).toEqual(false);
});
it('adds a `isHeader` property', () => {
expect(result.parsedLines[1].isHeader).toEqual(true);
});
it('creates a lines array property with the content of the collapsible section', () => {
expect(result.parsedLines[1].lines.length).toEqual(2);
expect(result.parsedLines[1].lines[0].content).toEqual(utilsMockData[2].content);
expect(result.parsedLines[1].lines[1].content).toEqual(utilsMockData[3].content);
});
});
describe('section duration', () => {
it('adds the section information to the header section', () => {
expect(result.parsedLines[1].line.section_duration).toEqual(
utilsMockData[4].section_duration,
);
});
it('does not add section duration as a line', () => {
expect(result.parsedLines[1].lines.includes(utilsMockData[4])).toEqual(false);
});
});
describe('multiple collapsible sections', () => {
beforeEach(() => {
result = logLinesParserNew(multipleCollapsibleSectionsMockData);
});
it('should contain a section inside another section', () => {
const innerSection = [
{
isClosed: false,
isHeader: true,
line: {
content: [{ text: '1st collapsible section' }],
lineNumber: 6,
offset: 1006,
section: 'collapsible-1',
section_duration: '01:00',
section_header: true,
},
lines: [
{
content: [
{
text:
'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Etiam lorem dolor, congue ac condimentum vitae',
},
],
lineNumber: 7,
offset: 1007,
section: 'collapsible-1',
},
],
},
];
expect(result.parsedLines[1].lines).toEqual(expect.arrayContaining(innerSection));
});
});
});
describe('findOffsetAndRemove', () => {
describe('when last item is header', () => {
const existingLog = [
...