Commit 02e105d6 authored by Paul Slaughter

Merge branch '197402-diff-batch-loads-overwrites-previous-diff-files' into 'master'

Fix authoritative source of truth for diff files when loading batched diffs

See merge request gitlab-org/gitlab!23274
parents a8779fd0 4021438d
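The theme of the change: instead of letting each batched diffs response overwrite whatever is already in the store, prepareDiffData becomes the authoritative merge point, combining previously loaded files with the new batch and de-duplicating them by a per-file ID. A self-contained sketch of that idea (the helper names below mirror, but are not, the real code):

// Illustrative only: merge a new batch of diff files into the files already loaded,
// keyed by the same content_sha/file_hash pair the real code uses.
const uniqueId = file => `${file.content_sha}-${file.file_hash}`;

function mergeBatch(existingFiles, batchFiles) {
  const byId = new Map(existingFiles.map(file => [uniqueId(file), file]));

  batchFiles.forEach(file => {
    const prior = byId.get(uniqueId(file));
    // Keep what the store already has; let the new payload fill in anything missing.
    byId.set(uniqueId(file), prior ? { ...file, ...prior } : file);
  });

  return [...byId.values()];
}

// Replacing the list outright (diffFiles = batch.diff_files) would drop page 1 when
// page 2 arrives; merging keeps every file seen so far exactly once.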
@@ -111,15 +111,22 @@ export const fetchDiffFilesBatch = ({ commit, state }) => {
   commit(types.SET_BATCH_LOADING, true);
   commit(types.SET_RETRIEVING_BATCHES, true);

-  const getBatch = page =>
+  const getBatch = (page = 1) =>
     axios
       .get(state.endpointBatch, {
-        params: { ...urlParams, page },
+        params: {
+          ...urlParams,
+          page,
+        },
       })
       .then(({ data: { pagination, diff_files } }) => {
         commit(types.SET_DIFF_DATA_BATCH, { diff_files });
         commit(types.SET_BATCH_LOADING, false);
-        if (!pagination.next_page) commit(types.SET_RETRIEVING_BATCHES, false);
+
+        if (!pagination.next_page) {
+          commit(types.SET_RETRIEVING_BATCHES, false);
+        }
+
         return pagination.next_page;
       })
       .then(nextPage => nextPage && getBatch(nextPage))
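getBatch pages through the endpoint by chaining onto its own promise: each response's pagination.next_page feeds the next call, and the chain simply stops once the server returns no next page. The same pattern in isolation (fetchPage here is a hypothetical stand-in for the axios request above):

// Minimal sketch of the recursive paging used by getBatch; fetchPage must resolve
// to an object with a `pagination` field.
const fetchAllPages = (fetchPage, page = 1) =>
  fetchPage(page).then(({ pagination }) =>
    pagination.next_page ? fetchAllPages(fetchPage, pagination.next_page) : undefined,
  );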
@@ -132,6 +139,11 @@ export const fetchDiffFilesBatch = ({ commit, state }) => {
 export const fetchDiffFilesMeta = ({ commit, state }) => {
   const worker = new TreeWorker();
+  const urlParams = {};
+
+  if (state.useSingleDiffStyle) {
+    urlParams.view = state.diffViewType;
+  }

   commit(types.SET_LOADING, true);
@@ -142,16 +154,17 @@ export const fetchDiffFilesMeta = ({ commit, state }) => {
   });

   return axios
-    .get(state.endpointMetadata)
+    .get(mergeUrlParams(urlParams, state.endpointMetadata))
     .then(({ data }) => {
       const strippedData = { ...data };
       delete strippedData.diff_files;
       commit(types.SET_LOADING, false);
       commit(types.SET_MERGE_REQUEST_DIFFS, data.merge_request_diffs || []);
       commit(types.SET_DIFF_DATA, strippedData);

-      prepareDiffData(data);
-      worker.postMessage(data.diff_files);
+      worker.postMessage(prepareDiffData(data, state.diffFiles));

       return data;
     })
     .catch(() => worker.terminate());
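mergeUrlParams comes from GitLab's URL utilities and merges a params object into an existing URL's query string, so the metadata request only gains a view parameter when useSingleDiffStyle is enabled. Judging from the spec change further down (which stubs '/fetch/diffs_meta?'), an empty params object still leaves a bare '?' on the URL; roughly:

// Assumed behaviour of mergeUrlParams(params, url), inferred from the specs below:
// mergeUrlParams({ view: 'inline' }, '/fetch/diffs_meta')  // => '/fetch/diffs_meta?view=inline'
// mergeUrlParams({}, '/fetch/diffs_meta')                  // => '/fetch/diffs_meta?'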
@@ -226,7 +239,7 @@ export const startRenderDiffsQueue = ({ state, commit }) => {
     const nextFile = state.diffFiles.find(
       file =>
         !file.renderIt &&
-        (file.viewer && (!file.viewer.collapsed || !file.viewer.name === diffViewerModes.text)),
+        (file.viewer && (!file.viewer.collapsed || file.viewer.name !== diffViewerModes.text)),
     );

     if (nextFile) {
...
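The startRenderDiffsQueue change is an operator-precedence fix: in the old condition, `!file.viewer.name === diffViewerModes.text` negates the name first, so a boolean is strictly compared against a string and the clause is always false. Assuming diffViewerModes.text is the string 'text', the difference is:

// Illustration of the precedence bug (values are illustrative):
const name = 'image';
console.log(!name === 'text'); // false for every name; !name is a boolean
console.log(name !== 'text');  // true; the comparison the code intended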
@@ -148,8 +148,8 @@ export default {
   },
   [types.ADD_COLLAPSED_DIFFS](state, { file, data }) {
-    prepareDiffData(data);
-    const [newFileData] = data.diff_files.filter(f => f.file_hash === file.file_hash);
+    const files = prepareDiffData(data);
+    const [newFileData] = files.filter(f => f.file_hash === file.file_hash);
     const selectedFile = state.diffFiles.find(f => f.file_hash === file.file_hash);
     Object.assign(selectedFile, { ...newFileData });
   },
...
@@ -217,30 +217,19 @@ function diffFileUniqueId(file) {
   return `${file.content_sha}-${file.file_hash}`;
 }

-function combineDiffFilesWithPriorFiles(files, prior = []) {
-  files.forEach(file => {
-    const id = diffFileUniqueId(file);
-    const oldMatch = prior.find(oldFile => diffFileUniqueId(oldFile) === id);
-
-    if (oldMatch) {
-      const missingInline = !file.highlighted_diff_lines;
-      const missingParallel = !file.parallel_diff_lines;
-
-      if (missingInline) {
-        Object.assign(file, {
-          highlighted_diff_lines: oldMatch.highlighted_diff_lines,
-        });
-      }
-
-      if (missingParallel) {
-        Object.assign(file, {
-          parallel_diff_lines: oldMatch.parallel_diff_lines,
-        });
-      }
-    }
-  });
-
-  return files;
+function mergeTwoFiles(target, source) {
+  const originalInline = target.highlighted_diff_lines;
+  const originalParallel = target.parallel_diff_lines;
+  const missingInline = !originalInline.length;
+  const missingParallel = !originalParallel.length;
+
+  return {
+    ...target,
+    highlighted_diff_lines: missingInline ? source.highlighted_diff_lines : originalInline,
+    parallel_diff_lines: missingParallel ? source.parallel_diff_lines : originalParallel,
+    renderIt: source.renderIt,
+    collapsed: source.collapsed,
+  };
 }

 function ensureBasicDiffFileLines(file) {
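mergeTwoFiles keeps whichever line sets the target file has already loaded and only falls back to the source copy for the set that is still empty, while always adopting renderIt and collapsed from the source. A hedged usage sketch with made-up minimal file objects:

// Hypothetical minimal file objects; only the fields mergeTwoFiles reads are shown.
const alreadyLoaded = {
  file_hash: 'abc',
  highlighted_diff_lines: [{ line_code: 'abc_1_1' }],
  parallel_diff_lines: [],
  renderIt: false,
  collapsed: false,
};
const fromNextBatch = {
  file_hash: 'abc',
  highlighted_diff_lines: [],
  parallel_diff_lines: [{ left: null, right: { line_code: 'abc_1_1' } }],
  renderIt: true,
  collapsed: false,
};

// mergeTwoFiles(alreadyLoaded, fromNextBatch) keeps the loaded inline lines, fills in the
// missing parallel lines, and takes renderIt/collapsed from the batch copy.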
@@ -260,13 +249,16 @@ function cleanRichText(text) {
 }

 function prepareLine(line) {
-  return Object.assign(line, {
-    rich_text: cleanRichText(line.rich_text),
-    discussionsExpanded: true,
-    discussions: [],
-    hasForm: false,
-    text: undefined,
-  });
+  if (!line.alreadyPrepared) {
+    Object.assign(line, {
+      rich_text: cleanRichText(line.rich_text),
+      discussionsExpanded: true,
+      discussions: [],
+      hasForm: false,
+      text: undefined,
+      alreadyPrepared: true,
+    });
+  }
 }

 function prepareDiffFileLines(file) {
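Guarding prepareLine behind alreadyPrepared makes the per-line setup idempotent: when a file object flows through prepareDiffData again on a later batch or after a view-style switch, lines that already carry discussions are no longer reset to an empty state. The pattern in isolation:

// Idempotent initialisation guard, shown standalone.
function prepareOnce(line) {
  if (line.alreadyPrepared) return;

  Object.assign(line, {
    discussions: [],
    discussionsExpanded: true,
    hasForm: false,
    alreadyPrepared: true,
  });
}

const line = {};
prepareOnce(line);
line.discussions.push({ id: 1 });
prepareOnce(line);             // second pass is a no-op
console.log(line.discussions); // [{ id: 1 }]: the discussion survives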
@@ -288,12 +280,12 @@ function prepareDiffFileLines(file) {
       parallelLinesCount += 1;
       prepareLine(line.right);
     }
+  });

   Object.assign(file, {
     inlineLinesCount: inlineLines.length,
     parallelLinesCount,
   });
-  });

   return file;
 }
@@ -318,11 +310,26 @@ function finalizeDiffFile(file) {
   return file;
 }

-export function prepareDiffData(diffData, priorFiles) {
-  return combineDiffFilesWithPriorFiles(diffData.diff_files, priorFiles)
+function deduplicateFilesList(files) {
+  const dedupedFiles = files.reduce((newList, file) => {
+    const id = diffFileUniqueId(file);
+
+    return {
+      ...newList,
+      [id]: newList[id] ? mergeTwoFiles(newList[id], file) : file,
+    };
+  }, {});
+
+  return Object.values(dedupedFiles);
+}
+
+export function prepareDiffData(diff, priorFiles = []) {
+  const cleanedFiles = (diff.diff_files || [])
     .map(ensureBasicDiffFileLines)
     .map(prepareDiffFileLines)
     .map(finalizeDiffFile);
+
+  return deduplicateFilesList([...priorFiles, ...cleanedFiles]);
 }
 export function getDiffPositionByLineCode(diffFiles, useSingleDiffStyle) {
...
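With this refactor prepareDiffData no longer mutates diff.diff_files in place; it returns a new, de-duplicated array built from priorFiles plus the cleaned incoming files, and callers such as fetchDiffFilesMeta above and the ADD_COLLAPSED_DIFFS mutation consume that return value. A sketch of the new calling convention, with a simplified store shape rather than the real mutation code, and assuming prepareDiffData from the module above is in scope:

// Sketch only: how a caller is expected to use the return value of prepareDiffData.
const state = { diffFiles: [] };

function applyBatch(response) {
  // The returned list is authoritative: prior files merged with the new batch, each file once.
  state.diffFiles = prepareDiffData(response, state.diffFiles);
}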
# frozen_string_literal: true

require 'spec_helper'

describe 'Batch diffs', :js do
  include MergeRequestDiffHelpers
  include RepoHelpers

  let(:project) { create(:project, :repository) }
  let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'empty-branch') }

  before do
    stub_feature_flags(single_mr_diff_view: true)
    stub_feature_flags(diffs_batch_load: true)

    sign_in(project.owner)

    visit diffs_project_merge_request_path(merge_request.project, merge_request)
    wait_for_requests

    # Add discussion to first line of first file
    click_diff_line(find('.diff-file.file-holder:first-of-type tr.line_holder.new:first-of-type'))
    page.within('.js-discussion-note-form') do
      fill_in('note_note', with: 'First Line Comment')
      click_button('Comment')
    end

    # Add discussion to first line of last file
    click_diff_line(find('.diff-file.file-holder:last-of-type tr.line_holder.new:first-of-type'))
    page.within('.js-discussion-note-form') do
      fill_in('note_note', with: 'Last Line Comment')
      click_button('Comment')
    end

    wait_for_requests
  end

  it 'assigns discussions to diff files across multiple batch pages' do
    # Reload so we know the discussions are persisting across batch loads
    visit page.current_url

    # Wait for JS to settle
    wait_for_requests

    expect(page).to have_selector('.diff-files-holder .file-holder', count: 39)

    # Confirm discussions are applied to appropriate files (should be contained in multiple diff pages)
    page.within('.diff-file.file-holder:first-of-type .notes .timeline-entry .note .note-text') do
      expect(page).to have_content('First Line Comment')
    end

    page.within('.diff-file.file-holder:last-of-type .notes .timeline-entry .note .note-text') do
      expect(page).to have_content('Last Line Comment')
    end
  end
  context 'when user visits a URL with a link directly to a discussion' do
    context 'which is in the first batched page of diffs' do
      it 'scrolls to the correct discussion' do
        page.within('.diff-file.file-holder:first-of-type') do
          click_link('just now')
        end

        visit page.current_url
        wait_for_requests

        # Confirm scrolled to correct UI element
        expect(page.find('.diff-file.file-holder:first-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_falsey
        expect(page.find('.diff-file.file-holder:last-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_truthy
      end
    end

    context 'which is in at least page 2 of the batched pages of diffs' do
      it 'scrolls to the correct discussion' do
        page.within('.diff-file.file-holder:last-of-type') do
          click_link('just now')
        end

        visit page.current_url
        wait_for_requests

        # Confirm scrolled to correct UI element
        expect(page.find('.diff-file.file-holder:first-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_truthy
        expect(page.find('.diff-file.file-holder:last-of-type .discussion-notes .timeline-entry li.note[id]').obscured?).to be_falsey
      end
    end
  end

  context 'when user switches view styles' do
    before do
      find('.js-show-diff-settings').click
      click_button 'Side-by-side'

      wait_for_requests
    end

    it 'has the correct discussions applied to files across batched pages' do
      expect(page).to have_selector('.diff-files-holder .file-holder', count: 39)

      page.within('.diff-file.file-holder:first-of-type .notes .timeline-entry .note .note-text') do
        expect(page).to have_content('First Line Comment')
      end

      page.within('.diff-file.file-holder:last-of-type .notes .timeline-entry .note .note-text') do
        expect(page).to have_content('Last Line Comment')
      end
    end
  end
end
@@ -158,16 +158,120 @@ describe('DiffsStoreActions', () => {
       const res1 = { diff_files: [], pagination: { next_page: 2 } };
       const res2 = { diff_files: [], pagination: {} };

       mock
-        .onGet(endpointBatch, { params: { page: undefined, per_page: DIFFS_PER_PAGE, w: '1' } })
-        .reply(200, res1);
+        .onGet(endpointBatch, {
+          params: { page: 1, per_page: DIFFS_PER_PAGE, w: '1', view: 'inline' },
+        })
+        .reply(200, res1)
+        .onGet(endpointBatch, {
+          params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1', view: 'inline' },
+        })
+        .reply(200, res2);
+
+      testAction(
+        fetchDiffFilesBatch,
+        {},
+        { endpointBatch, useSingleDiffStyle: true, diffViewType: 'inline' },
+        [
+          { type: types.SET_BATCH_LOADING, payload: true },
+          { type: types.SET_RETRIEVING_BATCHES, payload: true },
+          { type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: res1.diff_files } },
+          { type: types.SET_BATCH_LOADING, payload: false },
+          { type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: [] } },
+          { type: types.SET_BATCH_LOADING, payload: false },
+          { type: types.SET_RETRIEVING_BATCHES, payload: false },
+        ],
+        [],
+        () => {
+          mock.restore();
+          done();
+        },
+      );
+    });
+  });
+
+  describe('fetchDiffFilesMeta', () => {
+    it('should fetch diff meta information', done => {
+      const endpointMetadata = '/fetch/diffs_meta?view=inline';
+      const mock = new MockAdapter(axios);
+      const data = { diff_files: [] };
+      const res = { data };
+
+      mock.onGet(endpointMetadata).reply(200, res);
+
+      testAction(
+        fetchDiffFilesMeta,
+        {},
+        { endpointMetadata },
+        [
+          { type: types.SET_LOADING, payload: true },
+          { type: types.SET_LOADING, payload: false },
+          { type: types.SET_MERGE_REQUEST_DIFFS, payload: [] },
+          { type: types.SET_DIFF_DATA, payload: { data } },
+        ],
+        [],
+        () => {
+          mock.restore();
+          done();
+        },
+      );
+    });
+  });
+
+  describe('when the single diff view feature flag is off', () => {
+    describe('fetchDiffFiles', () => {
+      it('should fetch diff files', done => {
+        const endpoint = '/fetch/diff/files?w=1';
+        const mock = new MockAdapter(axios);
+        const res = { diff_files: 1, merge_request_diffs: [] };
+
+        mock.onGet(endpoint).reply(200, res);
+
+        testAction(
+          fetchDiffFiles,
+          {},
+          {
+            endpoint,
+            diffFiles: [],
+            showWhitespace: false,
+            diffViewType: 'inline',
+            useSingleDiffStyle: false,
+          },
+          [
+            { type: types.SET_LOADING, payload: true },
+            { type: types.SET_LOADING, payload: false },
+            { type: types.SET_MERGE_REQUEST_DIFFS, payload: res.merge_request_diffs },
+            { type: types.SET_DIFF_DATA, payload: res },
+          ],
+          [],
+          () => {
+            mock.restore();
+            done();
+          },
+        );
+
+        fetchDiffFiles({ state: { endpoint }, commit: () => null })
+          .then(data => {
+            expect(data).toEqual(res);
+            done();
+          })
+          .catch(done.fail);
+      });
+    });
+
+    describe('fetchDiffFilesBatch', () => {
+      it('should fetch batch diff files', done => {
+        const endpointBatch = '/fetch/diffs_batch';
+        const mock = new MockAdapter(axios);
+        const res1 = { diff_files: [], pagination: { next_page: 2 } };
+        const res2 = { diff_files: [], pagination: {} };
+
         mock
+          .onGet(endpointBatch, { params: { page: 1, per_page: DIFFS_PER_PAGE, w: '1' } })
+          .reply(200, res1)
           .onGet(endpointBatch, { params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1' } })
           .reply(200, res2);

         testAction(
           fetchDiffFilesBatch,
           {},
-          { endpointBatch },
+          { endpointBatch, useSingleDiffStyle: false },
           [
             { type: types.SET_BATCH_LOADING, payload: true },
             { type: types.SET_RETRIEVING_BATCHES, payload: true },
@@ -188,7 +292,7 @@ describe('DiffsStoreActions', () => {
     describe('fetchDiffFilesMeta', () => {
       it('should fetch diff meta information', done => {
-        const endpointMetadata = '/fetch/diffs_meta';
+        const endpointMetadata = '/fetch/diffs_meta?';
         const mock = new MockAdapter(axios);
         const data = { diff_files: [] };
         const res = { data };
@@ -197,7 +301,7 @@ describe('DiffsStoreActions', () => {
         testAction(
           fetchDiffFilesMeta,
           {},
-          { endpointMetadata },
+          { endpointMetadata, useSingleDiffStyle: false },
           [
             { type: types.SET_LOADING, payload: true },
             { type: types.SET_LOADING, payload: false },
@@ -212,6 +316,7 @@ describe('DiffsStoreActions', () => {
         );
       });
     });
+  });
   describe('setHighlightedRow', () => {
     it('should mark currently selected diff and set lineHash and fileHash of highlightedRow', () => {
...
@@ -55,8 +55,8 @@ describe('DiffsStoreMutations', () => {
       const state = {
         diffFiles: [
           {
-            content_sha: diffFileMockData.content_sha,
-            file_hash: diffFileMockData.file_hash,
+            ...diffFileMockData,
+            parallel_diff_lines: [],
           },
         ],
       };
...
@@ -333,10 +333,10 @@ describe('DiffsStoreUtils', () => {
         diff_files: [Object.assign({}, mock, { highlighted_diff_lines: undefined })],
       };

-      utils.prepareDiffData(preparedDiff);
-      utils.prepareDiffData(splitInlineDiff);
-      utils.prepareDiffData(splitParallelDiff);
-      utils.prepareDiffData(completedDiff, [mock]);
+      preparedDiff.diff_files = utils.prepareDiffData(preparedDiff);
+      splitInlineDiff.diff_files = utils.prepareDiffData(splitInlineDiff);
+      splitParallelDiff.diff_files = utils.prepareDiffData(splitParallelDiff);
+      completedDiff.diff_files = utils.prepareDiffData(completedDiff, [mock]);
     });

     it('sets the renderIt and collapsed attribute on files', () => {
@@ -390,6 +390,37 @@ describe('DiffsStoreUtils', () => {
       expect(completedDiff.diff_files[0].parallel_diff_lines.length).toBeGreaterThan(0);
       expect(completedDiff.diff_files[0].highlighted_diff_lines.length).toBeGreaterThan(0);
     });
+
+    it('leaves files in the existing state', () => {
+      const priorFiles = [mock];
+      const fakeNewFile = {
+        ...mock,
+        content_sha: 'ABC',
+        file_hash: 'DEF',
+      };
+      const updatedFilesList = utils.prepareDiffData({ diff_files: [fakeNewFile] }, priorFiles);
+
+      expect(updatedFilesList).toEqual([mock, fakeNewFile]);
+    });
+
+    it('completes an existing split diff without overwriting existing diffs', () => {
+      // The current state has a file that has only loaded inline lines
+      const priorFiles = [{ ...mock, parallel_diff_lines: [] }];
+
+      // The next (batch) load loads two files: the other half of that file, and a new file
+      const fakeBatch = [
+        { ...mock, highlighted_diff_lines: undefined },
+        { ...mock, highlighted_diff_lines: undefined, content_sha: 'ABC', file_hash: 'DEF' },
+      ];
+      const updatedFilesList = utils.prepareDiffData({ diff_files: fakeBatch }, priorFiles);
+
+      expect(updatedFilesList).toEqual([
+        mock,
+        jasmine.objectContaining({
+          content_sha: 'ABC',
+          file_hash: 'DEF',
+        }),
+      ]);
+    });
   });

   describe('isDiscussionApplicableToLine', () => {
...