Commit fba4ebfb authored by Sarah Groff Hennigh-Palermo, committed by Andrew Fontaine

Adds the base graph

Adds new files, adapts previous
parent 6670a97f
export const PARSE_FAILURE = 'parse_failure';
export const LOAD_FAILURE = 'load_failure';
export const UNSUPPORTED_DATA = 'unsupported_data';
export const DEFAULT = 'default';
<script>
import { GlAlert } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import { __ } from '~/locale';
import DagGraph from './dag_graph.vue';
import { DEFAULT, PARSE_FAILURE, LOAD_FAILURE, UNSUPPORTED_DATA } from './constants';
import { parseData } from './utils';
export default {
// eslint-disable-next-line @gitlab/require-i18n-strings
name: 'Dag',
components: {
DagGraph,
GlAlert,
},
props: {
......@@ -18,15 +23,47 @@ export default {
data() {
return {
showFailureAlert: false,
failureType: null,
graphData: null,
};
},
errorTexts: {
[LOAD_FAILURE]: __('We are currently unable to fetch data for this graph.'),
[PARSE_FAILURE]: __('There was an error parsing the data for this graph.'),
[UNSUPPORTED_DATA]: __('A DAG must have two dependent jobs to be visualized on this tab.'),
[DEFAULT]: __('An unknown error occurred while loading this graph.'),
},
computed: {
failure() {
switch (this.failureType) {
case LOAD_FAILURE:
return {
text: this.$options.errorTexts[LOAD_FAILURE],
variant: 'danger',
};
case PARSE_FAILURE:
return {
text: this.$options.errorTexts[PARSE_FAILURE],
variant: 'danger',
};
case UNSUPPORTED_DATA:
return {
text: this.$options.errorTexts[UNSUPPORTED_DATA],
variant: 'info',
};
default:
return {
text: this.$options.errorTexts[DEFAULT],
variant: 'danger',
};
}
},
shouldDisplayGraph() {
return !this.showFailureAlert;
return Boolean(!this.showFailureAlert && this.graphData);
},
},
mounted() {
const { drawGraph, reportFailure } = this;
const { processGraphData, reportFailure } = this;
if (!this.graphUrl) {
reportFailure();
......@@ -36,30 +73,43 @@ export default {
axios
.get(this.graphUrl)
.then(response => {
drawGraph(response.data);
processGraphData(response.data);
})
.catch(reportFailure);
.catch(() => reportFailure(LOAD_FAILURE));
},
methods: {
drawGraph(data) {
return data;
processGraphData(data) {
let parsed;
try {
parsed = parseData(data.stages);
} catch {
this.reportFailure(PARSE_FAILURE);
return;
}
if (parsed.links.length < 2) {
this.reportFailure(UNSUPPORTED_DATA);
return;
}
this.graphData = parsed;
},
hideAlert() {
this.showFailureAlert = false;
},
reportFailure() {
reportFailure(type) {
this.showFailureAlert = true;
this.failureType = type;
},
},
};
</script>
<template>
<div>
<gl-alert v-if="showFailureAlert" variant="danger" @dismiss="hideAlert">
{{ __('We are currently unable to fetch data for this graph.') }}
<gl-alert v-if="showFailureAlert" :variant="failure.variant" @dismiss="hideAlert">
{{ failure.text }}
</gl-alert>
<div v-if="shouldDisplayGraph" data-testid="dag-graph-container">
<!-- graph goes here -->
</div>
<dag-graph v-if="shouldDisplayGraph" :graph-data="graphData" @onFailure="reportFailure" />
</div>
</template>
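A minimal sketch of the payload shape the wrapper assumes the graphUrl endpoint returns — field names mirror the mock data in the specs below; this is illustrative only, the endpoint itself is outside this commit:
// Illustrative payload — stages containing groups of jobs; a job may declare
// `needs` on other groups, which parseData() turns into the { nodes, links }
// object consumed by dag_graph.vue.
const examplePayload = {
  stages: [
    { name: 'build', groups: [{ name: 'build_a', size: 1, jobs: [{ name: 'build_a' }] }] },
    { name: 'test', groups: [{ name: 'test_a', size: 1, jobs: [{ name: 'test_a', needs: ['build_a'] }] }] },
  ],
};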
......@@ -141,7 +141,13 @@ export const parseData = data => {
values for the nodes and links in the graph.
*/
export const createSankey = ({ width, height, nodeWidth, nodePadding, paddingForLabels }) => {
export const createSankey = ({
width = 10,
height = 10,
nodeWidth = 10,
nodePadding = 10,
paddingForLabels = 1,
} = {}) => {
const sankeyGenerator = sankey()
.nodeId(({ name }) => name)
.nodeAlign(sankeyLeft)
......
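Because createSankey now defaults every layout setting, it can be called with no arguments, as the graph spec below does. A short illustrative usage, assuming the utils and spec mock data included in this commit:
// Illustrative usage of the new defaults (not part of the commit):
// import { parseData, createSankey } from '~/pipelines/components/dag/utils';
// import { mockBaseData } from './mock_data';
const parsed = parseData(mockBaseData.stages); // => { nodes, links }
const layout = createSankey()(parsed); // falls back to the small defaults above
// Explicit settings still work: createSankey({ width: 300, height: 200 })(parsed)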
......@@ -867,6 +867,9 @@ msgstr ""
msgid "A .NET Core console application template, customizable for any .NET Core project"
msgstr ""
msgid "A DAG must have two dependent jobs to be visualized on this tab."
msgstr ""
msgid "A GitBook site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features."
msgstr ""
......@@ -2404,6 +2407,9 @@ msgstr ""
msgid "An unexpected error occurred while stopping the Web Terminal."
msgstr ""
msgid "An unknown error occurred while loading this graph."
msgstr ""
msgid "Analytics"
msgstr ""
......@@ -22064,6 +22070,9 @@ msgstr ""
msgid "There was an error loading users activity calendar."
msgstr ""
msgid "There was an error parsing the data for this graph."
msgstr ""
msgid "There was an error removing the e-mail."
msgstr ""
......
import { mount } from '@vue/test-utils';
import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
import { createSankey, removeOrphanNodes } from '~/pipelines/components/dag/utils';
import { parsedData } from './mock_data';
describe('The DAG graph', () => {
let wrapper;
const getGraph = () => wrapper.find('.dag-graph-container > svg');
const getAllLinks = () => wrapper.findAll('.dag-link');
const getAllNodes = () => wrapper.findAll('.dag-node');
const getAllLabels = () => wrapper.findAll('foreignObject');
const createComponent = (propsData = {}) => {
if (wrapper?.destroy) {
wrapper.destroy();
}
wrapper = mount(DagGraph, {
attachToDocument: true,
propsData,
data() {
return {
color: () => {},
width: 0,
height: 0,
};
},
});
};
beforeEach(() => {
createComponent({ graphData: parsedData });
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('in the basic case', () => {
beforeEach(() => {
/*
The graph uses random to offset links. To keep the snapshot consistent,
we mock Math.random. Wheeeee!
*/
const randomNumber = jest.spyOn(global.Math, 'random');
randomNumber.mockImplementation(() => 0.2);
createComponent({ graphData: parsedData });
});
it('renders the graph svg', () => {
expect(getGraph().exists()).toBe(true);
expect(getGraph().html()).toMatchSnapshot();
});
});
describe('links', () => {
it('renders the expected number of links', () => {
expect(getAllLinks()).toHaveLength(parsedData.links.length);
});
it('renders the expected number of gradients', () => {
expect(wrapper.findAll('linearGradient')).toHaveLength(parsedData.links.length);
});
it('renders the expected number of clip paths', () => {
expect(wrapper.findAll('clipPath')).toHaveLength(parsedData.links.length);
});
});
describe('nodes and labels', () => {
const sankeyNodes = createSankey()(parsedData).nodes;
const processedNodes = removeOrphanNodes(sankeyNodes);
describe('nodes', () => {
it('renders the expected number of nodes', () => {
expect(getAllNodes()).toHaveLength(processedNodes.length);
});
});
describe('labels', () => {
it('renders the expected number of labels as foreignObjects', () => {
expect(getAllLabels()).toHaveLength(processedNodes.length);
});
it('renders the title as text', () => {
expect(
getAllLabels()
.at(0)
.text(),
).toBe(parsedData.nodes[0].name);
});
});
});
});
import { mount } from '@vue/test-utils';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
import { GlAlert } from '@gitlab/ui';
import Dag from '~/pipelines/components/dag/dag.vue';
import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
describe('Pipeline DAG graph', () => {
import {
DEFAULT,
PARSE_FAILURE,
LOAD_FAILURE,
UNSUPPORTED_DATA,
} from '~/pipelines/components/dag/constants';
import { mockBaseData, tooSmallGraph, unparseableGraph } from './mock_data';
describe('Pipeline DAG graph wrapper', () => {
let wrapper;
let axiosMock;
let mock;
const getAlert = () => wrapper.find(GlAlert);
const getGraph = () => wrapper.find('[data-testid="dag-graph-container"]');
const dataPath = 'root/test/pipelines/90/dag.json';
const getGraph = () => wrapper.find(DagGraph);
const getErrorText = type => wrapper.vm.$options.errorTexts[type];
const createComponent = (propsData = {}, method = mount) => {
axiosMock = new MockAdapter(axios);
const dataPath = '/root/test/pipelines/90/dag.json';
const createComponent = (propsData = {}) => {
if (wrapper?.destroy) {
wrapper.destroy();
}
wrapper = method(Dag, {
wrapper = shallowMount(Dag, {
propsData,
data() {
return {
......@@ -30,8 +38,12 @@ describe('Pipeline DAG graph', () => {
});
};
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => {
axiosMock.restore();
mock.restore();
wrapper.destroy();
wrapper = null;
});
......@@ -41,34 +53,80 @@ describe('Pipeline DAG graph', () => {
createComponent({ graphUrl: undefined });
});
it('shows the alert and not the graph', () => {
it('shows the DEFAULT alert and not the graph', () => {
expect(getAlert().exists()).toBe(true);
expect(getAlert().text()).toBe(getErrorText(DEFAULT));
expect(getGraph().exists()).toBe(false);
});
});
describe('when there is a dataUrl', () => {
beforeEach(() => {
createComponent({ graphUrl: dataPath });
describe('but the data fetch fails', () => {
beforeEach(() => {
mock.onGet(dataPath).replyOnce(500);
createComponent({ graphUrl: dataPath });
});
it('shows the LOAD_FAILURE alert and not the graph', () => {
return wrapper.vm
.$nextTick()
.then(waitForPromises)
.then(() => {
expect(getAlert().exists()).toBe(true);
expect(getAlert().text()).toBe(getErrorText(LOAD_FAILURE));
expect(getGraph().exists()).toBe(false);
});
});
});
it('shows the graph and not the alert', () => {
expect(getAlert().exists()).toBe(false);
expect(getGraph().exists()).toBe(true);
describe('the data fetch succeeds but the parse fails', () => {
beforeEach(() => {
mock.onGet(dataPath).replyOnce(200, unparseableGraph);
createComponent({ graphUrl: dataPath });
});
it('shows the PARSE_FAILURE alert and not the graph', () => {
return wrapper.vm
.$nextTick()
.then(waitForPromises)
.then(() => {
expect(getAlert().exists()).toBe(true);
expect(getAlert().text()).toBe(getErrorText(PARSE_FAILURE));
expect(getGraph().exists()).toBe(false);
});
});
});
describe('but the data fetch fails', () => {
describe('and the data fetch and parse succeeds', () => {
beforeEach(() => {
mock.onGet(dataPath).replyOnce(200, mockBaseData);
createComponent({ graphUrl: dataPath });
});
it('shows the graph and not the alert', () => {
return wrapper.vm
.$nextTick()
.then(waitForPromises)
.then(() => {
expect(getAlert().exists()).toBe(false);
expect(getGraph().exists()).toBe(true);
});
});
});
describe('the data fetch and parse succeeds, but the resulting graph is too small', () => {
beforeEach(() => {
axiosMock.onGet(dataPath).replyOnce(500);
mock.onGet(dataPath).replyOnce(200, tooSmallGraph);
createComponent({ graphUrl: dataPath });
});
it('shows the alert and not the graph', () => {
it('shows the UNSUPPORTED_DATA alert and not the graph', () => {
return wrapper.vm
.$nextTick()
.then(waitForPromises)
.then(() => {
expect(getAlert().exists()).toBe(true);
expect(getAlert().text()).toBe(getErrorText(UNSUPPORTED_DATA));
expect(getGraph().exists()).toBe(false);
});
});
......
......@@ -3,7 +3,7 @@
as well as non-parallel jobs with spaces in the name to prevent
us relying on spaces as an indicator.
*/
export default {
export const mockBaseData = {
stages: [
{
name: 'test',
......@@ -42,3 +42,349 @@ export default {
},
],
};
export const tooSmallGraph = {
stages: [
{
name: 'test',
groups: [
{
name: 'jest',
size: 2,
jobs: [{ name: 'jest 1/2' }, { name: 'jest 2/2' }],
},
{
name: 'rspec',
size: 1,
jobs: [{ name: 'rspec', needs: ['frontend fixtures'] }],
},
],
},
{
name: 'fixtures',
groups: [
{
name: 'frontend fixtures',
size: 1,
jobs: [{ name: 'frontend fixtures' }],
},
],
},
{
name: 'un-needed',
groups: [
{
name: 'un-needed',
size: 1,
jobs: [{ name: 'un-needed' }],
},
],
},
],
};
export const unparseableGraph = [
{
name: 'test',
groups: [
{
name: 'jest',
size: 2,
jobs: [{ name: 'jest 1/2', needs: ['frontend fixtures'] }, { name: 'jest 2/2' }],
},
{
name: 'rspec',
size: 1,
jobs: [{ name: 'rspec', needs: ['frontend fixtures'] }],
},
],
},
{
name: 'un-needed',
groups: [
{
name: 'un-needed',
size: 1,
jobs: [{ name: 'un-needed' }],
},
],
},
];
/*
This represents data that has been parsed by the wrapper
*/
export const parsedData = {
nodes: [
{
name: 'build_a',
size: 1,
jobs: [
{
name: 'build_a',
},
],
category: 'build',
},
{
name: 'build_b',
size: 1,
jobs: [
{
name: 'build_b',
},
],
category: 'build',
},
{
name: 'test_a',
size: 1,
jobs: [
{
name: 'test_a',
needs: ['build_a'],
},
],
category: 'test',
},
{
name: 'test_b',
size: 1,
jobs: [
{
name: 'test_b',
},
],
category: 'test',
},
{
name: 'test_c',
size: 1,
jobs: [
{
name: 'test_c',
},
],
category: 'test',
},
{
name: 'test_d',
size: 1,
jobs: [
{
name: 'test_d',
},
],
category: 'test',
},
{
name: 'post_test_a',
size: 1,
jobs: [
{
name: 'post_test_a',
},
],
category: 'post-test',
},
{
name: 'post_test_b',
size: 1,
jobs: [
{
name: 'post_test_b',
},
],
category: 'post-test',
},
{
name: 'post_test_c',
size: 1,
jobs: [
{
name: 'post_test_c',
needs: ['test_a', 'test_b'],
},
],
category: 'post-test',
},
{
name: 'staging_a',
size: 1,
jobs: [
{
name: 'staging_a',
needs: ['post_test_a'],
},
],
category: 'staging',
},
{
name: 'staging_b',
size: 1,
jobs: [
{
name: 'staging_b',
needs: ['post_test_b'],
},
],
category: 'staging',
},
{
name: 'staging_c',
size: 1,
jobs: [
{
name: 'staging_c',
},
],
category: 'staging',
},
{
name: 'staging_d',
size: 1,
jobs: [
{
name: 'staging_d',
},
],
category: 'staging',
},
{
name: 'staging_e',
size: 1,
jobs: [
{
name: 'staging_e',
},
],
category: 'staging',
},
{
name: 'canary_a',
size: 1,
jobs: [
{
name: 'canary_a',
needs: ['staging_a', 'staging_b'],
},
],
category: 'canary',
},
{
name: 'canary_b',
size: 1,
jobs: [
{
name: 'canary_b',
},
],
category: 'canary',
},
{
name: 'canary_c',
size: 1,
jobs: [
{
name: 'canary_c',
needs: ['staging_b'],
},
],
category: 'canary',
},
{
name: 'production_a',
size: 1,
jobs: [
{
name: 'production_a',
needs: ['canary_a'],
},
],
category: 'production',
},
{
name: 'production_b',
size: 1,
jobs: [
{
name: 'production_b',
},
],
category: 'production',
},
{
name: 'production_c',
size: 1,
jobs: [
{
name: 'production_c',
},
],
category: 'production',
},
{
name: 'production_d',
size: 1,
jobs: [
{
name: 'production_d',
needs: ['canary_c'],
},
],
category: 'production',
},
],
links: [
{
source: 'build_a',
target: 'test_a',
value: 10,
},
{
source: 'test_a',
target: 'post_test_c',
value: 10,
},
{
source: 'test_b',
target: 'post_test_c',
value: 10,
},
{
source: 'post_test_a',
target: 'staging_a',
value: 10,
},
{
source: 'post_test_b',
target: 'staging_b',
value: 10,
},
{
source: 'staging_a',
target: 'canary_a',
value: 10,
},
{
source: 'staging_b',
target: 'canary_a',
value: 10,
},
{
source: 'staging_b',
target: 'canary_c',
value: 10,
},
{
source: 'canary_a',
target: 'production_a',
value: 10,
},
{
source: 'canary_c',
target: 'production_d',
value: 10,
},
],
};
......@@ -8,12 +8,12 @@ import {
getMaxNodes,
} from '~/pipelines/components/dag/utils';
import mockGraphData from './mock_data';
import { mockBaseData } from './mock_data';
describe('DAG visualization parsing utilities', () => {
const { nodes, nodeDict } = createNodesStructure(mockGraphData.stages);
const { nodes, nodeDict } = createNodesStructure(mockBaseData.stages);
const unfilteredLinks = makeLinksFromNodes(nodes, nodeDict);
const parsed = parseData(mockGraphData.stages);
const parsed = parseData(mockBaseData.stages);
const layoutSettings = {
width: 200,
......@@ -30,10 +30,10 @@ describe('DAG visualization parsing utilities', () => {
const parallelJobName = 'jest 1/2';
const singleJobName = 'frontend fixtures';
const { name, jobs, size } = mockGraphData.stages[0].groups[0];
const { name, jobs, size } = mockBaseData.stages[0].groups[0];
it('returns the expected node structure', () => {
expect(nodes[0]).toHaveProperty('category', mockGraphData.stages[0].name);
expect(nodes[0]).toHaveProperty('category', mockBaseData.stages[0].name);
expect(nodes[0]).toHaveProperty('name', name);
expect(nodes[0]).toHaveProperty('jobs', jobs);
expect(nodes[0]).toHaveProperty('size', size);
......
......@@ -3551,10 +3551,10 @@ d3-zoom@1:
d3-selection "1"
d3-transition "1"
d3@^5.14, d3@^5.7.0:
version "5.15.0"
resolved "https://registry.yarnpkg.com/d3/-/d3-5.15.0.tgz#ffd44958e6a3cb8a59a84429c45429b8bca5677a"
integrity sha512-C+E80SL2nLLtmykZ6klwYj5rPqB5nlfN5LdWEAVdWPppqTD8taoJi2PxLZjPeYT8FFRR2yucXq+kBlOnnvZeLg==
d3@^5.14, d3@^5.16.0, d3@^5.7.0:
version "5.16.0"
resolved "https://registry.yarnpkg.com/d3/-/d3-5.16.0.tgz#9c5e8d3b56403c79d4ed42fbd62f6113f199c877"
integrity sha512-4PL5hHaHwX4m7Zr1UapXW23apo6pexCgdetdJ5kTmADpG/7T9Gkxw0M0tf/pjoB63ezCCm0u5UaFYy2aMt0Mcw==
dependencies:
d3-array "1"
d3-axis "1"
......