Commit 909ea6c5 authored by Phil Hughes

Merge branch 'mrincon-move-monitor-store-test-to-jest' into 'master'

Migrate monitor store tests from Karma to Jest

See merge request gitlab-org/gitlab!20134
parents 47701bf1 7d255e4d
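
The conversion follows the usual Karma-to-Jest pattern visible throughout the diff below: Jasmine spies become Jest mock functions, call-count assertions move to Jest matchers, helper imports drop the `spec/` prefix (`spec/helpers/vuex_action_helper` becomes `helpers/vuex_action_helper`), and `jest.mock('~/lib/utils/common_utils')` replaces the Karma-era stubbing of `backOff`. A minimal sketch of that pattern, illustrative only and not part of the diff:

```javascript
// Illustrative sketch of the Karma-to-Jest conversion applied in this MR (not part of the diff).
describe('example conversion', () => {
  it('replaces jasmine.createSpy() with jest.fn()', () => {
    const dispatch = jest.fn(); // Karma spec: const dispatch = jasmine.createSpy();

    dispatch('receiveDeploymentsDataSuccess');

    // Karma spec: expect(dispatch.calls.count()).toEqual(1);
    expect(dispatch).toHaveBeenCalledTimes(1);
    expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataSuccess');
  });
});
```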
......@@ -328,3 +328,138 @@ export const metricsGroupsAPIResponse = [
],
},
];
export const environmentData = [
{
id: 34,
name: 'production',
state: 'available',
external_url: 'http://root-autodevops-deploy.my-fake-domain.com',
environment_type: null,
stop_action: false,
metrics_path: '/root/hello-prometheus/environments/34/metrics',
environment_path: '/root/hello-prometheus/environments/34',
stop_path: '/root/hello-prometheus/environments/34/stop',
terminal_path: '/root/hello-prometheus/environments/34/terminal',
folder_path: '/root/hello-prometheus/environments/folders/production',
created_at: '2018-06-29T16:53:38.301Z',
updated_at: '2018-06-29T16:57:09.825Z',
last_deployment: {
id: 127,
},
},
{
id: 35,
name: 'review/noop-branch',
state: 'available',
external_url: 'http://root-autodevops-deploy-review-noop-branc-die93w.my-fake-domain.com',
environment_type: 'review',
stop_action: true,
metrics_path: '/root/hello-prometheus/environments/35/metrics',
environment_path: '/root/hello-prometheus/environments/35',
stop_path: '/root/hello-prometheus/environments/35/stop',
terminal_path: '/root/hello-prometheus/environments/35/terminal',
folder_path: '/root/hello-prometheus/environments/folders/review',
created_at: '2018-07-03T18:39:41.702Z',
updated_at: '2018-07-03T18:44:54.010Z',
last_deployment: {
id: 128,
},
},
{
id: 36,
name: 'no-deployment/noop-branch',
state: 'available',
created_at: '2018-07-04T18:39:41.702Z',
updated_at: '2018-07-04T18:44:54.010Z',
},
];
export const metricsDashboardResponse = {
dashboard: {
dashboard: 'Environment metrics',
priority: 1,
panel_groups: [
{
group: 'System metrics (Kubernetes)',
priority: 5,
panels: [
{
title: 'Memory Usage (Total)',
type: 'area-chart',
y_label: 'Total Memory Used',
weight: 4,
metrics: [
{
id: 'system_metrics_kubernetes_container_memory_total',
query_range:
'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
label: 'Total',
unit: 'GB',
metric_id: 12,
prometheus_endpoint_path: 'http://test',
},
],
},
{
title: 'Core Usage (Total)',
type: 'area-chart',
y_label: 'Total Cores',
weight: 3,
metrics: [
{
id: 'system_metrics_kubernetes_container_cores_total',
query_range:
'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
label: 'Total',
unit: 'cores',
metric_id: 13,
},
],
},
{
title: 'Memory Usage (Pod average)',
type: 'line-chart',
y_label: 'Memory Used per Pod',
weight: 2,
metrics: [
{
id: 'system_metrics_kubernetes_container_memory_average',
query_range:
'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
label: 'Pod average',
unit: 'MB',
metric_id: 14,
},
],
},
],
},
],
},
status: 'success',
};
export const dashboardGitResponse = [
{
default: true,
display_name: 'Default',
can_edit: false,
project_blob_path: null,
path: 'config/prometheus/common_metrics.yml',
},
{
default: false,
display_name: 'Custom Dashboard 1',
can_edit: true,
project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_1.yml`,
path: '.gitlab/dashboards/dashboard_1.yml',
},
{
default: false,
display_name: 'Custom Dashboard 2',
can_edit: true,
project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_2.yml`,
path: '.gitlab/dashboards/dashboard_2.yml',
},
];
import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import { backOffRequest } from '~/monitoring/stores/actions';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { backOff } from '~/lib/utils/common_utils';
import store from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import {
backOffRequest,
fetchDashboard,
receiveMetricsDashboardSuccess,
receiveMetricsDashboardFailure,
fetchDeploymentsData,
fetchEnvironmentsData,
fetchPrometheusMetrics,
fetchPrometheusMetric,
requestMetricsData,
setEndpoints,
setGettingStartedEmptyState,
} from '~/monitoring/stores/actions';
import storeState from '~/monitoring/stores/state';
import {
deploymentData,
environmentData,
metricsDashboardResponse,
metricsGroupsAPIResponse,
dashboardGitResponse,
} from '../mock_data';
jest.mock('~/lib/utils/common_utils');
const resetStore = str => {
str.replaceState({
showEmptyState: true,
emptyState: 'loading',
groups: [],
});
};
const MAX_REQUESTS = 3;
describe('Monitoring store helpers', () => {
......@@ -51,3 +83,334 @@ describe('Monitoring store helpers', () => {
});
});
});
describe('Monitoring store actions', () => {
let mock;
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => {
resetStore(store);
mock.restore();
});
describe('requestMetricsData', () => {
it('sets emptyState to loading', () => {
const commit = jest.fn();
const { state } = store;
requestMetricsData({
state,
commit,
});
expect(commit).toHaveBeenCalledWith(types.REQUEST_METRICS_DATA);
});
});
describe('fetchDeploymentsData', () => {
it('commits RECEIVE_DEPLOYMENTS_DATA_SUCCESS on success', done => {
const dispatch = jest.fn();
const { state } = store;
state.deploymentsEndpoint = '/success';
mock.onGet(state.deploymentsEndpoint).reply(200, {
deployments: deploymentData,
});
fetchDeploymentsData({
state,
dispatch,
})
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataSuccess', deploymentData);
done();
})
.catch(done.fail);
});
it('commits RECEIVE_DEPLOYMENTS_DATA_FAILURE on error', done => {
const dispatch = jest.fn();
const { state } = store;
state.deploymentsEndpoint = '/error';
mock.onGet(state.deploymentsEndpoint).reply(500);
fetchDeploymentsData({
state,
dispatch,
})
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataFailure');
done();
})
.catch(done.fail);
});
});
describe('fetchEnvironmentsData', () => {
it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on success', done => {
const dispatch = jest.fn();
const { state } = store;
state.environmentsEndpoint = '/success';
mock.onGet(state.environmentsEndpoint).reply(200, {
environments: environmentData,
});
fetchEnvironmentsData({
state,
dispatch,
})
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataSuccess', environmentData);
done();
})
.catch(done.fail);
});
it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', done => {
const dispatch = jest.fn();
const { state } = store;
state.environmentsEndpoint = '/error';
mock.onGet(state.environmentsEndpoint).reply(500);
fetchEnvironmentsData({
state,
dispatch,
})
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
done();
})
.catch(done.fail);
});
});
describe('Set endpoints', () => {
let mockedState;
beforeEach(() => {
mockedState = storeState();
});
it('should commit SET_ENDPOINTS mutation', done => {
testAction(
setEndpoints,
{
metricsEndpoint: 'additional_metrics.json',
deploymentsEndpoint: 'deployments.json',
environmentsEndpoint: 'deployments.json',
},
mockedState,
[
{
type: types.SET_ENDPOINTS,
payload: {
metricsEndpoint: 'additional_metrics.json',
deploymentsEndpoint: 'deployments.json',
environmentsEndpoint: 'deployments.json',
},
},
],
[],
done,
);
});
});
describe('Set empty states', () => {
let mockedState;
beforeEach(() => {
mockedState = storeState();
});
it('should commit SET_GETTING_STARTED_EMPTY_STATE mutation', done => {
testAction(
setGettingStartedEmptyState,
null,
mockedState,
[
{
type: types.SET_GETTING_STARTED_EMPTY_STATE,
},
],
[],
done,
);
});
});
describe('fetchDashboard', () => {
let dispatch;
let state;
const response = metricsDashboardResponse;
beforeEach(() => {
dispatch = jest.fn();
state = storeState();
state.dashboardEndpoint = '/dashboard';
});
it('dispatches receive and success actions', done => {
const params = {};
mock.onGet(state.dashboardEndpoint).reply(200, response);
fetchDashboard(
{
state,
dispatch,
},
params,
)
.then(() => {
expect(dispatch).toHaveBeenCalledWith('requestMetricsDashboard');
expect(dispatch).toHaveBeenCalledWith('receiveMetricsDashboardSuccess', {
response,
params,
});
done();
})
.catch(done.fail);
});
it('dispatches failure action', done => {
const params = {};
mock.onGet(state.dashboardEndpoint).reply(500);
fetchDashboard(
{
state,
dispatch,
},
params,
)
.then(() => {
expect(dispatch).toHaveBeenCalledWith(
'receiveMetricsDashboardFailure',
new Error('Request failed with status code 500'),
);
done();
})
.catch(done.fail);
});
});
describe('receiveMetricsDashboardSuccess', () => {
let commit;
let dispatch;
let state;
beforeEach(() => {
commit = jest.fn();
dispatch = jest.fn();
state = storeState();
});
it('stores groups ', () => {
const params = {};
const response = metricsDashboardResponse;
receiveMetricsDashboardSuccess(
{
state,
commit,
dispatch,
},
{
response,
params,
},
);
expect(commit).toHaveBeenCalledWith(
types.RECEIVE_METRICS_DATA_SUCCESS,
metricsDashboardResponse.dashboard.panel_groups,
);
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
});
it('sets the dashboards loaded from the repository', () => {
const params = {};
const response = metricsDashboardResponse;
response.all_dashboards = dashboardGitResponse;
receiveMetricsDashboardSuccess(
{
state,
commit,
dispatch,
},
{
response,
params,
},
);
expect(commit).toHaveBeenCalledWith(types.SET_ALL_DASHBOARDS, dashboardGitResponse);
});
});
describe('receiveMetricsDashboardFailure', () => {
let commit;
beforeEach(() => {
commit = jest.fn();
});
it('commits failure action', () => {
receiveMetricsDashboardFailure({
commit,
});
expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, undefined);
});
it('commits failure action with error', () => {
receiveMetricsDashboardFailure(
{
commit,
},
'uh-oh',
);
expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, 'uh-oh');
});
});
describe('fetchPrometheusMetrics', () => {
let commit;
let dispatch;
beforeEach(() => {
commit = jest.fn();
dispatch = jest.fn();
});
it('commits empty state when state.groups is empty', done => {
const state = storeState();
const params = {};
fetchPrometheusMetrics(
{
state,
commit,
dispatch,
},
params,
)
.then(() => {
expect(commit).toHaveBeenCalledWith(types.SET_NO_DATA_EMPTY_STATE);
expect(dispatch).not.toHaveBeenCalled();
done();
})
.catch(done.fail);
});
it('dispatches fetchPrometheusMetric for each panel query', done => {
const params = {};
const state = storeState();
state.dashboard.panel_groups = metricsDashboardResponse.dashboard.panel_groups;
const metric = state.dashboard.panel_groups[0].panels[0].metrics[0];
fetchPrometheusMetrics(
{
state,
commit,
dispatch,
},
params,
)
.then(() => {
expect(dispatch).toHaveBeenCalledTimes(3);
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
params,
});
done();
})
.catch(done.fail);
});
});
describe('fetchPrometheusMetric', () => {
it('commits prometheus query result', done => {
const commit = jest.fn();
const params = {
start: '2019-08-06T12:40:02.184Z',
end: '2019-08-06T20:40:02.184Z',
};
const metric = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics[0];
const state = storeState();
const data = metricsGroupsAPIResponse[0].panels[0].metrics[0];
const response = {
data,
};
mock.onGet('http://test').reply(200, response);
fetchPrometheusMetric({ state, commit }, { metric, params })
.then(() => {
expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
metricId: metric.metric_id,
result: data.result,
});
done();
})
.catch(done.fail);
});
});
});
......@@ -11,81 +11,62 @@ import { uniqMetricsId } from '~/monitoring/stores/utils';
describe('Monitoring mutations', () => {
let stateCopy;
beforeEach(() => {
stateCopy = state();
});
describe(types.RECEIVE_METRICS_DATA_SUCCESS, () => {
describe('RECEIVE_METRICS_DATA_SUCCESS', () => {
let groups;
beforeEach(() => {
stateCopy.dashboard.panel_groups = [];
groups = metricsGroupsAPIResponse;
});
it('adds a key to the group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
expect(stateCopy.dashboard.panel_groups[0].key).toBe('system-metrics-kubernetes--0');
});
it('normalizes values', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
const expectedLabel = 'Pod average';
const { label, query_range } = stateCopy.dashboard.panel_groups[0].metrics[0].metrics[0];
expect(label).toEqual(expectedLabel);
expect(query_range.length).toBeGreaterThan(0);
});
it('contains one group that has two panels, each with one metric', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
expect(stateCopy.dashboard.panel_groups).toBeDefined();
expect(stateCopy.dashboard.panel_groups.length).toEqual(1);
expect(stateCopy.dashboard.panel_groups[0].panels.length).toEqual(2);
expect(stateCopy.dashboard.panel_groups[0].panels[0].metrics.length).toEqual(1);
expect(stateCopy.dashboard.panel_groups[0].panels[1].metrics.length).toEqual(1);
});
it('assigns queries a metric id', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, groups);
expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries[0].metricId).toEqual(
'17_system_metrics_kubernetes_container_memory_average',
);
});
describe('dashboard endpoint', () => {
const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
it('aliases group panels to metrics for backwards compatibility', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
expect(stateCopy.dashboard.panel_groups[0].metrics[0]).toBeDefined();
});
it('aliases panel metrics to queries for backwards compatibility', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
expect(stateCopy.dashboard.panel_groups[0].metrics[0].queries).toBeDefined();
});
});
});
describe(types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS, () => {
describe('RECEIVE_DEPLOYMENTS_DATA_SUCCESS', () => {
it('stores the deployment data', () => {
stateCopy.deploymentData = [];
mutations[types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS](stateCopy, deploymentData);
expect(stateCopy.deploymentData).toBeDefined();
expect(stateCopy.deploymentData.length).toEqual(3);
expect(typeof stateCopy.deploymentData[0]).toEqual('object');
});
});
describe('SET_ENDPOINTS', () => {
it('should set all the endpoints', () => {
mutations[types.SET_ENDPOINTS](stateCopy, {
......@@ -95,7 +76,6 @@ describe('Monitoring mutations', () => {
dashboardEndpoint: 'dashboard.json',
projectPath: '/gitlab-org/gitlab-foss',
});
expect(stateCopy.metricsEndpoint).toEqual('additional_metrics.json');
expect(stateCopy.environmentsEndpoint).toEqual('environments.json');
expect(stateCopy.deploymentsEndpoint).toEqual('deployments.json');
......@@ -103,46 +83,44 @@ describe('Monitoring mutations', () => {
expect(stateCopy.projectPath).toEqual('/gitlab-org/gitlab-foss');
});
});
describe('SET_QUERY_RESULT', () => {
const metricId = 12;
const id = 'system_metrics_kubernetes_container_memory_total';
const result = [{ values: [[0, 1], [1, 1], [1, 3]] }];
const result = [
{
values: [[0, 1], [1, 1], [1, 3]],
},
];
beforeEach(() => {
const dashboardGroups = metricsDashboardResponse.dashboard.panel_groups;
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, dashboardGroups);
});
it('clears empty state', () => {
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId,
result,
});
expect(stateCopy.showEmptyState).toBe(false);
});
it('sets metricsWithData value', () => {
const uniqId = uniqMetricsId({ metric_id: metricId, id });
const uniqId = uniqMetricsId({
metric_id: metricId,
id,
});
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId: uniqId,
result,
});
expect(stateCopy.metricsWithData).toEqual([uniqId]);
});
it('does not store empty results', () => {
mutations[types.SET_QUERY_RESULT](stateCopy, {
metricId,
result: [],
});
expect(stateCopy.metricsWithData).toEqual([]);
});
});
describe('SET_ALL_DASHBOARDS', () => {
it('stores `undefined` dashboards as an empty array', () => {
mutations[types.SET_ALL_DASHBOARDS](stateCopy, undefined);
......@@ -158,7 +136,6 @@ describe('Monitoring mutations', () => {
it('stores dashboards loaded from the git repository', () => {
mutations[types.SET_ALL_DASHBOARDS](stateCopy, dashboardGitResponse);
expect(stateCopy.allDashboards).toEqual(dashboardGitResponse);
});
});
......
import {
anomalyMockGraphData as importedAnomalyMockGraphData,
deploymentData as importedDeploymentData,
metricsNewGroupsAPIResponse as importedMetricsNewGroupsAPIResponse,
metricsGroupsAPIResponse as importedMetricsGroupsAPIResponse,
environmentData as importedEnvironmentData,
dashboardGitResponse as importedDashboardGitResponse,
} from '../../frontend/monitoring/mock_data';
// TODO Check if these exports are still needed
export const anomalyMockGraphData = importedAnomalyMockGraphData;
export const deploymentData = importedDeploymentData;
export const metricsNewGroupsAPIResponse = importedMetricsNewGroupsAPIResponse;
export const metricsGroupsAPIResponse = importedMetricsGroupsAPIResponse;
export const environmentData = importedEnvironmentData;
export const dashboardGitResponse = importedDashboardGitResponse;
export const mockApiEndpoint = `${gl.TEST_HOST}/monitoring/mock`;
export const mockProjectPath = '/frontend-fixtures/environments-project';
export const mockedQueryResultPayload = {
metricId: '17_system_metrics_kubernetes_container_memory_average',
result: [
......@@ -101,141 +98,6 @@ export const mockedQueryResultPayloadCoresTotal = {
],
};
export const environmentData = [
{
id: 34,
name: 'production',
state: 'available',
external_url: 'http://root-autodevops-deploy.my-fake-domain.com',
environment_type: null,
stop_action: false,
metrics_path: '/root/hello-prometheus/environments/34/metrics',
environment_path: '/root/hello-prometheus/environments/34',
stop_path: '/root/hello-prometheus/environments/34/stop',
terminal_path: '/root/hello-prometheus/environments/34/terminal',
folder_path: '/root/hello-prometheus/environments/folders/production',
created_at: '2018-06-29T16:53:38.301Z',
updated_at: '2018-06-29T16:57:09.825Z',
last_deployment: {
id: 127,
},
},
{
id: 35,
name: 'review/noop-branch',
state: 'available',
external_url: 'http://root-autodevops-deploy-review-noop-branc-die93w.my-fake-domain.com',
environment_type: 'review',
stop_action: true,
metrics_path: '/root/hello-prometheus/environments/35/metrics',
environment_path: '/root/hello-prometheus/environments/35',
stop_path: '/root/hello-prometheus/environments/35/stop',
terminal_path: '/root/hello-prometheus/environments/35/terminal',
folder_path: '/root/hello-prometheus/environments/folders/review',
created_at: '2018-07-03T18:39:41.702Z',
updated_at: '2018-07-03T18:44:54.010Z',
last_deployment: {
id: 128,
},
},
{
id: 36,
name: 'no-deployment/noop-branch',
state: 'available',
created_at: '2018-07-04T18:39:41.702Z',
updated_at: '2018-07-04T18:44:54.010Z',
},
];
export const metricsDashboardResponse = {
dashboard: {
dashboard: 'Environment metrics',
priority: 1,
panel_groups: [
{
group: 'System metrics (Kubernetes)',
priority: 5,
panels: [
{
title: 'Memory Usage (Total)',
type: 'area-chart',
y_label: 'Total Memory Used',
weight: 4,
metrics: [
{
id: 'system_metrics_kubernetes_container_memory_total',
query_range:
'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
label: 'Total',
unit: 'GB',
metric_id: 12,
prometheus_endpoint_path: 'http://test',
},
],
},
{
title: 'Core Usage (Total)',
type: 'area-chart',
y_label: 'Total Cores',
weight: 3,
metrics: [
{
id: 'system_metrics_kubernetes_container_cores_total',
query_range:
'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)',
label: 'Total',
unit: 'cores',
metric_id: 13,
},
],
},
{
title: 'Memory Usage (Pod average)',
type: 'line-chart',
y_label: 'Memory Used per Pod',
weight: 2,
metrics: [
{
id: 'system_metrics_kubernetes_container_memory_average',
query_range:
'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
label: 'Pod average',
unit: 'MB',
metric_id: 14,
},
],
},
],
},
],
},
status: 'success',
};
export const dashboardGitResponse = [
{
default: true,
display_name: 'Default',
can_edit: false,
project_blob_path: null,
path: 'config/prometheus/common_metrics.yml',
},
{
default: false,
display_name: 'Custom Dashboard 1',
can_edit: true,
project_blob_path: `${mockProjectPath}/blob/master/dashboards/.gitlab/dashboards/dashboard_1.yml`,
path: '.gitlab/dashboards/dashboard_1.yml',
},
{
default: false,
display_name: 'Custom Dashboard 2',
can_edit: true,
project_blob_path: `${mockProjectPath}/blob/master/dashboards/.gitlab/dashboards/dashboard_2.yml`,
path: '.gitlab/dashboards/dashboard_2.yml',
},
];
export const graphDataPrometheusQuery = {
title: 'Super Chart A2',
type: 'single-stat',
......
import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
import store from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import {
fetchDashboard,
receiveMetricsDashboardSuccess,
receiveMetricsDashboardFailure,
fetchDeploymentsData,
fetchEnvironmentsData,
fetchPrometheusMetrics,
fetchPrometheusMetric,
requestMetricsData,
setEndpoints,
setGettingStartedEmptyState,
} from '~/monitoring/stores/actions';
import storeState from '~/monitoring/stores/state';
import testAction from 'spec/helpers/vuex_action_helper';
import { resetStore } from '../helpers';
import {
deploymentData,
environmentData,
metricsDashboardResponse,
metricsGroupsAPIResponse,
dashboardGitResponse,
} from '../mock_data';
describe('Monitoring store actions', () => {
let mock;
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => {
resetStore(store);
mock.restore();
});
describe('requestMetricsData', () => {
it('sets emptyState to loading', () => {
const commit = jasmine.createSpy();
const { state } = store;
requestMetricsData({ state, commit });
expect(commit).toHaveBeenCalledWith(types.REQUEST_METRICS_DATA);
});
});
describe('fetchDeploymentsData', () => {
it('commits RECEIVE_DEPLOYMENTS_DATA_SUCCESS on error', done => {
const dispatch = jasmine.createSpy();
const { state } = store;
state.deploymentsEndpoint = '/success';
mock.onGet(state.deploymentsEndpoint).reply(200, {
deployments: deploymentData,
});
fetchDeploymentsData({ state, dispatch })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataSuccess', deploymentData);
done();
})
.catch(done.fail);
});
it('commits RECEIVE_DEPLOYMENTS_DATA_FAILURE on error', done => {
const dispatch = jasmine.createSpy();
const { state } = store;
state.deploymentsEndpoint = '/error';
mock.onGet(state.deploymentsEndpoint).reply(500);
fetchDeploymentsData({ state, dispatch })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveDeploymentsDataFailure');
done();
})
.catch(done.fail);
});
});
describe('fetchEnvironmentsData', () => {
it('commits RECEIVE_ENVIRONMENTS_DATA_SUCCESS on error', done => {
const dispatch = jasmine.createSpy();
const { state } = store;
state.environmentsEndpoint = '/success';
mock.onGet(state.environmentsEndpoint).reply(200, {
environments: environmentData,
});
fetchEnvironmentsData({ state, dispatch })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataSuccess', environmentData);
done();
})
.catch(done.fail);
});
it('commits RECEIVE_ENVIRONMENTS_DATA_FAILURE on error', done => {
const dispatch = jasmine.createSpy();
const { state } = store;
state.environmentsEndpoint = '/error';
mock.onGet(state.environmentsEndpoint).reply(500);
fetchEnvironmentsData({ state, dispatch })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('receiveEnvironmentsDataFailure');
done();
})
.catch(done.fail);
});
});
describe('Set endpoints', () => {
let mockedState;
beforeEach(() => {
mockedState = storeState();
});
it('should commit SET_ENDPOINTS mutation', done => {
testAction(
setEndpoints,
{
metricsEndpoint: 'additional_metrics.json',
deploymentsEndpoint: 'deployments.json',
environmentsEndpoint: 'deployments.json',
},
mockedState,
[
{
type: types.SET_ENDPOINTS,
payload: {
metricsEndpoint: 'additional_metrics.json',
deploymentsEndpoint: 'deployments.json',
environmentsEndpoint: 'deployments.json',
},
},
],
[],
done,
);
});
});
describe('Set empty states', () => {
let mockedState;
beforeEach(() => {
mockedState = storeState();
});
it('should commit SET_METRICS_ENDPOINT mutation', done => {
testAction(
setGettingStartedEmptyState,
null,
mockedState,
[{ type: types.SET_GETTING_STARTED_EMPTY_STATE }],
[],
done,
);
});
});
describe('fetchDashboard', () => {
let dispatch;
let state;
const response = metricsDashboardResponse;
beforeEach(() => {
dispatch = jasmine.createSpy();
state = storeState();
state.dashboardEndpoint = '/dashboard';
});
it('dispatches receive and success actions', done => {
const params = {};
mock.onGet(state.dashboardEndpoint).reply(200, response);
fetchDashboard({ state, dispatch }, params)
.then(() => {
expect(dispatch).toHaveBeenCalledWith('requestMetricsDashboard');
expect(dispatch).toHaveBeenCalledWith('receiveMetricsDashboardSuccess', {
response,
params,
});
done();
})
.catch(done.fail);
});
it('dispatches failure action', done => {
const params = {};
mock.onGet(state.dashboardEndpoint).reply(500);
fetchDashboard({ state, dispatch }, params)
.then(() => {
expect(dispatch).toHaveBeenCalledWith(
'receiveMetricsDashboardFailure',
new Error('Request failed with status code 500'),
);
done();
})
.catch(done.fail);
});
});
describe('receiveMetricsDashboardSuccess', () => {
let commit;
let dispatch;
let state;
beforeEach(() => {
commit = jasmine.createSpy();
dispatch = jasmine.createSpy();
state = storeState();
});
it('stores groups ', () => {
const params = {};
const response = metricsDashboardResponse;
receiveMetricsDashboardSuccess({ state, commit, dispatch }, { response, params });
expect(commit).toHaveBeenCalledWith(
types.RECEIVE_METRICS_DATA_SUCCESS,
metricsDashboardResponse.dashboard.panel_groups,
);
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics', params);
});
it('sets the dashboards loaded from the repository', () => {
const params = {};
const response = metricsDashboardResponse;
response.all_dashboards = dashboardGitResponse;
receiveMetricsDashboardSuccess({ state, commit, dispatch }, { response, params });
expect(commit).toHaveBeenCalledWith(types.SET_ALL_DASHBOARDS, dashboardGitResponse);
});
});
describe('receiveMetricsDashboardFailure', () => {
let commit;
beforeEach(() => {
commit = jasmine.createSpy();
});
it('commits failure action', () => {
receiveMetricsDashboardFailure({ commit });
expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, undefined);
});
it('commits failure action with error', () => {
receiveMetricsDashboardFailure({ commit }, 'uh-oh');
expect(commit).toHaveBeenCalledWith(types.RECEIVE_METRICS_DATA_FAILURE, 'uh-oh');
});
});
describe('fetchPrometheusMetrics', () => {
let commit;
let dispatch;
beforeEach(() => {
commit = jasmine.createSpy();
dispatch = jasmine.createSpy();
});
it('commits empty state when state.groups is empty', done => {
const state = storeState();
const params = {};
fetchPrometheusMetrics({ state, commit, dispatch }, params)
.then(() => {
expect(commit).toHaveBeenCalledWith(types.SET_NO_DATA_EMPTY_STATE);
expect(dispatch).not.toHaveBeenCalled();
done();
})
.catch(done.fail);
});
it('dispatches fetchPrometheusMetric for each panel query', done => {
const params = {};
const state = storeState();
state.dashboard.panel_groups = metricsDashboardResponse.dashboard.panel_groups;
const metric = state.dashboard.panel_groups[0].panels[0].metrics[0];
fetchPrometheusMetrics({ state, commit, dispatch }, params)
.then(() => {
expect(dispatch.calls.count()).toEqual(3);
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', { metric, params });
done();
})
.catch(done.fail);
done();
});
});
describe('fetchPrometheusMetric', () => {
it('commits prometheus query result', done => {
const commit = jasmine.createSpy();
const params = {
start: '2019-08-06T12:40:02.184Z',
end: '2019-08-06T20:40:02.184Z',
};
const metric = metricsDashboardResponse.dashboard.panel_groups[0].panels[0].metrics[0];
const state = storeState();
const data = metricsGroupsAPIResponse[0].panels[0].metrics[0];
const response = { data };
mock.onGet('http://test').reply(200, response);
fetchPrometheusMetric({ state, commit }, { metric, params });
setTimeout(() => {
expect(commit).toHaveBeenCalledWith(types.SET_QUERY_RESULT, {
metricId: metric.metric_id,
result: data.result,
});
done();
});
});
});
});
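
A side note on the async handling that changes in this migration: the Karma version of the `fetchPrometheusMetric` test above fires the action and asserts inside a bare `setTimeout`, while the Jest version chains onto the promise the action returns, so the assertions only run once the mocked request has settled. A self-contained sketch of the promise-based style, using a stand-in endpoint and payload rather than the real store action:

```javascript
// Condensed sketch of the promise-based test style used in the migrated Jest spec.
// The endpoint, payload, and fetchMetric helper are stand-ins, not part of the monitoring store.
import MockAdapter from 'axios-mock-adapter';
import axios from 'axios';

describe('promise-based action test', () => {
  it('asserts after the mocked request settles', () => {
    const mock = new MockAdapter(axios);
    mock.onGet('/metrics').reply(200, { result: [1, 2, 3] });

    const fetchMetric = () => axios.get('/metrics').then(({ data }) => data.result);

    // Returning the promise lets Jest wait for it, instead of relying on setTimeout.
    return fetchMetric().then(result => {
      expect(result).toEqual([1, 2, 3]);
    });
  });
});
```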