Commit 679e6805 authored by Stan Hu

Merge branch 'master' into ce-to-ee-2018-03-29

parents 6732f3fd 6aa5e3ff
...@@ -296,7 +296,7 @@ On each database node perform the following:
# Replace XXX.XXX.XXX.XXX/YY with Network Address
postgresql['trust_auth_cidr_addresses'] = %w(XXX.XXX.XXX.XXX/YY)
repmgr['trust_auth_cidr_addresses'] = %w(127.0.0.1/32 XXX.XXX.XXX.XXX/YY)
# Replace placeholders:
#
...@@ -537,6 +537,12 @@ Ensure that all migrations ran:
gitlab-rake gitlab:db:configure
```
> **Note**: If you encounter a `rake aborted!` error stating that PGBouncer is failing to connect to
PostgreSQL, it may be that your PGBouncer node's IP address is missing from
PostgreSQL's `trust_auth_cidr_addresses` in `gitlab.rb` on your database nodes. See
[PGBouncer error `ERROR: pgbouncer cannot connect to server`](#pgbouncer-error-error-pgbouncer-cannot-connect-to-server)
in the Troubleshooting section before proceeding.
#### Ensure GitLab is running
At this point, your GitLab instance should be up and running. Verify you are
...@@ -966,6 +972,34 @@ For PostgreSQL, it is usually safe to restart the master node by default. Automa
On the consul server nodes, it is important to restart the consul service in a controlled fashion. Read our [consul documentation](consul.md#restarting-the-server-cluster) for instructions on how to restart the service.
#### PGBouncer error `ERROR: pgbouncer cannot connect to server`
You may get this error when running `gitlab-rake gitlab:db:configure` or you
may see the error in the PGBouncer log file.
```
PG::ConnectionBad: ERROR: pgbouncer cannot connect to server
```
The problem may be that your PGBouncer node's IP address is not included in the
`trust_auth_cidr_addresses` setting in `/etc/gitlab/gitlab.rb` on the database nodes.
You can confirm that this is the issue by checking the PostgreSQL log on the master
database node. If you see the following error then `trust_auth_cidr_addresses`
is the problem.
```
2018-03-29_13:59:12.11776 FATAL: no pg_hba.conf entry for host "123.123.123.123", user "pgbouncer", database "gitlabhq_production", SSL off
```
To fix the problem, add the IP address to `/etc/gitlab/gitlab.rb`.
```
postgresql['trust_auth_cidr_addresses'] = %w(123.123.123.123/32 <other_cidrs>)
```
[Reconfigure GitLab] for the changes to take effect.
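For example, on an Omnibus GitLab installation this is typically done by running the following on each affected database node (a minimal sketch; adjust to your own deployment method):
```
sudo gitlab-ctl reconfigure
```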
#### Issues with other components
If you're running into an issue with a component not outlined here, be sure to check the troubleshooting section of their specific documentation page.
......
import axios from '~/lib/utils/axios_utils';
import * as types from './mutation_types';
export const setHeadBlobPath = ({ commit }, blobPath) => commit(types.SET_HEAD_BLOB_PATH, blobPath);
export const setBaseBlobPath = ({ commit }, blobPath) => commit(types.SET_BASE_BLOB_PATH, blobPath);
/**
* SAST
*/
export const setSastHeadPath = ({ commit }, path) => commit(types.SET_SAST_HEAD_PATH, path);
export const setSastBasePath = ({ commit }, path) => commit(types.SET_SAST_BASE_PATH, path);
export const requestSastReports = ({ commit }) => commit(types.REQUEST_SAST_REPORTS);
export const receiveSastReports = ({ commit }, response) =>
commit(types.RECEIVE_SAST_REPORTS, response);
export const receiveSastError = ({ commit }, error) =>
commit(types.RECEIVE_SAST_REPORTS_ERROR, error);
export const fetchSastReports = ({ state, dispatch }) => {
const base = state.sast.paths.base;
const head = state.sast.paths.head;
dispatch('requestSastReports');
Promise.all([
head ? axios.get(head) : Promise.resolve(),
base ? axios.get(base) : Promise.resolve(),
])
.then(values => {
dispatch('receiveSastReports', {
head: values[0] ? values[0].data : null,
base: values[1] ? values[1].data : null,
});
})
.catch(() => {
dispatch('receiveSastError');
});
};
/**
* SAST CONTAINER
*/
export const setSastContainerHeadPath = ({ commit }, path) =>
commit(types.SET_SAST_CONTAINER_HEAD_PATH, path);
export const setSastContainerBasePath = ({ commit }, path) =>
commit(types.SET_SAST_CONTAINER_BASE_PATH, path);
export const requestSastContainerReports = ({ commit }) =>
commit(types.REQUEST_SAST_CONTAINER_REPORTS);
export const receiveSastContainerReports = ({ commit }, response) =>
commit(types.RECEIVE_SAST_CONTAINER_REPORTS, response);
export const receiveSastContainerError = ({ commit }, error) =>
commit(types.RECEIVE_SAST_CONTAINER_ERROR, error);
export const fetchSastContainerReports = ({ state, dispatch }) => {
const base = state.sastContainer.paths.base;
const head = state.sastContainer.paths.head;
dispatch('requestSastContainerReports');
Promise.all([
head ? axios.get(head) : Promise.resolve(),
base ? axios.get(base) : Promise.resolve(),
])
.then(values => {
dispatch('receiveSastContainerReports', {
head: values[0] ? values[0].data : null,
base: values[1] ? values[1].data : null,
});
})
.catch(() => {
dispatch('receiveSastContainerError');
});
};
/**
* DAST
*/
export const setDastHeadPath = ({ commit }, path) => commit(types.SET_DAST_HEAD_PATH, path);
export const setDastBasePath = ({ commit }, path) => commit(types.SET_DAST_BASE_PATH, path);
export const requestDastReports = ({ commit }) => commit(types.REQUEST_DAST_REPORTS);
export const receiveDastReports = ({ commit }, response) =>
commit(types.RECEIVE_DAST_REPORTS, response);
export const receiveDastError = ({ commit }, error) => commit(types.RECEIVE_DAST_ERROR, error);
export const fetchDastReports = ({ state, dispatch }) => {
const base = state.dast.paths.base;
const head = state.dast.paths.head;
dispatch('requestDastReports');
Promise.all([
head ? axios.get(head) : Promise.resolve(),
base ? axios.get(base) : Promise.resolve(),
])
.then(values => {
dispatch('receiveDastReports', {
head: values[0] ? values[0].data : null,
base: values[1] ? values[1].data : null,
});
})
.catch(() => {
dispatch('receiveDastError');
});
};
/**
* DEPENDENCY SCANNING
*/
export const setDependencyScanningHeadPath = ({ commit }, path) =>
commit(types.SET_DEPENDENCY_SCANNING_HEAD_PATH, path);
export const setDependencyScanningBasePath = ({ commit }, path) =>
commit(types.SET_DEPENDENCY_SCANNING_BASE_PATH, path);
export const requestDependencyScanningReports = ({ commit }) =>
commit(types.REQUEST_DEPENDENCY_SCANNING_REPORTS);
export const receiveDependencyScanningReports = ({ commit }, response) =>
commit(types.RECEIVE_DEPENDENCY_SCANNING_REPORTS, response);
export const receiveDependencyScanningError = ({ commit }, error) =>
commit(types.RECEIVE_DEPENDENCY_SCANNING_ERROR, error);
export const fetchDependencyScanningReports = ({ state, dispatch }) => {
const base = state.dependencyScanning.paths.base;
const head = state.dependencyScanning.paths.head;
dispatch('requestDependencyScanningReports');
Promise.all([
head ? axios.get(head) : Promise.resolve(),
base ? axios.get(base) : Promise.resolve(),
])
.then(values => {
dispatch('receiveDependencyScanningReports', {
head: values[0] ? values[0].data : null,
base: values[1] ? values[1].data : null,
});
})
.catch(() => {
dispatch('receiveDependencyScanningError');
});
};
import { n__, s__ } from '~/locale';
import { textBuilder, statusIcon } from './utils';
export const groupedSastText = ({ sast }) =>
textBuilder(
'SAST',
sast.paths,
sast.newIssues.length,
sast.resolvedIssues.length,
sast.allIssues.length,
);
export const groupedSastContainerText = ({ sastContainer }) =>
textBuilder(
'Container scanning',
sastContainer.paths,
sastContainer.newIssues.length,
sastContainer.resolvedIssues.length,
);
export const groupedDastText = ({ dast }) =>
textBuilder('DAST', dast.paths, dast.newIssues.length, dast.resolvedIssues.length);
export const groupedDependencyText = ({ dependencyScanning }) =>
textBuilder(
'Dependency scanning',
dependencyScanning.paths,
dependencyScanning.newIssues.length,
dependencyScanning.resolvedIssues.length,
);
export const groupedSummaryText = (state, getters) => {
const { added, fixed } = state.summaryCounts;
// All reports returned error
if (getters.allReportsHaveError) {
return s__('ciReport|Security scanning failed loading any results');
}
// No base is present in any report
if (getters.noBaseInAllReports) {
if (added > 0) {
return n__(
'Security scanning was unable to compare existing and new vulnerabilities. It detected %d vulnerability',
'Security scanning was unable to compare existing and new vulnerabilities. It detected %d vulnerabilities',
added,
);
}
return s__(
'Security scanning was unable to compare existing and new vulnerabilities. It detected no vulnerabilities.',
);
}
const text = [s__('ciReport|Security scanning')];
if (getters.areReportsLoading) {
text.push('(in progress)');
}
if (added > 0 && fixed === 0) {
text.push(n__('detected %d new vulnerability', 'detected %d new vulnerabilities', added));
}
if (added > 0 && fixed > 0) {
text.push(
`${n__('detected %d new vulnerability', 'detected %d new vulnerabilities', added)} ${n__(
'and %d fixed vulnerability',
'and %d fixed vulnerabilities',
fixed,
)}`,
);
}
if (added === 0 && fixed > 0) {
text.push(n__('detected %d fixed vulnerability', 'detected %d fixed vulnerabilities', fixed));
}
if (added === 0 && fixed === 0) {
text.push(s__('detected no vulnerabilities'));
}
return text.join(' ');
};
export const sastStatusIcon = ({ sast }) => statusIcon(sast.hasError, sast.newIssues.length);
export const sastContainerStatusIcon = ({ sastContainer }) =>
statusIcon(sastContainer.hasError, sastContainer.newIssues.length);
export const dastStatusIcon = ({ dast }) => statusIcon(dast.hasError, dast.newIssues.length);
export const dependencyScanningStatusIcon = ({ dependencyScanning }) =>
statusIcon(dependencyScanning.hasError, dependencyScanning.newIssues.length);
export const areReportsLoading = state =>
state.sast.isLoading ||
state.dast.isLoading ||
state.sastContainer.isLoading ||
state.dependencyScanning.isLoading;
export const allReportsHaveError = state =>
state.sast.hasError &&
state.dast.hasError &&
state.sastContainer.hasError &&
state.dependencyScanning.hasError;
export const anyReportHasError = state =>
state.sast.hasError ||
state.dast.hasError ||
state.sastContainer.hasError ||
state.dependencyScanning.hasError;
export const noBaseInAllReports = state =>
!state.sast.paths.base &&
!state.dast.paths.base &&
!state.sastContainer.paths.base &&
!state.dependencyScanning.paths.base;
import Vue from 'vue';
import Vuex from 'vuex';
import * as actions from './actions';
import * as getters from './getters';
import mutations from './mutations';
import state from './state';
Vue.use(Vuex);
const store = new Vuex.Store({
actions,
getters,
mutations,
state: state(),
});
export default store;
export const SET_HEAD_BLOB_PATH = 'SET_HEAD_BLOB_PATH';
export const SET_BASE_BLOB_PATH = 'SET_BASE_BLOB_PATH';
// SAST
export const SET_SAST_HEAD_PATH = 'SET_SAST_HEAD_PATH';
export const SET_SAST_BASE_PATH = 'SET_SAST_BASE_PATH';
export const REQUEST_SAST_REPORTS = 'REQUEST_SAST_REPORTS';
export const RECEIVE_SAST_REPORTS = 'RECEIVE_SAST_REPORTS';
export const RECEIVE_SAST_REPORTS_ERROR = 'RECEIVE_SAST_REPORTS_ERROR';
// SAST CONTAINER
export const SET_SAST_CONTAINER_HEAD_PATH = 'SET_SAST_CONTAINER_HEAD_PATH';
export const SET_SAST_CONTAINER_BASE_PATH = 'SET_SAST_CONTAINER_BASE_PATH';
export const REQUEST_SAST_CONTAINER_REPORTS = 'REQUEST_SAST_CONTAINER_REPORTS';
export const RECEIVE_SAST_CONTAINER_REPORTS = 'RECEIVE_SAST_CONTAINER_REPORTS';
export const RECEIVE_SAST_CONTAINER_ERROR = 'RECEIVE_SAST_CONTAINER_ERROR';
// DAST
export const SET_DAST_HEAD_PATH = 'SET_DAST_HEAD_PATH';
export const SET_DAST_BASE_PATH = 'SET_DAST_BASE_PATH';
export const REQUEST_DAST_REPORTS = 'REQUEST_DAST_REPORTS';
export const RECEIVE_DAST_REPORTS = 'RECEIVE_DAST_REPORTS';
export const RECEIVE_DAST_ERROR = 'RECEIVE_DAST_ERROR';
// DEPENDENCY_SCANNING
export const SET_DEPENDENCY_SCANNING_HEAD_PATH = 'SET_DEPENDENCY_SCANNING_HEAD_PATH';
export const SET_DEPENDENCY_SCANNING_BASE_PATH = 'SET_DEPENDENCY_SCANNING_BASE_PATH';
export const REQUEST_DEPENDENCY_SCANNING_REPORTS = 'REQUEST_DEPENDENCY_SCANNING_REPORTS';
export const RECEIVE_DEPENDENCY_SCANNING_REPORTS = 'RECEIVE_DEPENDENCY_SCANNING_REPORTS';
export const RECEIVE_DEPENDENCY_SCANNING_ERROR = 'RECEIVE_DEPENDENCY_SCANNING_ERROR';
import * as types from './mutation_types';
import {
parseSastIssues,
filterByKey,
parseSastContainer,
parseDastIssues,
getUnapprovedVulnerabilities,
} from './utils';
export default {
[types.SET_HEAD_BLOB_PATH](state, path) {
Object.assign(state.blobPath, { head: path });
},
[types.SET_BASE_BLOB_PATH](state, path) {
Object.assign(state.blobPath, { base: path });
},
// SAST
[types.SET_SAST_HEAD_PATH](state, path) {
Object.assign(state.sast.paths, { head: path });
},
[types.SET_SAST_BASE_PATH](state, path) {
Object.assign(state.sast.paths, { base: path });
},
[types.REQUEST_SAST_REPORTS](state) {
Object.assign(state.sast, { isLoading: true });
},
/**
* Compares sast results and returns the formatted report
*
* Sast has 3 types of issues: newIssues, resolvedIssues and allIssues.
*
* When we have both base and head:
* - newIssues = head - base
* - resolvedIssues = base - head
* - allIssues = head - newIssues - resolvedIssues
*
* When we only have head
* - newIssues = head
* - resolvedIssues = 0
* - allIssues = 0
*/
[types.RECEIVE_SAST_REPORTS](state, reports) {
if (reports.base && reports.head) {
const filterKey = 'cve';
const parsedHead = parseSastIssues(reports.head, state.blobPath.head);
const parsedBase = parseSastIssues(reports.base, state.blobPath.base);
const newIssues = filterByKey(parsedHead, parsedBase, filterKey);
const resolvedIssues = filterByKey(parsedBase, parsedHead, filterKey);
const allIssues = filterByKey(parsedHead, newIssues.concat(resolvedIssues), filterKey);
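// Illustrative example (hypothetical CVE ids): if head reports [A, B, C] and base reports [B, D],
// then newIssues = [A, C], resolvedIssues = [D] and allIssues = [B] (issues present in both reports).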
Object.assign(state, {
sast: {
...state.sast,
newIssues,
resolvedIssues,
allIssues,
isLoading: false,
},
summaryCounts: {
added: state.summaryCounts.added + newIssues.length,
fixed: state.summaryCounts.fixed + resolvedIssues.length,
},
});
} else if (reports.head && !reports.base) {
const newIssues = parseSastIssues(reports.head, state.blobPath.head);
Object.assign(state.sast, {
newIssues,
isLoading: false,
});
}
},
[types.RECEIVE_SAST_REPORTS_ERROR](state) {
Object.assign(state.sast, {
isLoading: false,
hasError: true,
});
},
// SAST CONTAINER
[types.SET_SAST_CONTAINER_HEAD_PATH](state, path) {
Object.assign(state.sastContainer.paths, { head: path });
},
[types.SET_SAST_CONTAINER_BASE_PATH](state, path) {
Object.assign(state.sastContainer.paths, { base: path });
},
[types.REQUEST_SAST_CONTAINER_REPORTS](state) {
Object.assign(state.sastContainer, { isLoading: true });
},
/**
* For sast container we only render unapproved vulnerabilities.
*/
[types.RECEIVE_SAST_CONTAINER_REPORTS](state, reports) {
if (reports.base && reports.head) {
const headIssues = getUnapprovedVulnerabilities(
parseSastContainer(reports.head.vulnerabilities),
reports.head.unapproved,
);
const baseIssues = getUnapprovedVulnerabilities(
parseSastContainer(reports.base.vulnerabilities),
reports.base.unapproved,
);
const filterKey = 'vulnerability';
const newIssues = filterByKey(headIssues, baseIssues, filterKey);
const resolvedIssues = filterByKey(baseIssues, headIssues, filterKey);
Object.assign(state, {
sastContainer: {
...state.sastContainer,
isLoading: false,
newIssues,
resolvedIssues,
},
summaryCounts: {
added: state.summaryCounts.added + newIssues.length,
fixed: state.summaryCounts.fixed + resolvedIssues.length,
},
});
} else if (reports.head && !reports.base) {
Object.assign(state.sastContainer, {
isLoading: false,
newIssues: getUnapprovedVulnerabilities(
parseSastContainer(reports.head.vulnerabilities),
reports.head.unapproved,
),
});
}
},
[types.RECEIVE_SAST_CONTAINER_ERROR](state) {
Object.assign(state.sastContainer, {
isLoading: false,
hasError: true,
});
},
// DAST
[types.SET_DAST_HEAD_PATH](state, path) {
Object.assign(state.dast.paths, { head: path });
},
[types.SET_DAST_BASE_PATH](state, path) {
Object.assign(state.dast.paths, { base: path });
},
[types.REQUEST_DAST_REPORTS](state) {
Object.assign(state.dast, { isLoading: true });
},
[types.RECEIVE_DAST_REPORTS](state, reports) {
if (reports.head && reports.base) {
const headIssues = parseDastIssues(reports.head.site.alerts);
const baseIssues = parseDastIssues(reports.base.site.alerts);
const filterKey = 'pluginid';
const newIssues = filterByKey(headIssues, baseIssues, filterKey);
const resolvedIssues = filterByKey(baseIssues, headIssues, filterKey);
Object.assign(state, {
dast: {
...state.dast,
isLoading: false,
newIssues,
resolvedIssues,
},
summaryCounts: {
added: state.summaryCounts.added + newIssues.length,
fixed: state.summaryCounts.fixed + resolvedIssues.length,
},
});
} else if (reports.head && !reports.base) {
Object.assign(state.dast, {
isLoading: false,
newIssues: parseDastIssues(reports.head.site.alerts),
});
}
},
[types.RECEIVE_DAST_ERROR](state) {
Object.assign(state.dast, {
isLoading: false,
hasError: true,
});
},
// DEPENDENCY SCANNING
[types.SET_DEPENDENCY_SCANNING_HEAD_PATH](state, path) {
Object.assign(state.dependencyScanning.paths, { head: path });
},
[types.SET_DEPENDENCY_SCANNING_BASE_PATH](state, path) {
Object.assign(state.dependencyScanning.paths, { base: path });
},
[types.REQUEST_DEPENDENCY_SCANNING_REPORTS](state) {
Object.assign(state.dependencyScanning, { isLoading: true });
},
/**
* Compares dependency scanning results and returns the formatted report
*
* Dependency report has 3 types of issues, newIssues, resolvedIssues and allIssues.
*
* When we have both base and head:
* - newIssues = head - base
* - resolvedIssues = base - head
* - allIssues = head - newIssues - resolvedIssues
*
* When we only have head
* - newIssues = head
* - resolvedIssues = 0
* - allIssues = 0
*/
[types.RECEIVE_DEPENDENCY_SCANNING_REPORTS](state, reports) {
if (reports.base && reports.head) {
const filterKey = 'cve';
const parsedHead = parseSastIssues(reports.head, state.blobPath.head);
const parsedBase = parseSastIssues(reports.base, state.blobPath.base);
const newIssues = filterByKey(parsedHead, parsedBase, filterKey);
const resolvedIssues = filterByKey(parsedBase, parsedHead, filterKey);
const allIssues = filterByKey(parsedHead, newIssues.concat(resolvedIssues), filterKey);
Object.assign(state, {
dependencyScanning: {
...state.dependencyScanning,
newIssues,
resolvedIssues,
allIssues,
isLoading: false,
},
summaryCounts: {
added: state.summaryCounts.added + newIssues.length,
fixed: state.summaryCounts.fixed + resolvedIssues.length,
},
});
} else {
Object.assign(state.dependencyScanning, {
newIssues: parseSastIssues(reports.head, state.blobPath.head),
isLoading: false,
});
}
},
[types.RECEIVE_DEPENDENCY_SCANNING_ERROR](state) {
Object.assign(state.dependencyScanning, {
isLoading: false,
hasError: true,
});
},
};
export default () => ({
summaryCounts: {
added: 0,
fixed: 0,
},
blobPath: {
head: null,
base: null,
},
sast: {
paths: {
head: null,
base: null,
},
isLoading: false,
hasError: false,
newIssues: [],
resolvedIssues: [],
allIssues: [],
},
sastContainer: {
paths: {
head: null,
base: null,
},
isLoading: false,
hasError: false,
newIssues: [],
resolvedIssues: [],
},
dast: {
paths: {
head: null,
base: null,
},
isLoading: false,
hasError: false,
newIssues: [],
resolvedIssues: [],
},
dependencyScanning: {
paths: {
head: null,
base: null,
},
isLoading: false,
hasError: false,
newIssues: [],
resolvedIssues: [],
allIssues: [],
},
});
import { stripHtml } from '~/lib/utils/text_utility';
import { n__, s__, sprintf } from '~/locale';
/**
* Maps SAST & Dependency scanning issues:
* { tool: String, message: String, url: String , cve: String ,
* file: String , solution: String, priority: String }
* to contain:
* { name: String, path: String, line: String, urlPath: String, priority: String }
* @param {Array} issues
* @param {String} path
*/
export const parseSastIssues = (issues = [], path = '') =>
issues.map(issue => ({
...issue,
name: issue.message,
path: issue.file,
urlPath: issue.line ? `${path}/${issue.file}#L${issue.line}` : `${path}/${issue.file}`,
}),
);
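// For example (illustrative values): { message: 'XSS', file: 'app/index.js', line: 3 } parsed with
// path 'blob/abc' yields { name: 'XSS', path: 'app/index.js', urlPath: 'blob/abc/app/index.js#L3' }.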
/**
* Parses Sast Container results into a common format to allow to use the same Vue component
* And adds an external link
*
* @param {Array} data
* @returns {Array}
*/
export const parseSastContainer = (data = []) =>
data.map(element => ({
...element,
name: element.vulnerability,
priority: element.severity,
path: element.namespace,
// external link to provide better description
nameLink: `https://cve.mitre.org/cgi-bin/cvename.cgi?name=${element.vulnerability}`,
}));
export const parseDastIssues = (issues = []) =>
issues.map(issue => ({
parsedDescription: stripHtml(issue.desc, ' '),
priority: issue.riskdesc,
...issue,
}));
/**
* Compares two arrays by the given key and returns the difference
*
* @param {Array} firstArray
* @param {Array} secondArray
* @param {String} key
* @returns {Array}
*/
export const filterByKey = (firstArray = [], secondArray = [], key = '') =>
firstArray.filter(item => !secondArray.find(el => el[key] === item[key]));
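// e.g. (illustrative) filterByKey([{ cve: 'A' }, { cve: 'B' }], [{ cve: 'B' }], 'cve') returns [{ cve: 'A' }]:
// items of the first array whose key value does not occur in the second array.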
export const getUnapprovedVulnerabilities = (issues = [], unapproved = []) =>
issues.filter(item => unapproved.find(el => el === item.vulnerability));
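// e.g. (illustrative) getUnapprovedVulnerabilities([{ vulnerability: 'CVE-1' }, { vulnerability: 'CVE-2' }], ['CVE-2'])
// returns [{ vulnerability: 'CVE-2' }]; only issues listed in the report's `unapproved` array are kept.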
export const textBuilder = (
type = '',
paths = {},
newIssues = 0,
resolvedIssues = 0,
allIssues = 0,
) => {
// With no issues
if (newIssues === 0 && resolvedIssues === 0 && allIssues === 0) {
return sprintf(s__('ciReport|%{type} detected no security vulnerabilities'), { type });
}
// with no new or fixed but with vulnerabilities
if (newIssues === 0 && resolvedIssues === 0 && allIssues) {
return sprintf(s__('ciReport|%{type} detected no new security vulnerabilities'), { type });
}
// with new issues and only head
if (newIssues > 0 && !paths.base) {
return sprintf(
n__(
'%{type} was unable to compare existing and new vulnerabilities. It detected %d vulnerability',
'%{type} was unable to compare existing and new vulnerabilities. It detected %d vulnerabilities',
newIssues,
),
{ type },
);
}
// with head + base
if (paths.base && paths.head) {
// with only new issues
if (newIssues > 0 && resolvedIssues === 0) {
return sprintf(
n__(
'%{type} detected %d new vulnerability',
'%{type} detected %d new vulnerabilities',
newIssues,
),
{ type },
);
}
// with new and fixed issues
if (newIssues > 0 && resolvedIssues > 0) {
return `${sprintf(
n__(
'%{type} detected %d new vulnerability',
'%{type} detected %d new vulnerabilities',
newIssues,
),
{ type },
)}
${n__('and %d fixed vulnerability', 'and %d fixed vulnerabilities', resolvedIssues)}`;
}
// with only fixed issues
if (newIssues === 0 && resolvedIssues > 0) {
return sprintf(
n__(
'%{type} detected %d fixed vulnerability',
'%{type} detected %d fixed vulnerabilities',
resolvedIssues,
),
{ type },
);
}
}
return '';
};
export const statusIcon = (failed = false, newIssues = 0, neutralIssues = 0) => {
if (failed || newIssues > 0 || neutralIssues > 0) {
return 'warning';
}
return 'success';
};
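// e.g. statusIcon(false, 0) => 'success'; statusIcon(true, 0) or statusIcon(false, 2) => 'warning'.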
...@@ -20,7 +20,8 @@ class PgReplicationSlot
# http://bdr-project.org/docs/stable/monitoring-peers.html
def self.slots_retained_bytes
ActiveRecord::Base.connection.execute(<<-SQL.squish)
SELECT slot_name, database,
active, #{Gitlab::Database.pg_wal_lsn_diff}(#{Gitlab::Database.pg_current_wal_insert_lsn}(), restart_lsn)
AS retained_bytes
FROM pg_replication_slots;
SQL
...@@ -30,7 +31,7 @@ class PgReplicationSlot
# returns the max number WAL space (in bytes) being used across the replication slots
def self.max_retained_wal
ActiveRecord::Base.connection.execute(<<-SQL.squish)
SELECT COALESCE(MAX(#{Gitlab::Database.pg_wal_lsn_diff}(#{Gitlab::Database.pg_current_wal_insert_lsn}(), restart_lsn)), 0)
FROM pg_replication_slots;
SQL
.first.fetch('coalesce').to_i
......
...@@ -63,7 +63,7 @@ module Gitlab
def data_is_recent_enough?
# It's possible for a replica to not replay WAL data for a while,
# despite being up to date. This can happen when a primary does not
# receive any writes for a while.
#
# To prevent this from happening we check if the lag size (in bytes)
# of the replica is small enough for the replica to be useful. We
...@@ -92,7 +92,10 @@ module Gitlab
# This method will return nil if no lag size could be calculated.
def replication_lag_size
location = connection.quote(primary_write_location)
row = query_and_release(<<-SQL.squish)
SELECT #{Gitlab::Database.pg_wal_lsn_diff}(#{location}, #{Gitlab::Database.pg_last_wal_replay_lsn}())::float
AS diff
SQL
row['diff'].to_i if row.any?
end
...@@ -110,11 +113,14 @@ module Gitlab
def caught_up?(location)
string = connection.quote(location)
# In case the host is a primary pg_last_wal_replay_lsn/pg_last_xlog_replay_location() returns
# NULL. The recovery check ensures we treat the host as up-to-date in
# such a case.
query = <<-SQL.squish
SELECT NOT pg_is_in_recovery()
OR #{Gitlab::Database.pg_wal_lsn_diff}(#{Gitlab::Database.pg_last_wal_replay_lsn}(), #{string}) >= 0
AS result
SQL
row = query_and_release(query)
......
...@@ -89,7 +89,7 @@ module Gitlab
def primary_write_location
read_write do |connection|
row = connection
.select_all("SELECT #{Gitlab::Database.pg_current_wal_insert_lsn}()::text AS location")
.first
if row
......
...@@ -76,14 +76,15 @@ module Gitlab
def self.db_replication_lag_seconds
# Obtain the replication lag in seconds
lag =
ActiveRecord::Base.connection.execute(<<-SQL.squish)
SELECT CASE
WHEN #{Gitlab::Database.pg_last_wal_receive_lsn}() = #{Gitlab::Database.pg_last_wal_replay_lsn}()
THEN 0
ELSE
EXTRACT (EPOCH FROM now() - pg_last_xact_replay_timestamp())::INTEGER
END
AS replication_lag
SQL
.first
.fetch('replication_lag')
......
require 'spec_helper'
feature 'Project settings > Issues', :js do
let(:project) { create(:project, :public) }
let(:user) { create(:user) }
background do
project.add_master(user)
sign_in(user)
end
context 'when Issues are initially enabled' do
context 'when Pipelines are initially enabled' do
before do
visit edit_project_path(project)
end
scenario 'shows the Issues settings' do
expect(page).to have_content('Customize your issue restrictions')
within('.sharing-permissions-form') do
find('.project-feature-controls[data-for="project[project_feature_attributes][issues_access_level]"] .project-feature-toggle').click
click_on('Save changes')
end
expect(page).not_to have_content('Customize your issue restrictions')
end
end
end
context 'when Issues are initially disabled' do
before do
project.project_feature.update_attribute('issues_access_level', ProjectFeature::DISABLED)
visit edit_project_path(project)
end
scenario 'does not show the Issues settings' do
expect(page).not_to have_content('Customize your issue restrictions')
within('.sharing-permissions-form') do
find('.project-feature-controls[data-for="project[project_feature_attributes][issues_access_level]"] .project-feature-toggle').click
click_on('Save changes')
end
expect(page).to have_content('Customize your issue restrictions')
end
end
context 'issuable default templates feature not available' do
......
...@@ -7,9 +7,11 @@ describe PgReplicationSlot, :postgresql do
expect(described_class.max_replication_slots).to be >= 0
end
skip_examples = PgReplicationSlot.max_replication_slots <= PgReplicationSlot.count
context 'with enough slots available' do
before(:all) do
skip('max_replication_slots too small') if skip_examples
@current_slot_count =
ActiveRecord::Base.connection.execute("SELECT COUNT(*) FROM pg_replication_slots;")
.first.fetch('count').to_i
...@@ -21,8 +23,10 @@ describe PgReplicationSlot, :postgresql do
end
after(:all) do
unless skip_examples
ActiveRecord::Base.connection.execute("SELECT pg_drop_replication_slot('test_slot');")
end
end
it '#slots_count' do
expect(described_class.count).to eq(@current_slot_count + 1)
......
require 'spec_helper'
describe Boards::DestroyService do
describe '#execute' do
let(:project) { create(:project) }
let(:group) { create(:group) }
shared_examples 'remove the board' do |parent_name|
let(:parent) { public_send(parent_name) }
let!(:board) { create(:board, parent_name => parent) }
subject(:service) { described_class.new(parent, double) }
context "when #{parent_name} have more than one board" do
it "removes board from #{parent_name}" do
create(:board, parent_name => parent)
expect { service.execute(board) }.to change(parent.boards, :count).by(-1)
end
end
context "when #{parent_name} have one board" do
it "does not remove board from #{parent_name}" do
expect { service.execute(board) }.not_to change(group.boards, :count)
end
end
end
it_behaves_like 'remove the board', :group
it_behaves_like 'remove the board', :project
end
end
...@@ -46,6 +46,10 @@ module Gitlab
database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
end
def self.postgresql_9_or_less?
postgresql? && version.to_f < 10
end
def self.join_lateral_supported?
postgresql? && version.to_f >= 9.3
end
...@@ -58,6 +62,24 @@ module Gitlab
postgresql? && version.to_f >= 9.6
end
# map some of the function names that changed between PostgreSQL 9 and 10
# https://wiki.postgresql.org/wiki/New_in_postgres_10
def self.pg_wal_lsn_diff
Gitlab::Database.postgresql_9_or_less? ? 'pg_xlog_location_diff' : 'pg_wal_lsn_diff'
end
def self.pg_current_wal_insert_lsn
Gitlab::Database.postgresql_9_or_less? ? 'pg_current_xlog_insert_location' : 'pg_current_wal_insert_lsn'
end
def self.pg_last_wal_receive_lsn
Gitlab::Database.postgresql_9_or_less? ? 'pg_last_xlog_receive_location' : 'pg_last_wal_receive_lsn'
end
def self.pg_last_wal_replay_lsn
Gitlab::Database.postgresql_9_or_less? ? 'pg_last_xlog_replay_location' : 'pg_last_wal_replay_lsn'
end
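# Illustrative usage (these helpers are interpolated into raw SQL by callers), e.g.:
#   "SELECT #{Gitlab::Database.pg_wal_lsn_diff}(#{Gitlab::Database.pg_current_wal_insert_lsn}(), restart_lsn)"
# resolves to pg_xlog_location_diff/pg_current_xlog_insert_location on PostgreSQL 9.x.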
def self.nulls_last_order(field, direction = 'ASC')
order = "#{field} #{direction}"
......
require 'spec_helper'
feature 'Project settings > Issues', :js do
let(:project) { create(:project, :public) }
let(:user) { create(:user) }
background do
project.add_master(user)
sign_in(user)
end
context 'when Issues are initially enabled' do
context 'when Pipelines are initially enabled' do
before do
visit edit_project_path(project)
end
scenario 'shows the Issues settings' do
expect(page).to have_content('Customize your issue restrictions')
within('.sharing-permissions-form') do
find('.project-feature-controls[data-for="project[project_feature_attributes][issues_access_level]"] .project-feature-toggle').click
click_on('Save changes')
end
expect(page).not_to have_content('Customize your issue restrictions')
end
end
end
context 'when Issues are initially disabled' do
before do
project.project_feature.update_attribute('issues_access_level', ProjectFeature::DISABLED)
visit edit_project_path(project)
end
scenario 'does not show the Issues settings' do
expect(page).not_to have_content('Customize your issue restrictions')
within('.sharing-permissions-form') do
find('.project-feature-controls[data-for="project[project_feature_attributes][issues_access_level]"] .project-feature-toggle').click
click_on('Save changes')
end
expect(page).to have_content('Customize your issue restrictions')
end
end
end
export const baseIssues = [
{
categories: ['Security'],
...@@ -53,7 +52,8 @@ export const sastIssues = [
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/335P1DcLG00',
cve: 'CVE-2016-0752',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
},
{
tool: 'bundler_audit',
...@@ -61,7 +61,8 @@ export const sastIssues = [
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/9oLY_FCzvoc',
cve: 'CVE-2016-0751',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
},
];
...@@ -72,7 +73,8 @@ export const sastIssuesBase = [
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/335P1DcLG00',
cve: 'CVE-2016-9999',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
},
{
tool: 'bundler_audit',
...@@ -80,7 +82,8 @@ export const sastIssuesBase = [
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/335P1DcLG00',
cve: 'CVE-2016-0752',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
},
];
...@@ -102,7 +105,8 @@ export const parsedSastIssuesStore = [
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/335P1DcLG00',
cve: 'CVE-2016-0752',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
name: 'Possible Information Leak Vulnerability in Action View',
path: 'Gemfile.lock',
urlPath: 'path/Gemfile.lock',
...@@ -113,7 +117,8 @@ export const parsedSastIssuesStore = [
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/9oLY_FCzvoc',
cve: 'CVE-2016-0751',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
name: 'Possible Object Leak and Denial of Service attack in Action Pack',
path: 'Gemfile.lock',
urlPath: 'path/Gemfile.lock',
...@@ -138,42 +143,67 @@ export const parsedSastIssuesHead = [
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/9oLY_FCzvoc',
cve: 'CVE-2016-0751',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
name: 'Possible Object Leak and Denial of Service attack in Action Pack',
path: 'Gemfile.lock',
urlPath: 'path/Gemfile.lock',
},
];
export const parsedSastBaseStore = [
{
name: 'Test Information Leak Vulnerability in Action View',
tool: 'bundler_audit',
message: 'Test Information Leak Vulnerability in Action View',
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/335P1DcLG00',
cve: 'CVE-2016-9999',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
path: 'Gemfile.lock',
urlPath: 'path/Gemfile.lock',
},
];
export const allIssuesParsed = [
{
name: 'Possible Information Leak Vulnerability in Action View',
tool: 'bundler_audit',
message: 'Possible Information Leak Vulnerability in Action View',
url: 'https://groups.google.com/forum/#!topic/rubyonrails-security/335P1DcLG00',
cve: 'CVE-2016-0752',
file: 'Gemfile.lock',
solution:
'upgrade to >= 5.0.0.beta1.1, >= 4.2.5.1, ~> 4.2.5, >= 4.1.14.1, ~> 4.1.14, ~> 3.2.22.1',
path: 'Gemfile.lock',
urlPath: 'path/Gemfile.lock',
},
];
export const dockerReport = {
unapproved: ['CVE-2017-12944', 'CVE-2017-16232'],
vulnerabilities: [
{
vulnerability: 'CVE-2017-12944',
namespace: 'debian:8',
severity: 'Medium',
},
{
vulnerability: 'CVE-2017-16232',
namespace: 'debian:8',
severity: 'Negligible',
},
{
vulnerability: 'CVE-2014-8130',
namespace: 'debian:8',
severity: 'Negligible',
},
],
};
export const dockerBaseReport = {
unapproved: ['CVE-2017-12944'],
vulnerabilities: [
{
vulnerability: 'CVE-2017-12944',
...@@ -193,6 +223,39 @@ export const dockerReport = {
],
};
export const dockerNewIssues = [
{
vulnerability: 'CVE-2017-16232',
namespace: 'debian:8',
severity: 'Negligible',
name: 'CVE-2017-16232',
priority: 'Negligible',
path: 'debian:8',
nameLink: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-16232',
},
];
export const dockerOnlyHeadParsed = [
{
vulnerability: 'CVE-2017-12944',
namespace: 'debian:8',
severity: 'Medium',
name: 'CVE-2017-12944',
priority: 'Medium',
path: 'debian:8',
nameLink: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-12944',
},
{
vulnerability: 'CVE-2017-16232',
namespace: 'debian:8',
severity: 'Negligible',
name: 'CVE-2017-16232',
priority: 'Negligible',
path: 'debian:8',
nameLink: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-16232',
},
];
export const dockerReportParsed = {
unapproved: [
{
...@@ -264,16 +327,19 @@ export const dast = {
riskcode: '1',
riskdesc: 'Low (Medium)',
desc: '<p>No Anti-CSRF tokens were found in a HTML submission form.</p>',
pluginid: '123',
instances: [
{
uri: 'http://192.168.32.236:3001/explore?sort=latest_activity_desc',
method: 'GET',
evidence:
"<form class='navbar-form' action='/search' accept-charset='UTF-8' method='get'>",
},
{
uri: 'http://192.168.32.236:3001/help/user/group/subgroups/index.md',
method: 'GET',
evidence:
"<form class='navbar-form' action='/search' accept-charset='UTF-8' method='get'>",
},
],
},
...@@ -281,7 +347,9 @@ export const dast = {
alert: 'X-Content-Type-Options Header Missing',
name: 'X-Content-Type-Options Header Missing',
riskdesc: 'Low (Medium)',
desc:
'<p>The Anti-MIME-Sniffing header X-Content-Type-Options was not set to "nosniff".</p>',
pluginid: '3456',
instances: [
{
uri: 'http://192.168.32.236:3001/assets/webpack/main.bundle.js',
...@@ -294,6 +362,34 @@ export const dast = {
},
};
export const dastBase = {
site: {
alerts: [
{
name: 'Absence of Anti-CSRF Tokens',
riskcode: '1',
riskdesc: 'Low (Medium)',
desc: '<p>No Anti-CSRF tokens were found in a HTML submission form.</p>',
pluginid: '123',
instances: [
{
uri: 'http://192.168.32.236:3001/explore?sort=latest_activity_desc',
method: 'GET',
evidence:
"<form class='navbar-form' action='/search' accept-charset='UTF-8' method='get'>",
},
{
uri: 'http://192.168.32.236:3001/help/user/group/subgroups/index.md',
method: 'GET',
evidence:
"<form class='navbar-form' action='/search' accept-charset='UTF-8' method='get'>",
},
],
},
],
},
};
export const parsedDast = [
{
name: 'Absence of Anti-CSRF Tokens',
...@@ -302,25 +398,49 @@ export const parsedDast = [
priority: 'Low (Medium)',
desc: '<p>No Anti-CSRF tokens were found in a HTML submission form.</p>',
parsedDescription: ' No Anti-CSRF tokens were found in a HTML submission form. ',
pluginid: '123',
instances: [
{
uri: 'http://192.168.32.236:3001/explore?sort=latest_activity_desc',
method: 'GET',
evidence: "<form class='navbar-form' action='/search' accept-charset='UTF-8' method='get'>",
},
{
uri: 'http://192.168.32.236:3001/help/user/group/subgroups/index.md',
method: 'GET',
evidence: "<form class='navbar-form' action='/search' accept-charset='UTF-8' method='get'>",
},
],
},
{
alert: 'X-Content-Type-Options Header Missing',
name: 'X-Content-Type-Options Header Missing',
riskdesc: 'Low (Medium)',
priority: 'Low (Medium)',
desc: '<p>The Anti-MIME-Sniffing header X-Content-Type-Options was not set to "nosniff".</p>',
pluginid: '3456',
parsedDescription:
' The Anti-MIME-Sniffing header X-Content-Type-Options was not set to "nosniff". ',
instances: [
{
uri: 'http://192.168.32.236:3001/assets/webpack/main.bundle.js',
method: 'GET',
param: 'X-Content-Type-Options',
}, },
],
},
];
export const parsedDastNewIssues = [
{
alert: 'X-Content-Type-Options Header Missing',
name: 'X-Content-Type-Options Header Missing',
riskdesc: 'Low (Medium)',
priority: 'Low (Medium)',
desc: '<p>The Anti-MIME-Sniffing header X-Content-Type-Options was not set to "nosniff".</p>',
pluginid: '3456',
parsedDescription:
' The Anti-MIME-Sniffing header X-Content-Type-Options was not set to "nosniff". ',
instances: [
{
uri: 'http://192.168.32.236:3001/assets/webpack/main.bundle.js',
...@@ -333,7 +453,7 @@ export const parsedDast = [
/**
* SAST report API response for no added & fixed issues but with security issues
*/
export const sastHeadAllIssues = [
{
tool: 'retire',
......
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import * as actions from 'ee/vue_shared/security_reports/store/actions';
import * as types from 'ee/vue_shared/security_reports/store/mutation_types';
import state from 'ee/vue_shared/security_reports/store/state';
import testAction from '../../../helpers/vuex_action_helper';
import {
sastIssues,
sastIssuesBase,
dast,
dastBase,
dockerReport,
dockerBaseReport,
} from '../mock_data';
describe('security reports actions', () => {
let mockedState;
let mock;
beforeEach(() => {
mockedState = state();
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
describe('setHeadBlobPath', () => {
it('should commit set head blob path', done => {
testAction(
actions.setHeadBlobPath,
'path',
mockedState,
[
{
type: types.SET_HEAD_BLOB_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('setBaseBlobPath', () => {
it('should commit set head blob path', done => {
testAction(
actions.setBaseBlobPath,
'path',
mockedState,
[
{
type: types.SET_BASE_BLOB_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('setSastHeadPath', () => {
it('should commit set head blob path', done => {
testAction(
actions.setSastHeadPath,
'path',
mockedState,
[
{
type: types.SET_SAST_HEAD_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('setSastBasePath', () => {
it('should commit set head blob path', done => {
testAction(
actions.setSastBasePath,
'path',
mockedState,
[
{
type: types.SET_SAST_BASE_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('requestSastReports', () => {
it('should commit request mutation', done => {
testAction(
actions.requestSastReports,
null,
mockedState,
[
{
type: types.REQUEST_SAST_REPORTS,
},
],
[],
done,
);
});
});
describe('receiveSastReports', () => {
it('should commit request mutation', done => {
testAction(
actions.receiveSastReports,
{},
mockedState,
[
{
type: types.RECEIVE_SAST_REPORTS,
payload: {},
},
],
[],
done,
);
});
});
describe('receiveSastError', () => {
it('should commit sast error mutation', done => {
testAction(
actions.receiveSastError,
null,
mockedState,
[
{
type: types.RECEIVE_SAST_REPORTS_ERROR,
},
],
[],
done,
);
});
});
describe('fetchSastReports', () => {
describe('with head and base', () => {
it('should dispatch `receiveSastReports`', done => {
mock.onGet('foo').reply(200, sastIssues);
mock.onGet('bar').reply(200, sastIssuesBase);
mockedState.sast.paths.head = 'foo';
mockedState.sast.paths.base = 'bar';
testAction(
actions.fetchSastReports,
null,
mockedState,
[],
[
{
type: 'requestSastReports',
},
{
type: 'receiveSastReports',
payload: { head: sastIssues, base: sastIssuesBase },
},
],
done,
);
});
it('should dispatch `receiveSastError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.sast.paths.head = 'foo';
mockedState.sast.paths.base = 'bar';
testAction(
actions.fetchSastReports,
null,
mockedState,
[],
[
{
type: 'requestSastReports',
},
{
type: 'receiveSastError',
},
],
done,
);
});
});
describe('with head', () => {
it('should dispatch `receiveSastReports`', done => {
mock.onGet('foo').reply(200, sastIssues);
mockedState.sast.paths.head = 'foo';
testAction(
actions.fetchSastReports,
null,
mockedState,
[],
[
{
type: 'requestSastReports',
},
{
type: 'receiveSastReports',
payload: { head: sastIssues, base: null },
},
],
done,
);
});
it('should dispatch `receiveSastError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.sast.paths.head = 'foo';
testAction(
actions.fetchSastReports,
null,
mockedState,
[],
[
{
type: 'requestSastReports',
},
{
type: 'receiveSastError',
},
],
done,
);
});
});
});
describe('setSastContainerHeadPath', () => {
it('should commit set head blob path', done => {
testAction(
actions.setSastContainerHeadPath,
'path',
mockedState,
[
{
type: types.SET_SAST_CONTAINER_HEAD_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('setSastContainerBasePath', () => {
it('should commit set head blob path', done => {
testAction(
actions.setSastContainerBasePath,
'path',
mockedState,
[
{
type: types.SET_SAST_CONTAINER_BASE_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('requestSastContainerReports', () => {
it('should commit request mutation', done => {
testAction(
actions.requestSastContainerReports,
null,
mockedState,
[
{
type: types.REQUEST_SAST_CONTAINER_REPORTS,
},
],
[],
done,
);
});
});
describe('receiveSastContainerReports', () => {
it('should commit sast receive mutation', done => {
testAction(
actions.receiveSastContainerReports,
{},
mockedState,
[
{
type: types.RECEIVE_SAST_CONTAINER_REPORTS,
payload: {},
},
],
[],
done,
);
});
});
describe('receiveSastContainerError', () => {
it('should commit sast error mutation', done => {
testAction(
actions.receiveSastContainerError,
null,
mockedState,
[
{
type: types.RECEIVE_SAST_CONTAINER_ERROR,
},
],
[],
done,
);
});
});
describe('fetchSastContainerReports', () => {
describe('with head and base', () => {
it('should dispatch `receiveSastContainerReports`', done => {
mock.onGet('foo').reply(200, dockerReport);
mock.onGet('bar').reply(200, dockerBaseReport);
mockedState.sastContainer.paths.head = 'foo';
mockedState.sastContainer.paths.base = 'bar';
testAction(
actions.fetchSastContainerReports,
null,
mockedState,
[],
[
{
type: 'requestSastContainerReports',
},
{
type: 'receiveSastContainerReports',
payload: { head: dockerReport, base: dockerBaseReport },
},
],
done,
);
});
it('should dispatch `receiveSastContainerError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.sastContainer.paths.head = 'foo';
mockedState.sastContainer.paths.base = 'bar';
testAction(
actions.fetchSastContainerReports,
null,
mockedState,
[],
[
{
type: 'requestSastContainerReports',
},
{
type: 'receiveSastContainerError',
},
],
done,
);
});
});
describe('with head', () => {
it('should dispatch `receiveSastContainerReports`', done => {
mock.onGet('foo').reply(200, dockerReport);
mockedState.sastContainer.paths.head = 'foo';
testAction(
actions.fetchSastContainerReports,
null,
mockedState,
[],
[
{
type: 'requestSastContainerReports',
},
{
type: 'receiveSastContainerReports',
payload: { head: dockerReport, base: null },
},
],
done,
);
});
it('should dispatch `receiveSastError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.sastContainer.paths.head = 'foo';
testAction(
actions.fetchSastContainerReports,
null,
mockedState,
[],
[
{
type: 'requestSastContainerReports',
},
{
type: 'receiveSastContainerError',
},
],
done,
);
});
});
});
describe('setDastHeadPath', () => {
it('should commit set head blob path', done => {
testAction(
actions.setDastHeadPath,
'path',
mockedState,
[
{
type: types.SET_DAST_HEAD_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('setDastBasePath', () => {
it('should commit set dast base path', done => {
testAction(
actions.setDastBasePath,
'path',
mockedState,
[
{
type: types.SET_DAST_BASE_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('requestDastReports', () => {
it('should commit request mutation', done => {
testAction(
actions.requestDastReports,
null,
mockedState,
[
{
type: types.REQUEST_DAST_REPORTS,
},
],
[],
done,
);
});
});
describe('receiveDastReports', () => {
it('should commit dast receive mutation', done => {
testAction(
actions.receiveDastReports,
{},
mockedState,
[
{
type: types.RECEIVE_DAST_REPORTS,
payload: {},
},
],
[],
done,
);
});
});
describe('receiveDastError', () => {
it('should commit dast error mutation', done => {
testAction(
actions.receiveDastError,
null,
mockedState,
[
{
type: types.RECEIVE_DAST_ERROR,
},
],
[],
done,
);
});
});
describe('fetchDastReports', () => {
describe('with head and base', () => {
it('should dispatch `receiveDastReports`', done => {
mock.onGet('foo').reply(200, dast);
mock.onGet('bar').reply(200, dastBase);
mockedState.dast.paths.head = 'foo';
mockedState.dast.paths.base = 'bar';
testAction(
actions.fetchDastReports,
null,
mockedState,
[],
[
{
type: 'requestDastReports',
},
{
type: 'receiveDastReports',
payload: { head: dast, base: dastBase },
},
],
done,
);
});
it('should dispatch `receiveDastError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.dast.paths.head = 'foo';
mockedState.dast.paths.base = 'bar';
testAction(
actions.fetchDastReports,
null,
mockedState,
[],
[
{
type: 'requestDastReports',
},
{
type: 'receiveDastError',
},
],
done,
);
});
});
describe('with head', () => {
it('should dispatch `receiveDastReports`', done => {
mock.onGet('foo').reply(200, dast);
mockedState.dast.paths.head = 'foo';
testAction(
actions.fetchDastReports,
null,
mockedState,
[],
[
{
type: 'requestDastReports',
},
{
type: 'receiveDastReports',
payload: { head: dast, base: null },
},
],
done,
);
});
it('should dispatch `receiveDastError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.dast.paths.head = 'foo';
testAction(
actions.fetchDastReports,
null,
mockedState,
[],
[
{
type: 'requestDastReports',
},
{
type: 'receiveDastError',
},
],
done,
);
});
});
});
describe('setDependencyScanningHeadPath', () => {
it('should commit set dependency scanning head path', done => {
testAction(
actions.setDependencyScanningHeadPath,
'path',
mockedState,
[
{
type: types.SET_DEPENDENCY_SCANNING_HEAD_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('setDependencyScanningBasePath', () => {
it('should commit set dependency scanning base path', done => {
testAction(
actions.setDependencyScanningBasePath,
'path',
mockedState,
[
{
type: types.SET_DEPENDENCY_SCANNING_BASE_PATH,
payload: 'path',
},
],
[],
done,
);
});
});
describe('requestDependencyScanningReports', () => {
it('should commit request mutation', done => {
testAction(
actions.requestDependencyScanningReports,
null,
mockedState,
[
{
type: types.REQUEST_DEPENDENCY_SCANNING_REPORTS,
},
],
[],
done,
);
});
});
describe('receiveDependencyScanningReports', () => {
it('should commit dependency scanning receive mutation', done => {
testAction(
actions.receiveDependencyScanningReports,
{},
mockedState,
[
{
type: types.RECEIVE_DEPENDENCY_SCANNING_REPORTS,
payload: {},
},
],
[],
done,
);
});
});
describe('receiveDependencyScanningError', () => {
it('should commit dependency scanning error mutation', done => {
testAction(
actions.receiveDependencyScanningError,
null,
mockedState,
[
{
type: types.RECEIVE_DEPENDENCY_SCANNING_ERROR,
},
],
[],
done,
);
});
});
describe('fetchDependencyScanningReports', () => {
describe('with head and base', () => {
it('should dispatch `receiveDependencyScanningReports`', done => {
mock.onGet('foo').reply(200, sastIssues);
mock.onGet('bar').reply(200, sastIssuesBase);
mockedState.dependencyScanning.paths.head = 'foo';
mockedState.dependencyScanning.paths.base = 'bar';
testAction(
actions.fetchDependencyScanningReports,
null,
mockedState,
[],
[
{
type: 'requestDependencyScanningReports',
},
{
type: 'receiveDependencyScanningReports',
payload: { head: sastIssues, base: sastIssuesBase },
},
],
done,
);
});
it('should dispatch `receiveDependencyScanningError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.dependencyScanning.paths.head = 'foo';
mockedState.dependencyScanning.paths.base = 'bar';
testAction(
actions.fetchDependencyScanningReports,
null,
mockedState,
[],
[
{
type: 'requestDependencyScanningReports',
},
{
type: 'receiveDependencyScanningError',
},
],
done,
);
});
});
describe('with head', () => {
it('should dispatch `receiveDependencyScanningReports`', done => {
mock.onGet('foo').reply(200, sastIssues);
mockedState.dependencyScanning.paths.head = 'foo';
testAction(
actions.fetchDependencyScanningReports,
null,
mockedState,
[],
[
{
type: 'requestDependencyScanningReports',
},
{
type: 'receiveDependencyScanningReports',
payload: { head: sastIssues, base: null },
},
],
done,
);
});
it('should dispatch `receiveDependencyScanningError`', done => {
mock.onGet('foo').reply(500, {});
mockedState.dependencyScanning.paths.head = 'foo';
testAction(
actions.fetchDependencyScanningReports,
null,
mockedState,
[],
[
{
type: 'requestDependencyScanningReports',
},
{
type: 'receiveDependencyScanningError',
},
],
done,
);
});
});
});
});
import state from 'ee/vue_shared/security_reports/store/state';
import {
groupedSastText,
groupedSastContainerText,
groupedDastText,
groupedDependencyText,
groupedSummaryText,
allReportsHaveError,
noBaseInAllReports,
areReportsLoading,
sastStatusIcon,
sastContainerStatusIcon,
dastStatusIcon,
dependencyScanningStatusIcon,
anyReportHasError,
} from 'ee/vue_shared/security_reports/store/getters';
describe('Security reports getters', () => {
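// Collapses line breaks and runs of whitespace so multi-line getter output can be
// compared against a single-line expected string.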
function removeBreakLine(data) {
return data.replace(/\r?\n|\r/g, '').replace(/\s\s+/g, ' ');
}
describe('groupedSastText', () => {
describe('with no issues', () => {
it('returns no issues text', () => {
expect(groupedSastText(state())).toEqual('SAST detected no security vulnerabilities');
});
});
describe('with only `all` issues', () => {
it('returns no new issues text', () => {
const newState = state();
newState.sast.allIssues = [{}];
expect(groupedSastText(newState)).toEqual('SAST detected no new security vulnerabilities');
});
});
describe('with new issues and without base', () => {
it('returns unable to compare text', () => {
const newState = state();
newState.sast.paths.head = 'foo';
newState.sast.newIssues = [{}];
expect(groupedSastText(newState)).toEqual(
'SAST was unable to compare existing and new vulnerabilities. It detected 1 vulnerability',
);
});
});
describe('with base and head', () => {
describe('with only new issues', () => {
it('returns new issues text', () => {
const newState = state();
newState.sast.paths.head = 'foo';
newState.sast.paths.base = 'bar';
newState.sast.newIssues = [{}];
expect(groupedSastText(newState)).toEqual('SAST detected 1 new vulnerability');
});
});
describe('with new and resolved issues', () => {
it('returns new and fixed issues text', () => {
const newState = state();
newState.sast.paths.head = 'foo';
newState.sast.paths.base = 'bar';
newState.sast.newIssues = [{}];
newState.sast.resolvedIssues = [{}];
expect(removeBreakLine(groupedSastText(newState))).toEqual(
'SAST detected 1 new vulnerability and 1 fixed vulnerability',
);
});
});
describe('with only resolved issues', () => {
it('returns fixed issues text', () => {
const newState = state();
newState.sast.paths.head = 'foo';
newState.sast.paths.base = 'bar';
newState.sast.resolvedIssues = [{}];
expect(groupedSastText(newState)).toEqual('SAST detected 1 fixed vulnerability');
});
});
});
});
describe('groupedSastContainerText', () => {
describe('with no issues', () => {
it('returns no issues text', () => {
expect(groupedSastContainerText(state())).toEqual(
'Container scanning detected no security vulnerabilities',
);
});
});
describe('with new issues and without base', () => {
it('returns unable to compare text', () => {
const newState = state();
newState.sastContainer.paths.head = 'foo';
newState.sastContainer.newIssues = [{}];
expect(groupedSastContainerText(newState)).toEqual(
'Container scanning was unable to compare existing and new vulnerabilities. It detected 1 vulnerability',
);
});
});
describe('with base and head', () => {
describe('with only new issues', () => {
it('returns new issues text', () => {
const newState = state();
newState.sastContainer.paths.head = 'foo';
newState.sastContainer.paths.base = 'foo';
newState.sastContainer.newIssues = [{}];
expect(groupedSastContainerText(newState)).toEqual(
'Container scanning detected 1 new vulnerability',
);
});
});
describe('with new and resolved issues', () => {
it('returns new and fixed issues text', () => {
const newState = state();
newState.sastContainer.paths.head = 'foo';
newState.sastContainer.paths.base = 'foo';
newState.sastContainer.newIssues = [{}];
newState.sastContainer.resolvedIssues = [{}];
expect(removeBreakLine(groupedSastContainerText(newState))).toEqual(
'Container scanning detected 1 new vulnerability and 1 fixed vulnerability',
);
});
});
describe('with only resolved issues', () => {
it('returns fixed issues text', () => {
const newState = state();
newState.sastContainer.paths.head = 'foo';
newState.sastContainer.paths.base = 'foo';
newState.sastContainer.resolvedIssues = [{}];
expect(groupedSastContainerText(newState)).toEqual(
'Container scanning detected 1 fixed vulnerability',
);
});
});
});
});
describe('groupedDastText', () => {
describe('with no issues', () => {
it('returns no issues text', () => {
expect(groupedDastText(state())).toEqual('DAST detected no security vulnerabilities');
});
});
describe('with new issues and without base', () => {
it('returns unable to compare text', () => {
const newState = state();
newState.dast.paths.head = 'foo';
newState.dast.newIssues = [{}];
expect(groupedDastText(newState)).toEqual(
'DAST was unable to compare existing and new vulnerabilities. It detected 1 vulnerability',
);
});
});
describe('with base and head', () => {
describe('with only new issues', () => {
it('returns new issues text', () => {
const newState = state();
newState.dast.paths.head = 'foo';
newState.dast.paths.base = 'foo';
newState.dast.newIssues = [{}];
expect(groupedDastText(newState)).toEqual('DAST detected 1 new vulnerability');
});
});
describe('with new and resolved issues', () => {
it('returns new and fixed issues text', () => {
const newState = state();
newState.dast.paths.head = 'foo';
newState.dast.paths.base = 'foo';
newState.dast.newIssues = [{}];
newState.dast.resolvedIssues = [{}];
expect(removeBreakLine(groupedDastText(newState))).toEqual(
'DAST detected 1 new vulnerability and 1 fixed vulnerability',
);
});
});
describe('with only resolved issues', () => {
it('returns fixed issues text', () => {
const newState = state();
newState.dast.paths.head = 'foo';
newState.dast.paths.base = 'foo';
newState.dast.resolvedIssues = [{}];
expect(groupedDastText(newState)).toEqual('DAST detected 1 fixed vulnerability');
});
});
});
});
describe('groupedDependencyText', () => {
describe('with no issues', () => {
it('returns no issues text', () => {
expect(groupedDependencyText(state())).toEqual(
'Dependency scanning detected no security vulnerabilities',
);
});
});
describe('with new issues and without base', () => {
it('returns unable to compare text', () => {
const newState = state();
newState.dependencyScanning.paths.head = 'foo';
newState.dependencyScanning.newIssues = [{}];
expect(groupedDependencyText(newState)).toEqual(
'Dependency scanning was unable to compare existing and new vulnerabilities. It detected 1 vulnerability',
);
});
});
describe('with base and head', () => {
describe('with only new issues', () => {
it('returns new issues text', () => {
const newState = state();
newState.dependencyScanning.paths.head = 'foo';
newState.dependencyScanning.paths.base = 'foo';
newState.dependencyScanning.newIssues = [{}];
expect(groupedDependencyText(newState)).toEqual(
'Dependency scanning detected 1 new vulnerability',
);
});
});
describe('with new and resolved issues', () => {
it('returns new and fixed issues text', () => {
const newState = state();
newState.dependencyScanning.paths.head = 'foo';
newState.dependencyScanning.paths.base = 'foo';
newState.dependencyScanning.newIssues = [{}];
newState.dependencyScanning.resolvedIssues = [{}];
expect(removeBreakLine(groupedDependencyText(newState))).toEqual(
'Dependency scanning detected 1 new vulnerability and 1 fixed vulnerability',
);
});
});
describe('with only resolved issues', () => {
it('returns fixed issues text', () => {
const newState = state();
newState.dependencyScanning.paths.head = 'foo';
newState.dependencyScanning.paths.base = 'foo';
newState.dependencyScanning.resolvedIssues = [{}];
expect(groupedDependencyText(newState)).toEqual(
'Dependency scanning detected 1 fixed vulnerability',
);
});
});
});
});
describe('groupedSummaryText', () => {
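// groupedSummaryText receives the state plus an object stubbing the other getters it
// composes (allReportsHaveError, noBaseInAllReports, areReportsLoading).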
it('returns failed text', () => {
expect(
groupedSummaryText(state(), {
allReportsHaveError: true,
noBaseInAllReports: false,
areReportsLoading: false,
}),
).toEqual('Security scanning failed loading any results');
});
it('returns no compare text', () => {
expect(
groupedSummaryText(state(), {
allReportsHaveError: false,
noBaseInAllReports: true,
areReportsLoading: false,
}),
).toEqual(
'Security scanning was unable to compare existing and new vulnerabilities. It detected no vulnerabilities.',
);
});
it('returns in progress text', () => {
expect(
groupedSummaryText(state(), {
allReportsHaveError: false,
noBaseInAllReports: false,
areReportsLoading: true,
}),
).toContain('(in progress)');
});
it('returns added and fixed text', () => {
const newState = state();
newState.summaryCounts = {
added: 2,
fixed: 4,
};
expect(
groupedSummaryText(newState, {
allReportsHaveError: false,
noBaseInAllReports: false,
areReportsLoading: false,
}),
).toContain('Security scanning detected 2 new vulnerabilities and 4 fixed vulnerabilities');
});
it('returns added text', () => {
const newState = state();
newState.summaryCounts = {
added: 2,
fixed: 0,
};
expect(
groupedSummaryText(newState, {
allReportsHaveError: false,
noBaseInAllReports: false,
areReportsLoading: false,
}),
).toContain('Security scanning detected 2 new vulnerabilities');
});
it('returns fixed text', () => {
const newState = Object.assign({}, state());
newState.summaryCounts = {
added: 0,
fixed: 4,
};
expect(
groupedSummaryText(newState, {
allReportsHaveError: false,
noBaseInAllReports: false,
areReportsLoading: false,
}),
).toContain('Security scanning detected 4 fixed vulnerabilities');
});
it('returns added and fixed while loading text', () => {
const newState = Object.assign({}, state());
newState.summaryCounts = {
added: 2,
fixed: 4,
};
expect(
groupedSummaryText(newState, {
allReportsHaveError: false,
noBaseInAllReports: false,
areReportsLoading: true,
}),
).toContain(
'Security scanning (in progress) detected 2 new vulnerabilities and 4 fixed vulnerabilities',
);
});
});
describe('sastStatusIcon', () => {
it('returns warning with new issues', () => {
const newState = Object.assign({}, state());
newState.sast.newIssues = [{}];
expect(sastStatusIcon(newState)).toEqual('warning');
});
it('returns warning with failed report', () => {
const newState = Object.assign({}, state());
newState.sast.hasError = true;
expect(sastStatusIcon(newState)).toEqual('warning');
});
it('returns success with no new issues or failed report', () => {
expect(sastStatusIcon(state())).toEqual('success');
});
});
describe('dastStatusIcon', () => {
it('returns warning with new issues', () => {
const newState = Object.assign({}, state());
newState.dast.newIssues = [{}];
expect(dastStatusIcon(newState)).toEqual('warning');
});
it('returns warning with failed report', () => {
const newState = Object.assign({}, state());
newState.dast.hasError = true;
expect(dastStatusIcon(newState)).toEqual('warning');
});
it('returns success with no new issues or failed report', () => {
expect(dastStatusIcon(state())).toEqual('success');
});
});
describe('sastContainerStatusIcon', () => {
it('returns warning with new issues', () => {
const newState = Object.assign({}, state());
newState.sastContainer.newIssues = [{}];
expect(sastContainerStatusIcon(newState)).toEqual('warning');
});
it('returns warning with failed report', () => {
const newState = Object.assign({}, state());
newState.sastContainer.hasError = true;
expect(sastContainerStatusIcon(newState)).toEqual('warning');
});
it('returns success with no new issues or failed report', () => {
expect(sastContainerStatusIcon(state())).toEqual('success');
});
});
describe('dependencyScanningStatusIcon', () => {
it('returns warning with new issues', () => {
const newState = Object.assign({}, state());
newState.dependencyScanning.newIssues = [{}];
expect(dependencyScanningStatusIcon(newState)).toEqual('warning');
});
it('returns warning with failed report', () => {
const newState = Object.assign({}, state());
newState.dependencyScanning.hasError = true;
expect(dependencyScanningStatusIcon(newState)).toEqual('warning');
});
it('returns success with no new issues or failed report', () => {
expect(dependencyScanningStatusIcon(state())).toEqual('success');
});
});
describe('areReportsLoading', () => {
it('returns true when any report is loading', () => {
const newState = Object.assign({}, state());
newState.sast.isLoading = true;
expect(areReportsLoading(newState)).toEqual(true);
});
it('returns false when none of the reports are loading', () => {
expect(areReportsLoading(state())).toEqual(false);
});
});
describe('allReportsHaveError', () => {
it('returns true when all reports have an error', () => {
const newState = Object.assign({}, state());
newState.sast.hasError = true;
newState.dast.hasError = true;
newState.sastContainer.hasError = true;
newState.dependencyScanning.hasError = true;
expect(allReportsHaveError(newState)).toEqual(true);
});
it('returns false when none of the reports has an error', () => {
expect(allReportsHaveError(state())).toEqual(false);
});
});
describe('anyReportHasError', () => {
it('returns true when any of the reports has error', () => {
const newState = Object.assign({}, state());
newState.sast.hasError = true;
expect(anyReportHasError(newState)).toEqual(true);
});
it('returns false when none of the reports has an error', () => {
expect(anyReportHasError(state())).toEqual(false);
});
});
describe('noBaseInAllReports', () => {
it('returns true when none of the reports has a base', () => {
expect(noBaseInAllReports(state())).toEqual(true);
});
it('returns false when any of the reports has a base', () => {
const newState = Object.assign({}, state());
newState.sast.paths.base = 'foo';
expect(noBaseInAllReports(newState)).toEqual(false);
});
});
});
import state from 'ee/vue_shared/security_reports/store/state';
import mutations from 'ee/vue_shared/security_reports/store/mutations';
import * as types from 'ee/vue_shared/security_reports/store/mutation_types';
import {
sastIssues,
sastIssuesBase,
parsedSastIssuesHead,
parsedSastBaseStore,
dockerReport,
dockerBaseReport,
dockerNewIssues,
dockerOnlyHeadParsed,
dast,
dastBase,
parsedDastNewIssues,
parsedDast,
parsedSastIssuesStore,
} from '../mock_data';
describe('security reports mutations', () => {
let stateCopy;
beforeEach(() => {
stateCopy = state();
});
describe('SET_HEAD_BLOB_PATH', () => {
it('should set head blob path', () => {
mutations[types.SET_HEAD_BLOB_PATH](stateCopy, 'head_blob_path');
expect(stateCopy.blobPath.head).toEqual('head_blob_path');
});
});
describe('SET_BASE_BLOB_PATH', () => {
it('should set base blob path', () => {
mutations[types.SET_BASE_BLOB_PATH](stateCopy, 'base_blob_path');
expect(stateCopy.blobPath.base).toEqual('base_blob_path');
});
});
describe('SET_SAST_HEAD_PATH', () => {
it('should set sast head path', () => {
mutations[types.SET_SAST_HEAD_PATH](stateCopy, 'sast_head_path');
expect(stateCopy.sast.paths.head).toEqual('sast_head_path');
});
});
describe('SET_SAST_BASE_PATH', () => {
it('sets sast base path', () => {
mutations[types.SET_SAST_BASE_PATH](stateCopy, 'sast_base_path');
expect(stateCopy.sast.paths.base).toEqual('sast_base_path');
});
});
describe('REQUEST_SAST_REPORTS', () => {
it('should set sast loading flag to true', () => {
mutations[types.REQUEST_SAST_REPORTS](stateCopy);
expect(stateCopy.sast.isLoading).toEqual(true);
});
});
describe('RECEIVE_SAST_REPORTS', () => {
describe('with head and base', () => {
it('should set new, fixed and all issues', () => {
mutations[types.SET_BASE_BLOB_PATH](stateCopy, 'path');
mutations[types.SET_HEAD_BLOB_PATH](stateCopy, 'path');
mutations[types.RECEIVE_SAST_REPORTS](stateCopy, {
head: sastIssues,
base: sastIssuesBase,
});
expect(stateCopy.sast.isLoading).toEqual(false);
expect(stateCopy.sast.newIssues).toEqual(parsedSastIssuesHead);
expect(stateCopy.sast.resolvedIssues).toEqual(parsedSastBaseStore);
});
});
describe('with head', () => {
it('should set new issues', () => {
mutations[types.SET_HEAD_BLOB_PATH](stateCopy, 'path');
mutations[types.RECEIVE_SAST_REPORTS](stateCopy, {
head: sastIssues,
});
expect(stateCopy.sast.isLoading).toEqual(false);
expect(stateCopy.sast.newIssues).toEqual(parsedSastIssuesStore);
});
});
});
describe('RECEIVE_SAST_REPORTS_ERROR', () => {
it('should set loading flag to false and error flag to true for sast', () => {
mutations[types.RECEIVE_SAST_REPORTS_ERROR](stateCopy);
expect(stateCopy.sast.isLoading).toEqual(false);
expect(stateCopy.sast.hasError).toEqual(true);
});
});
describe('SET_SAST_CONTAINER_HEAD_PATH', () => {
it('should set sast container head path', () => {
mutations[types.SET_SAST_CONTAINER_HEAD_PATH](stateCopy, 'head_path');
expect(stateCopy.sastContainer.paths.head).toEqual('head_path');
});
});
describe('SET_SAST_CONTAINER_BASE_PATH', () => {
it('should set sast container base path', () => {
mutations[types.SET_SAST_CONTAINER_BASE_PATH](stateCopy, 'base_path');
expect(stateCopy.sastContainer.paths.base).toEqual('base_path');
});
});
describe('REQUEST_SAST_CONTAINER_REPORTS', () => {
it('should set sast container loading flag to true', () => {
mutations[types.REQUEST_SAST_CONTAINER_REPORTS](stateCopy);
expect(stateCopy.sastContainer.isLoading).toEqual(true);
});
});
describe('RECEIVE_SAST_CONTAINER_REPORTS', () => {
describe('with head and base', () => {
it('should set new and resolved issues', () => {
mutations[types.RECEIVE_SAST_CONTAINER_REPORTS](stateCopy, {
head: dockerReport,
base: dockerBaseReport,
});
expect(stateCopy.sastContainer.isLoading).toEqual(false);
expect(stateCopy.sastContainer.newIssues).toEqual(dockerNewIssues);
expect(stateCopy.sastContainer.resolvedIssues).toEqual([]);
});
});
describe('with head', () => {
it('should set new issues', () => {
mutations[types.RECEIVE_SAST_CONTAINER_REPORTS](stateCopy, {
head: dockerReport,
});
expect(stateCopy.sastContainer.isLoading).toEqual(false);
expect(stateCopy.sastContainer.newIssues).toEqual(dockerOnlyHeadParsed);
});
});
});
describe('RECEIVE_SAST_CONTAINER_ERROR', () => {
it('should set sast container loading flag to false and error flag to true', () => {
mutations[types.RECEIVE_SAST_CONTAINER_ERROR](stateCopy);
expect(stateCopy.sastContainer.isLoading).toEqual(false);
expect(stateCopy.sastContainer.hasError).toEqual(true);
});
});
describe('SET_DAST_HEAD_PATH', () => {
it('should set dast head path', () => {
mutations[types.SET_DAST_HEAD_PATH](stateCopy, 'head_path');
expect(stateCopy.dast.paths.head).toEqual('head_path');
});
});
describe('SET_DAST_BASE_PATH', () => {
it('should set dast base path', () => {
mutations[types.SET_DAST_BASE_PATH](stateCopy, 'base_path');
expect(stateCopy.dast.paths.base).toEqual('base_path');
});
});
describe('REQUEST_DAST_REPORTS', () => {
it('should set dast loading flag to true', () => {
mutations[types.REQUEST_DAST_REPORTS](stateCopy);
expect(stateCopy.dast.isLoading).toEqual(true);
});
});
describe('RECEIVE_DAST_REPORTS', () => {
describe('with head and base', () => {
it('sets new and resolved issues with the given data', () => {
mutations[types.RECEIVE_DAST_REPORTS](stateCopy, {
head: dast,
base: dastBase,
});
expect(stateCopy.dast.isLoading).toEqual(false);
expect(stateCopy.dast.newIssues).toEqual(parsedDastNewIssues);
expect(stateCopy.dast.resolvedIssues).toEqual([]);
});
});
describe('with head', () => {
it('sets new issues with the given data', () => {
mutations[types.RECEIVE_DAST_REPORTS](stateCopy, {
head: dast,
});
expect(stateCopy.dast.isLoading).toEqual(false);
expect(stateCopy.dast.newIssues).toEqual(parsedDast);
});
});
});
describe('RECEIVE_DAST_ERROR', () => {
it('should set dast loading flag to false and error flag to true', () => {
mutations[types.RECEIVE_DAST_ERROR](stateCopy);
expect(stateCopy.dast.isLoading).toEqual(false);
expect(stateCopy.dast.hasError).toEqual(true);
});
});
describe('SET_DEPENDENCY_SCANNING_HEAD_PATH', () => {
it('should set dependency scanning head path', () => {
mutations[types.SET_DEPENDENCY_SCANNING_HEAD_PATH](stateCopy, 'head_path');
expect(stateCopy.dependencyScanning.paths.head).toEqual('head_path');
});
});
describe('SET_DEPENDENCY_SCANNING_BASE_PATH', () => {
it('should set dependency scanning base path', () => {
mutations[types.SET_DEPENDENCY_SCANNING_BASE_PATH](stateCopy, 'base_path');
expect(stateCopy.dependencyScanning.paths.base).toEqual('base_path');
});
});
describe('REQUEST_DEPENDENCY_SCANNING_REPORTS', () => {
it('should set dependency scanning loading flag to true', () => {
mutations[types.REQUEST_DEPENDENCY_SCANNING_REPORTS](stateCopy);
expect(stateCopy.dependencyScanning.isLoading).toEqual(true);
});
});
describe('RECEIVE_DEPENDENCY_SCANNING_REPORTS', () => {
describe('with head and base', () => {
it('should set new, fixed and all issues', () => {
mutations[types.SET_BASE_BLOB_PATH](stateCopy, 'path');
mutations[types.SET_HEAD_BLOB_PATH](stateCopy, 'path');
mutations[types.RECEIVE_DEPENDENCY_SCANNING_REPORTS](stateCopy, {
head: sastIssues,
base: sastIssuesBase,
});
expect(stateCopy.dependencyScanning.isLoading).toEqual(false);
expect(stateCopy.dependencyScanning.newIssues).toEqual(parsedSastIssuesHead);
expect(stateCopy.dependencyScanning.resolvedIssues).toEqual(parsedSastBaseStore);
});
});
describe('with head', () => {
it('should set new issues', () => {
mutations[types.SET_HEAD_BLOB_PATH](stateCopy, 'path');
mutations[types.RECEIVE_DEPENDENCY_SCANNING_REPORTS](stateCopy, {
head: sastIssues,
});
expect(stateCopy.dependencyScanning.isLoading).toEqual(false);
expect(stateCopy.dependencyScanning.newIssues).toEqual(parsedSastIssuesStore);
});
});
});
describe('RECEIVE_DEPENDENCY_SCANNING_ERROR', () => {
it('should set dependency scanning loading flag to false and error flag to true', () => {
mutations[types.RECEIVE_DEPENDENCY_SCANNING_ERROR](stateCopy);
expect(stateCopy.dependencyScanning.isLoading).toEqual(false);
expect(stateCopy.dependencyScanning.hasError).toEqual(true);
});
});
});
import {
parseSastIssues,
parseSastContainer,
parseDastIssues,
filterByKey,
getUnapprovedVulnerabilities,
textBuilder,
statusIcon,
} from 'ee/vue_shared/security_reports/store/utils';
import { sastIssues, dockerReport, dast, parsedDast } from '../mock_data';
describe('security reports utils', () => {
describe('parseSastIssues', () => {
it('should parse the received issues', () => {
const security = parseSastIssues(sastIssues, 'path')[0];
expect(security.name).toEqual(sastIssues[0].message);
expect(security.path).toEqual(sastIssues[0].file);
});
});
describe('parseSastContainer', () => {
it('parses sast container issues', () => {
const parsed = parseSastContainer(dockerReport.vulnerabilities)[0];
expect(parsed.name).toEqual(dockerReport.vulnerabilities[0].vulnerability);
expect(parsed.priority).toEqual(dockerReport.vulnerabilities[0].severity);
expect(parsed.path).toEqual(dockerReport.vulnerabilities[0].namespace);
expect(parsed.nameLink).toEqual(
`https://cve.mitre.org/cgi-bin/cvename.cgi?name=${
dockerReport.vulnerabilities[0].vulnerability
}`,
);
});
});
describe('parseDastIssues', () => {
it('parses the dast report', () => {
expect(parseDastIssues(dast.site.alerts)).toEqual(parsedDast);
});
});
describe('filterByKey', () => {
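// filterByKey(source, comparison, key) is expected to keep entries of the first array
// whose `key` value does not appear in the second, as asserted below.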
it('filters the array with the provided key', () => {
const array1 = [{ id: '1234' }, { id: 'abg543' }, { id: '214swfA' }];
const array2 = [{ id: '1234' }, { id: 'abg543' }, { id: '453OJKs' }];
expect(filterByKey(array1, array2, 'id')).toEqual([{ id: '214swfA' }]);
});
});
describe('getUnapprovedVulnerabilities', () => {
it('return unapproved vulnerabilities', () => {
const unapproved = getUnapprovedVulnerabilities(
dockerReport.vulnerabilities,
dockerReport.unapproved,
);
expect(unapproved.length).toEqual(dockerReport.unapproved.length);
expect(unapproved[0].vulnerability).toEqual(dockerReport.unapproved[0]);
expect(unapproved[1].vulnerability).toEqual(dockerReport.unapproved[1]);
});
});
describe('textBuilder', () => {
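// Argument order inferred from the calls below:
// textBuilder(type, paths, newIssuesCount, resolvedIssuesCount, allIssuesCount).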
describe('with no issues', () => {
it('should return no vulnerabilities text', () => {
expect(textBuilder()).toEqual(' detected no security vulnerabilities');
});
});
describe('with only `all` issues', () => {
it('should return no new vulnerabilities text', () => {
expect(textBuilder('', {}, 0, 0, 1)).toEqual(' detected no new security vulnerabilities');
});
});
describe('with new issues and without base', () => {
it('should return unable to compare text', () => {
expect(textBuilder('', { head: 'foo' }, 1, 0, 0)).toEqual(
' was unable to compare existing and new vulnerabilities. It detected 1 vulnerability',
);
});
});
describe('with base and head', () => {
describe('with only new issues', () => {
it('should return new issues text', () => {
expect(textBuilder('', { head: 'foo', base: 'foo' }, 1, 0, 0)).toEqual(
' detected 1 new vulnerability',
);
});
});
describe('with new and resolved issues', () => {
it('should return new and fixed issues text', () => {
expect(
textBuilder('', { head: 'foo', base: 'foo' }, 1, 1, 0).replace(/\n+\s+/m, ' '),
).toEqual(' detected 1 new vulnerability and 1 fixed vulnerability');
});
});
describe('with only resolved issues', () => {
it('should return fixed issues text', () => {
expect(textBuilder('', { head: 'foo', base: 'foo' }, 0, 1, 0)).toEqual(
' detected 1 fixed vulnerability',
);
});
});
});
});
describe('statusIcon', () => {
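// Argument order inferred from the calls below:
// statusIcon(failed, newIssuesCount, neutralIssuesCount).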
describe('with failed report', () => {
it('returns warning', () => {
expect(statusIcon(true)).toEqual('warning');
});
});
describe('with new issues', () => {
it('returns warning', () => {
expect(statusIcon(false, 1)).toEqual('warning');
});
});
describe('with neutral issues', () => {
it('returns warning', () => {
expect(statusIcon(false, 0, 1)).toEqual('warning');
});
});
describe('without new or neutral issues', () => {
it('returns success', () => {
expect(statusIcon()).toEqual('success');
});
});
});
});
...@@ -51,6 +51,28 @@ describe Gitlab::Database do
end
end
describe '.postgresql_9_or_less?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.postgresql_9_or_less?).to eq(false)
end
it 'returns true when using PostgreSQL 9.6' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.6')
expect(described_class.postgresql_9_or_less?).to eq(true)
end
it 'returns false when using PostgreSQL 10 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('10')
expect(described_class.postgresql_9_or_less?).to eq(false)
end
end
describe '.join_lateral_supported?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
...@@ -95,6 +117,70 @@ describe Gitlab::Database do
end
end
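# PostgreSQL 10 renamed the xlog/location administration functions to wal/lsn names;
# these helpers return whichever function name matches the running server version.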
describe '.pg_wal_lsn_diff' do
it 'returns old name when using PostgreSQL 9.6' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.6')
expect(described_class.pg_wal_lsn_diff).to eq('pg_xlog_location_diff')
end
it 'returns new name when using PostgreSQL 10 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('10')
expect(described_class.pg_wal_lsn_diff).to eq('pg_wal_lsn_diff')
end
end
describe '.pg_current_wal_insert_lsn' do
it 'returns old name when using PostgreSQL 9.6' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.6')
expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_xlog_insert_location')
end
it 'returns new name when using PostgreSQL 10 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('10')
expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_wal_insert_lsn')
end
end
describe '.pg_last_wal_receive_lsn' do
it 'returns old name when using PostgreSQL 9.6' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.6')
expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_xlog_receive_location')
end
it 'returns new name when using PostgreSQL 10 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('10')
expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_wal_receive_lsn')
end
end
describe '.pg_last_wal_replay_lsn' do
it 'returns old name when using PostgreSQL 9.6' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('9.6')
expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_xlog_replay_location')
end
it 'returns new name when using PostgreSQL 10 or newer' do
allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:version).and_return('10')
expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_wal_replay_lsn')
end
end
describe '.nulls_last_order' do
context 'when using PostgreSQL' do
before do
...
require 'spec_helper'
describe Boards::DestroyService do
describe '#execute' do
let(:project) { create(:project) }
let!(:board) { create(:board, project: project) }
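# The service's second argument (presumably the current user) is not exercised by
# these examples, so a bare double stands in for it.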
subject(:service) { described_class.new(project, double) }
context 'when project has more than one board' do
it 'removes board from project' do
create(:board, project: project)
expect { service.execute(board) }.to change(project.boards, :count).by(-1)
end
end
context 'when project has one board' do
it 'does not remove board from project' do
expect { service.execute(board) }.not_to change(project.boards, :count)
end
end
end
end