Commit 9cda7958 authored by Sean McGivern

Merge branch 'ce-to-ee-2018-09-03' into 'master'

CE upstream - 2018-09-03 12:23 UTC

Closes #2635

See merge request gitlab-org/gitlab-ee!7210
parents fdd10597 8f498bea
......@@ -417,10 +417,13 @@ review-docs-cleanup:
environment:
name: review-docs/$CI_COMMIT_REF_SLUG
action: stop
when: manual
script:
- gem install gitlab --no-ri --no-rdoc
- ./$SCRIPT_NAME cleanup
when: manual
only:
- branches@gitlab-org/gitlab-ce
- branches@gitlab-org/gitlab-ee
##
# Trigger a docker image build in CNG (Cloud Native GitLab) repository
......
import Visibility from 'visibilityjs';
import * as types from './mutation_types';
import axios from '../../lib/utils/axios_utils';
import Poll from '../../lib/utils/poll';
import { setCiStatusFavicon } from '../../lib/utils/common_utils';
import flash from '../../flash';
import { __ } from '../../locale';
export const setJobEndpoint = ({ commit }, endpoint) => commit(types.SET_JOB_ENDPOINT, endpoint);
export const setTraceEndpoint = ({ commit }, endpoint) =>
commit(types.SET_TRACE_ENDPOINT, endpoint);
export const setStagesEndpoint = ({ commit }, endpoint) =>
commit(types.SET_STAGES_ENDPOINT, endpoint);
export const setJobsEndpoint = ({ commit }, endpoint) => commit(types.SET_JOBS_ENDPOINT, endpoint);
let eTagPoll;
export const clearEtagPoll = () => {
eTagPoll = null;
};
export const stopPolling = () => {
if (eTagPoll) eTagPoll.stop();
};
export const restartPolling = () => {
if (eTagPoll) eTagPoll.restart();
};
export const requestJob = ({ commit }) => commit(types.REQUEST_JOB);
export const fetchJob = ({ state, dispatch }) => {
dispatch('requestJob');
eTagPoll = new Poll({
resource: {
getJob(endpoint) {
return axios.get(endpoint);
},
},
data: state.jobEndpoint,
method: 'getJob',
successCallback: ({ data }) => dispatch('receiveJobSuccess', data),
errorCallback: () => dispatch('receiveJobError'),
});
if (!Visibility.hidden()) {
eTagPoll.makeRequest();
} else {
axios
.get(state.jobEndpoint)
.then(({ data }) => dispatch('receiveJobSuccess', data))
.catch(() => dispatch('receiveJobError'));
}
Visibility.change(() => {
if (!Visibility.hidden()) {
dispatch('restartPolling');
} else {
dispatch('stopPolling');
}
});
};
export const receiveJobSuccess = ({ commit }, data) => commit(types.RECEIVE_JOB_SUCCESS, data);
export const receiveJobError = ({ commit }) => {
commit(types.RECEIVE_JOB_ERROR);
flash(__('An error occurred while fetching the job.'));
};
/**
* Job's Trace
*/
export const scrollTop = ({ commit }) => {
commit(types.SCROLL_TO_TOP);
window.scrollTo({ top: 0 });
};
export const scrollBottom = ({ commit }) => {
commit(types.SCROLL_TO_BOTTOM);
window.scrollTo({ top: document.documentElement.scrollHeight }); // document.height is non-standard; scroll to the full document height
};
export const requestTrace = ({ commit }) => commit(types.REQUEST_TRACE);
let traceTimeout;
export const fetchTrace = ({ dispatch, state }) => {
dispatch('requestTrace');
axios
.get(`${state.traceEndpoint}/trace.json`, {
params: { state: state.traceState },
})
.then(({ data }) => {
if (!state.fetchingStatusFavicon) {
dispatch('fetchFavicon');
}
dispatch('receiveTraceSuccess', data);
if (!data.complete) {
traceTimeout = setTimeout(() => {
dispatch('fetchTrace');
}, 4000);
} else {
dispatch('stopPollingTrace');
}
})
.catch(() => dispatch('receiveTraceError'));
};
export const stopPollingTrace = ({ commit }) => {
commit(types.STOP_POLLING_TRACE);
clearTimeout(traceTimeout);
};
export const receiveTraceSuccess = ({ commit }, log) => commit(types.RECEIVE_TRACE_SUCCESS, log);
export const receiveTraceError = ({ commit }) => {
commit(types.RECEIVE_TRACE_ERROR);
clearTimeout(traceTimeout);
flash(__('An error occurred while fetching the job log.'));
};
export const fetchFavicon = ({ state, dispatch }) => {
dispatch('requestStatusFavicon');
setCiStatusFavicon(`${state.pagePath}/status.json`)
.then(() => dispatch('receiveStatusFaviconSuccess'))
.catch(() => dispatch('requestStatusFaviconError'));
};
export const requestStatusFavicon = ({ commit }) => commit(types.REQUEST_STATUS_FAVICON);
export const receiveStatusFaviconSuccess = ({ commit }) =>
commit(types.RECEIVE_STATUS_FAVICON_SUCCESS);
export const requestStatusFaviconError = ({ commit }) => commit(types.RECEIVE_STATUS_FAVICON_ERROR);
/**
* Stages dropdown on sidebar
*/
export const requestStages = ({ commit }) => commit(types.REQUEST_STAGES);
export const fetchStages = ({ state, dispatch }) => {
dispatch('requestStages');
axios
.get(state.stagesEndpoint)
.then(({ data }) => dispatch('receiveStagesSuccess', data))
.catch(() => dispatch('receiveStagesError'));
};
export const receiveStagesSuccess = ({ commit }, data) =>
commit(types.RECEIVE_STAGES_SUCCESS, data);
export const receiveStagesError = ({ commit }) => {
commit(types.RECEIVE_STAGES_ERROR);
flash(__('An error occurred while fetching stages.'));
};
/**
* Jobs list on sidebar - depends on the stages dropdown
*/
export const requestJobsForStage = ({ commit }) => commit(types.REQUEST_JOBS_FOR_STAGE);
export const setSelectedStage = ({ commit }, stage) => commit(types.SET_SELECTED_STAGE, stage);
// On stage click, set the selected stage and fetch its jobs
export const fetchJobsForStage = ({ state, dispatch }, stage) => {
dispatch('setSelectedStage', stage);
dispatch('requestJobsForStage');
axios
.get(state.stageJobsEndpoint)
.then(({ data }) => dispatch('receiveJobsForStageSuccess', data))
.catch(() => dispatch('receiveJobsForStageError'));
};
export const receiveJobsForStageSuccess = ({ commit }, data) =>
commit(types.RECEIVE_JOBS_FOR_STAGE_SUCCESS, data);
export const receiveJobsForStageError = ({ commit }) => {
commit(types.RECEIVE_JOBS_FOR_STAGE_ERROR);
flash(__('An error occurred while fetching the jobs.'));
};
// prevent babel-plugin-rewire from generating an invalid default during karma tests
export default () => {};
import Vue from 'vue';
import Vuex from 'vuex';
import state from './state';
import * as actions from './actions';
import mutations from './mutations';
Vue.use(Vuex);
export default () => new Vuex.Store({
actions,
mutations,
state: state(),
});
export const SET_JOB_ENDPOINT = 'SET_JOB_ENDPOINT';
export const SET_TRACE_ENDPOINT = 'SET_TRACE_ENDPOINT';
export const SET_STAGES_ENDPOINT = 'SET_STAGES_ENDPOINT';
export const SET_JOBS_ENDPOINT = 'SET_JOBS_ENDPOINT';
export const SCROLL_TO_TOP = 'SCROLL_TO_TOP';
export const SCROLL_TO_BOTTOM = 'SCROLL_TO_BOTTOM';
export const REQUEST_JOB = 'REQUEST_JOB';
export const RECEIVE_JOB_SUCCESS = 'RECEIVE_JOB_SUCCESS';
export const RECEIVE_JOB_ERROR = 'RECEIVE_JOB_ERROR';
export const REQUEST_TRACE = 'REQUEST_TRACE';
export const STOP_POLLING_TRACE = 'STOP_POLLING_TRACE';
export const RECEIVE_TRACE_SUCCESS = 'RECEIVE_TRACE_SUCCESS';
export const RECEIVE_TRACE_ERROR = 'RECEIVE_TRACE_ERROR';
export const REQUEST_STATUS_FAVICON = 'REQUEST_STATUS_FAVICON';
export const RECEIVE_STATUS_FAVICON_SUCCESS = 'RECEIVE_STATUS_FAVICON_SUCCESS';
export const RECEIVE_STATUS_FAVICON_ERROR = 'RECEIVE_STATUS_FAVICON_ERROR';
export const REQUEST_STAGES = 'REQUEST_STAGES';
export const RECEIVE_STAGES_SUCCESS = 'RECEIVE_STAGES_SUCCESS';
export const RECEIVE_STAGES_ERROR = 'RECEIVE_STAGES_ERROR';
export const SET_SELECTED_STAGE = 'SET_SELECTED_STAGE';
export const REQUEST_JOBS_FOR_STAGE = 'REQUEST_JOBS_FOR_STAGE';
export const RECEIVE_JOBS_FOR_STAGE_SUCCESS = 'RECEIVE_JOBS_FOR_STAGE_SUCCESS';
export const RECEIVE_JOBS_FOR_STAGE_ERROR = 'RECEIVE_JOBS_FOR_STAGE_ERROR';
/* eslint-disable no-param-reassign */
import * as types from './mutation_types';
export default {
[types.REQUEST_STATUS_FAVICON](state) {
state.fetchingStatusFavicon = true;
},
[types.RECEIVE_STATUS_FAVICON_SUCCESS](state) {
state.fetchingStatusFavicon = false;
},
[types.RECEIVE_STATUS_FAVICON_ERROR](state) {
state.fetchingStatusFavicon = false;
},
[types.RECEIVE_TRACE_SUCCESS](state, log) {
if (log.state) {
state.traceState = log.state;
}
if (log.append) {
state.trace += log.html;
state.traceSize += log.size;
} else {
state.trace = log.html;
state.traceSize = log.size;
}
if (state.traceSize < log.total) {
state.isTraceSizeVisible = true;
} else {
state.isTraceSizeVisible = false;
}
state.isTraceComplete = log.complete;
state.hasTraceError = false;
},
[types.STOP_POLLING_TRACE](state) {
state.isTraceComplete = true;
},
// todo_fl: check this.
[types.RECEIVE_TRACE_ERROR](state) {
state.isLoadingTrace = false;
state.isTraceComplete = true;
state.hasTraceError = true;
},
[types.REQUEST_JOB](state) {
state.isLoading = true;
},
[types.RECEIVE_JOB_SUCCESS](state, job) {
state.isLoading = false;
state.hasError = false;
state.job = job;
},
[types.RECEIVE_JOB_ERROR](state) {
state.isLoading = false;
state.hasError = true;
state.job = {};
},
[types.SCROLL_TO_TOP](state) {
state.isTraceScrolledToBottom = false;
state.hasBeenScrolled = true;
},
[types.SCROLL_TO_BOTTOM](state) {
state.isTraceScrolledToBottom = true;
state.hasBeenScrolled = true;
},
[types.REQUEST_STAGES](state) {
state.isLoadingStages = true;
},
[types.RECEIVE_STAGES_SUCCESS](state, stages) {
state.isLoadingStages = false;
state.stages = stages;
},
[types.RECEIVE_STAGES_ERROR](state) {
state.isLoadingStages = false;
state.stages = [];
},
[types.REQUEST_JOBS_FOR_STAGE](state) {
state.isLoadingJobs = true;
},
[types.RECEIVE_JOBS_FOR_STAGE_SUCCESS](state, jobs) {
state.isLoadingJobs = false;
state.jobs = jobs;
},
[types.RECEIVE_JOBS_FOR_STAGE_ERROR](state) {
state.isLoadingJobs = false;
state.jobs = [];
},
};
export default () => ({
jobEndpoint: null,
traceEndpoint: null,
// dropdown options
stagesEndpoint: null,
// list of jobs on the sidebar
stageJobsEndpoint: null,
// job log
isLoading: false,
hasError: false,
job: {},
// trace
isLoadingTrace: false,
hasTraceError: false,
trace: '',
isTraceScrolledToBottom: false,
hasBeenScrolled: false,
isTraceComplete: false,
traceSize: 0, // todo_fl: needs to be converted into human readable format in components
isTraceSizeVisible: false,
fetchingStatusFavicon: false,
// used as a query parameter
traceState: null,
// used to check if we need to redirect the user - todo_fl: check if actually needed
traceStatus: null,
// sidebar dropdown
isLoadingStages: false,
isLoadingJobs: false,
selectedStage: null,
stages: [],
jobs: [],
});
......@@ -512,7 +512,10 @@ export const setCiStatusFavicon = pageUrl =>
}
return resetFavicon();
})
.catch(resetFavicon);
.catch((error) => {
resetFavicon();
throw error;
});
export const spriteIcon = (icon, className = '') => {
const classAttribute = className.length > 0 ? `class="${className}"` : '';
......
......@@ -127,7 +127,7 @@ class Projects::BlobController < Projects::ApplicationController
add_match_line
render json: @lines
render json: DiffLineSerializer.new.represent(@lines)
end
def add_match_line
......
......@@ -17,6 +17,11 @@ class Projects::TagsController < Projects::ApplicationController
tag_names = @tags.map(&:name)
@tags_pipelines = @project.pipelines.latest_successful_for_refs(tag_names)
@releases = project.releases.where(tag: tag_names)
respond_to do |format|
format.html
format.atom { render layout: 'xml.atom' }
end
end
def show
......
......@@ -68,7 +68,7 @@ module ButtonHelper
def http_dropdown_description(protocol)
if current_user.try(:require_password_creation_for_git?)
_("Set a password on your account to pull or push via %{protocol}.") % { protocol: protocol }
else
elsif current_user.try(:require_personal_access_token_creation_for_git_auth?)
_("Create a personal access token on your account to pull or push via %{protocol}.") % { protocol: protocol }
end
end
......
......@@ -57,7 +57,7 @@ module NamespacesHelper
# group if one exists by that name to prevent duplicates.
def dedup_extra_group(extra_group)
unless extra_group.persisted?
existing_group = Group.find_by(name: extra_group.name)
existing_group = Group.find_by(path: extra_group.path)
extra_group = existing_group if existing_group&.persisted?
end
......
......@@ -135,12 +135,12 @@ class DiffFileEntity < Grape::Entity
end
# Used for inline diffs
expose :highlighted_diff_lines, if: -> (diff_file, _) { diff_file.text? } do |diff_file|
expose :highlighted_diff_lines, using: DiffLineEntity, if: -> (diff_file, _) { diff_file.text? } do |diff_file|
diff_file.diff_lines_for_serializer
end
# Used for parallel diffs
expose :parallel_diff_lines, if: -> (diff_file, _) { diff_file.text? }
expose :parallel_diff_lines, using: DiffLineParallelEntity, if: -> (diff_file, _) { diff_file.text? }
def current_user
request.current_user
......
# frozen_string_literal: true
class DiffLineEntity < Grape::Entity
expose :line_code
expose :type
expose :old_line
expose :new_line
expose :text
expose :meta_positions, as: :meta_data
expose :rich_text do |line|
line.rich_text || CGI.escapeHTML(line.text)
end
end
# frozen_string_literal: true
class DiffLineParallelEntity < Grape::Entity
expose :left, using: DiffLineEntity
expose :right, using: DiffLineEntity
end
# frozen_string_literal: true
class DiffLineSerializer < BaseSerializer
entity DiffLineEntity
end
......@@ -43,7 +43,7 @@ class DiscussionEntity < Grape::Entity
project_merge_request_discussion_path(discussion.project, discussion.noteable, discussion)
end
expose :truncated_diff_lines, if: -> (d, _) { d.diff_discussion? && d.on_text? && (d.expanded? || render_truncated_diff_lines?) }
expose :truncated_diff_lines, using: DiffLineEntity, if: -> (d, _) { d.diff_discussion? && d.on_text? && (d.expanded? || render_truncated_diff_lines?) }
expose :image_diff_html, if: -> (d, _) { d.diff_discussion? && d.on_image? } do |discussion|
diff_file = discussion.diff_file
......
commit = @repository.commit(tag.dereferenced_target)
release = @releases.find { |r| r.tag == tag.name }
tag_url = project_tag_url(@project, tag.name)
if commit
xml.entry do
xml.id tag_url
xml.link href: tag_url
xml.title truncate(tag.name, length: 80)
xml.summary strip_gpg_signature(tag.message)
xml.content markdown_field(release, :description), type: 'html'
xml.updated release.updated_at.xmlschema if release
xml.media :thumbnail, width: '40', height: '40', url: image_url(avatar_icon_for_email(commit.author_email))
xml.author do |author|
xml.name commit.author_name
xml.email commit.author_email
end
end
end
xml.title "#{@project.name} tags"
xml.link href: project_tags_url(@project, @ref, rss_url_options), rel: 'self', type: 'application/atom+xml'
xml.link href: project_tags_url(@project, @ref), rel: 'alternate', type: 'text/html'
xml.id project_tags_url(@project, @ref)
xml.updated @releases.first.updated_at.xmlschema if @releases.any?
xml << render(partial: 'tag', collection: @tags) if @tags.any?
- @no_container = true
- @sort ||= sort_value_recently_updated
- page_title s_('TagsPage|Tags')
= content_for :meta_tags do
= auto_discovery_link_tag(:atom, project_tags_url(@project, rss_url_options), title: "#{@project.name} tags")
.flex-list{ class: container_class }
.top-area.adjust
......@@ -25,6 +27,8 @@
- if can?(current_user, :push_code, @project)
= link_to new_project_tag_path(@project), class: 'btn btn-create new-tag-btn' do
= s_('TagsPage|New tag')
= link_to project_tags_path(@project, rss_url_options), title: _("Tags feed"), class: 'btn rss-btn has-tooltip' do
= icon("rss")
= render_if_exists 'projects/commits/mirror_status'
......
---
title: Use sample data for push event when no commits created
merge_request: 21440
author: Takuya Noguchi
type: fixed
---
title: 'Auto-DevOps.gitlab-ci.yml: fix redeploying deleted app gives helm error'
merge_request: 21429
author:
type: fixed
---
title: Run review-docs-cleanup job for gitlab-org repos only
merge_request: 21463
author: Takuya Noguchi
type: other
---
title: Hide PAT creation advice for HTTP clone if PAT exists
merge_request: 18208
author: George Thomas @thegeorgeous
type: fixed
---
title: Added atom feed for tags
merge_request: 21428
author:
type: added
---
title: Bump GitLab Pages to v1.1.0
merge_request: 21419
author:
type: fixed
---
title: Bump unauthenticated session time from 1 hour to 2 hours
merge_request: 21453
author:
type: other
---
title: Disable the Sidekiq Admin Rack session
merge_request: 21441
author:
type: security
---
title: Fix importers not assigning a new default group
merge_request: 21456
author:
type: fixed
......@@ -570,3 +570,10 @@
:why: https://github.com/codesandbox-app/codesandbox-importers/blob/master/packages/import-utils/LICENSE
:versions: []
:when: 2018-08-03 12:23:24.083046000 Z
- - :ignore_group
- devDependencies
- :who: Winnie Hellmann
:why: NPM packages used for development are not distributed with the final product and are therefore
exempt.
:versions: []
:when: 2018-08-30 12:06:35.668181000 Z
......@@ -148,7 +148,7 @@ Settings.gitlab['default_projects_features'] ||= {}
Settings.gitlab['webhook_timeout'] ||= 10
Settings.gitlab['max_attachment_size'] ||= 10
Settings.gitlab['session_expire_delay'] ||= 10080
Settings.gitlab['unauthenticated_session_expire_delay'] ||= 1.hour.to_i
Settings.gitlab['unauthenticated_session_expire_delay'] ||= 2.hours.to_i
Settings.gitlab['mirror_max_delay'] ||= 300
Settings.gitlab['mirror_max_capacity'] ||= 30
Settings.gitlab['mirror_capacity_threshold'] ||= 15
......
require 'sidekiq/web'
# Disable the Sidekiq Rack session since GitLab already has its own session store.
# CSRF protection still works (https://github.com/mperham/sidekiq/commit/315504e766c4fd88a29b7772169060afc4c40329).
Sidekiq::Web.set :sessions, false
# Custom Queues configuration
queues_config_hash = Gitlab::Redis::Queues.params
queues_config_hash[:namespace] = Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE
......
......@@ -149,7 +149,7 @@ listen_addr = '0.0.0.0:8075'
[auth]
token = 'abc123secret'
[[storage]
[[storage]]
name = 'default'
path = '/mnt/gitlab/default/repositories'
......
......@@ -47,7 +47,7 @@ there because this will also affect performance. We recommend that the log files
stored on a local volume.
For more details on another person's experience with EFS, see
[Amazon's Elastic File System: Burst Credits](https://www.rawkode.io/2017/04/amazons-elastic-file-system-burst-credits/)
[Amazon's Elastic File System: Burst Credits](https://rawkode.com/2017/04/16/amazons-elastic-file-system-burst-credits/)
## NFS Client mount options
......
......@@ -190,7 +190,7 @@ _The artifacts are stored by default in
remote_directory: "artifacts" # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACESS_KEY_ID
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
```
......
......@@ -11,6 +11,7 @@ description: 'Learn how to administer GitLab Pages.'
- This guide is for Omnibus GitLab installations. If you have installed
GitLab from source, follow the [Pages source installation document](source.md).
- To learn how to use GitLab Pages, read the [user documentation][pages-userguide].
- GitLab Pages does NOT support subgroups. See [this issue](https://gitlab.com/gitlab-org/gitlab-ce/issues/30548) for more information and status.
This document describes how to set up the _latest_ GitLab Pages feature. Make
sure to read the [changelog](#changelog) if you are upgrading to a new GitLab
......@@ -73,8 +74,8 @@ among other things.
Follow [these instructions](https://publicsuffix.org/submit/) to submit your
GitLab Pages subdomain. For instance, if your domain is `example.io`, you should
request that `*.example.io` is added to the Public Suffix List. GitLab.com
added `*.gitlab.io` [in 2016](https://gitlab.com/gitlab-com/infrastructure/issues/230).
request that `example.io` is added to the Public Suffix List. GitLab.com
added `gitlab.io` [in 2016](https://gitlab.com/gitlab-com/infrastructure/issues/230).
### DNS configuration
......
......@@ -82,7 +82,7 @@ You can filter by [custom attributes](custom_attributes.md) with:
GET /groups?custom_attributes[key]=value&custom_attributes[other_key]=other_value
```
## List a groups's subgroups
## List a group's subgroups
> [Introduced][ce-15142] in GitLab 10.3.
......
......@@ -731,7 +731,7 @@ PUT /projects/:id
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
| `name` | string | yes | The name of the project |
| `name` | string | no | The name of the project |
| `path` | string | no | Custom repository name for the project. By default generated based on name |
| `default_branch` | string | no | `master` by default |
| `description` | string | no | Short project description |
......
......@@ -174,8 +174,19 @@ Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
- `tag_name` (required) - The name of a tag
Request body:
- `description` (required) - Release notes with markdown support
```json
{
"description": "Amazing release. Wow"
}
```
Response:
```json
{
"tag_name": "1.0.0",
......@@ -195,8 +206,19 @@ Parameters:
- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user
- `tag_name` (required) - The name of a tag
Request body:
- `description` (required) - Release notes with markdown support
```json
{
"description": "Amazing release. Wow"
}
```
Response:
```json
{
"tag_name": "1.0.0",
......
......@@ -87,7 +87,7 @@ you can use the same key for all of them:
```yaml
cache:
key: one-key-to-rull-them-all
key: one-key-to-rule-them-all
```
To share the same cache between branches, but separate them by job:
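A minimal sketch of what that configuration could look like, assuming the predefined `CI_JOB_NAME` variable exposed by GitLab CI:

```yaml
cache:
  key: "$CI_JOB_NAME"
```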
......@@ -489,7 +489,15 @@ needed to compile the project:
Artifacts were designed to upload some compiled/generated bits of the build,
and they can be fetched by any number of concurrent Runners. They are
guaranteed to be available and are there to pass data between jobs. They are
also exposed to be downloaded from the UI.
also exposed to be downloaded from the UI. **Artifacts can only exist in
directories relative to the build directory**; specifying paths that don't
comply with this rule triggers an unintuitive and illogical error message (an
enhancement is discussed at
https://gitlab.com/gitlab-org/gitlab-ce/issues/15530). Artifacts need to be
uploaded to the GitLab instance (not only the GitLab Runner) before the next
stage job(s) can start, so you need to evaluate carefully whether your
bandwidth allows you to benefit from parallelization with stages and shared
artifacts before investing time in changes to the setup.
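As an illustrative sketch (not part of this merge request; the job name, build command, and `dist/` path are assumptions), a job that keeps its artifacts inside the build directory could be declared like this:

```yaml
build:
  stage: build
  script:
    - make build            # assumed build command
  artifacts:
    paths:
      - dist/               # must be relative to the build (project) directory
    expire_in: 1 week
```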
It's sometimes confusing because the name artifact sounds like something that
is only useful outside of the job, like for downloading a final image. But
......
......@@ -100,3 +100,42 @@ golang:
reports:
junit: report.xml
```
### Java examples
There are a few tools that can produce JUnit reports in Java.
#### Gradle
In the following example, `gradle` is used to generate the test reports.
If there are multiple test tasks defined, `gradle` will generate multiple
directories under `build/test-results/`. In that case, you can leverage glob
matching by defining the following path: `build/test-results/test/TEST-*.xml`:
```yaml
java:
stage: test
script:
- gradle test
artifacts:
reports:
junit: build/test-results/test/TEST-*.xml
```
#### Maven
For parsing [Surefire](https://maven.apache.org/surefire/maven-surefire-plugin/)
and [Failsafe](https://maven.apache.org/surefire/maven-failsafe-plugin/) test
reports, use the following job in `.gitlab-ci.yml`:
```yaml
java:
stage: test
script:
- mvn verify
artifacts:
reports:
junit:
- target/surefire-reports/TEST-*.xml
- target/failsafe-reports/TEST-*.xml
```
......@@ -390,6 +390,28 @@ job:
The specification above, will make sure that `job` is built by a Runner that
has both `ruby` AND `postgres` tags defined.
Tags are also a great way to run different jobs on different platforms. For
example, given an OS X Runner with tag `osx` and a Windows Runner with tag
`windows`, the following jobs run on the respective platforms:
```yaml
windows job:
stage: build
tags:
- windows
script:
- echo Hello, %USERNAME%!
osx job:
stage: build
tags:
- osx
script:
- echo "Hello, $USER!"
```
## `allow_failure`
`allow_failure` is used when you want to allow a job to fail without impacting
......
......@@ -100,7 +100,7 @@ If a gem uses a license which is not listed above, open an issue and ask. If a l
Keep in mind that each license has its own restrictions (typically defined in their body text). Please make sure to comply with those restrictions at all times whenever an external library is used.
Gems which are included only in the "development" or "test" groups by Bundler are exempt from license requirements, as they're not distributed for use in production.
Dependencies which are only used in the development or test environments are exempt from license requirements, as they're not distributed for use in production.
**NOTE:** This document is **not** legal advice, nor is it comprehensive. It should not be taken as such.
......
......@@ -52,7 +52,7 @@ In order to deploy GitLab on Kubernetes, the following are required:
To deploy GitLab, the following three parameters are required:
- `global.hosts.domain`: the [base domain](preparation/networking.md) of the
wildcard host entry. For example, `exampe.com` if the wild card entry is
wildcard host entry. For example, `example.com` if the wildcard entry is
`*.example.com`.
- `global.hosts.externalIP`: the [external IP](preparation/networking.md) which
the wildcard DNS resolves to.
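A minimal sketch of how these two parameters could be supplied through a `values.yaml` file passed to `helm install` (the domain and IP below are placeholders):

```yaml
global:
  hosts:
    domain: example.com      # base domain; the wildcard entry is *.example.com
    externalIP: 10.0.0.1     # external IP the wildcard DNS record resolves to
```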
......@@ -123,6 +123,8 @@ To deploy the Community Edition, include these options in your `helm install` co
--set gitlab.migrations.image.repository=registry.gitlab.com/gitlab-org/build/cng/gitlab-rails-ce
--set gitlab.sidekiq.image.repository=registry.gitlab.com/gitlab-org/build/cng/gitlab-sidekiq-ce
--set gitlab.unicorn.image.repository=registry.gitlab.com/gitlab-org/build/cng/gitlab-unicorn-ce
--set gitlab.unicorn.workhorse.image=registry.gitlab.com/gitlab-org/build/cng/gitlab-workhorse-ce
--set gitlab.task-runner.image.repository=registry.gitlab.com/gitlab-org/build/cng/gitlab-task-runner-ce
```
## Updating GitLab using the Helm Chart
......
......@@ -101,7 +101,7 @@ Other common configuration options:
- `provider`: Optimizes the deployment for a cloud provider. The default is `gke` for [Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine/), with `acs` also supported for the [Azure Container Service](https://azure.microsoft.com/en-us/services/container-service/).
For additional configuration options, consult the
[`values.yaml`](https://gitlab.com/charts/charts.gitlab.io/blob/master/charts/gitlab-omnibus/values.yaml).
[`values.yaml`](https://gitlab.com/charts/gitlab-omnibus/blob/master/values.yaml).
### Choosing a different GitLab release version
......@@ -228,7 +228,7 @@ helm upgrade gitlab --set gitlab=ee,gitlabEEImage=gitlab/gitlab-ee:9.5.5-ee.0 gi
To uninstall the GitLab Chart, run the following:
```bash
helm delete gitlab
helm delete --purge gitlab
```
## Troubleshooting
......
......@@ -124,7 +124,7 @@ To use the `copy` strategy instead of the default streaming strategy, specify
### Excluding specific directories from the backup
You can choose what should be backed up by adding the environment variable `SKIP`.
You can choose what should be exempt from the backup by adding the environment variable `SKIP`.
The available options are:
- `db` (database)
......@@ -138,6 +138,9 @@ The available options are:
Use a comma to specify several options at the same time:
All wikis will be backed up as part of the `repositories` group. Non-existent wikis
will be skipped during a backup.
```
# use this command if you've installed GitLab with the Omnibus package
sudo gitlab-rake gitlab:backup:create SKIP=db,uploads
......
......@@ -10,6 +10,7 @@ comments: false
- [Webhooks and insecure internal web services](webhooks.md)
- [Information exclusivity](information_exclusivity.md)
- [Reset your root password](reset_root_password.md)
- [Unlock a locked user](unlock_user.md)
- [User File Uploads](user_file_uploads.md)
- [How we manage the CRIME vulnerability](crime_vulnerability.md)
- [Enforce Two-factor authentication](two_factor_authentication.md)
......
# How to unlock a locked user
Log into your server with root privileges. Then start a Ruby on Rails console.
Start the console with this command:
```bash
gitlab-rails console production
```
Wait until the console has loaded.
There are multiple ways to find your user; for example, you can look them up by ID or by email address.
```bash
user = User.where(id: 1).first
```
or
```bash
user = User.find_by(email: 'admin@local.host')
```
Unlock the user:
```bash
user.unlock_access!
```
Exit the console; the user should now be able to log in again.
......@@ -4,4 +4,4 @@ Gitlab admin can enable email confirmation on sign-up, if you want to confirm al
user emails before they are able to sign-in.
In the Admin area under **Settings** (`/admin/application_settings`), go to section
**Sign-in Restrictions** and look for **Send confirmation email on sign-up** option.
**Sign-up Restrictions** and look for **Send confirmation email on sign-up** option.
......@@ -48,9 +48,11 @@ Note that Public SSH key may also be named as follows:
**Git Bash on Windows / GNU/Linux / macOS:**
```bash
ssh-keygen -t rsa -C "your.email@example.com" -b 4096
ssh-keygen -o -t rsa -C "your.email@example.com" -b 4096
```
(Note: the `-o` option was introduced in 2014; if this command does not work for you, simply remove the `-o` option and try again)
**Windows:**
Alternatively on Windows you can download
......@@ -75,7 +77,9 @@ Note that Public SSH key may also be named as follows:
NOTE: **Note:**
If you want to change the password of your SSH key pair, you can use
`ssh-keygen -p <keyname>`.
`ssh-keygen -p -o -f <keyname>`.
The `-o` option was added in 2014, so if this command does not work for you,
simply remove the `-o` option and try again.
## Adding a SSH key to your GitLab account
......
......@@ -574,13 +574,13 @@ postgres://user:password@postgres-host:postgres-port/postgres-database
### Environment variables
The following variables can be used for setting up the Auto DevOps domain,
providing a custom Helm chart, or scaling your application. PostgreSQL can be
providing a custom Helm chart, or scaling your application. PostgreSQL can
also be customized, and you can easily use a [custom buildpack](#custom-buildpacks).
| **Variable** | **Description** |
| ------------ | --------------- |
| `AUTO_DEVOPS_DOMAIN` | The [Auto DevOps domain](#auto-devops-domain); by default set automatically by the [Auto DevOps setting](#enabling-auto-devops). |
| `AUTO_DEVOPS_CHART` | The Helm Chart used to deploy your apps; defaults to the one [provided by GitLab](https://gitlab.com/charts/charts.gitlab.io/tree/master/charts/auto-deploy-app). |
| `AUTO_DEVOPS_CHART` | The Helm Chart used to deploy your apps; defaults to the one [provided by GitLab](https://gitlab.com/charts/auto-deploy-app). |
| `REPLICAS` | The number of replicas to deploy; defaults to 1. |
| `PRODUCTION_REPLICAS` | The number of replicas to deploy in the production environment. This takes precedence over `REPLICAS`; defaults to 1. |
| `CANARY_REPLICAS` | The number of canary replicas to deploy for [Canary Deployments](https://docs.gitlab.com/ee/user/project/canary_deployments.html); defaults to 1 |
......
......@@ -143,7 +143,7 @@ In the next section we'll break down the pipeline and explain what each job does
By now you should see the pipeline running, but what is it running exactly?
To navigate inside the pipeline, click its status badge. (It's status should be "running").
To navigate inside the pipeline, click its status badge. (Its status should be "running").
The pipeline is split into 4 stages, each running a couple of jobs.
![Pipeline stages](img/guide_pipeline_stages.png)
......@@ -194,7 +194,7 @@ applications. In the rightmost column for the production environment, you can ma
- The first icon will open the URL of the application that is deployed in
production. It's a very simple page, but the important part is that it works!
- The next icon with the small graph will take you to the metrics page where
- The next icon, with the small graph, will take you to the metrics page where
Prometheus collects data about the Kubernetes cluster and how the application
affects it (in terms of memory/CPU usage, latency, etc.).
......@@ -217,7 +217,7 @@ under **Settings > CI/CD > Variables**.
### Working with branches
Following the [GitLab flow](../../workflow/gitlab_flow.md#working-with-feature-branches)
Following the [GitLab flow](../../workflow/gitlab_flow.md#working-with-feature-branches),
let's create a feature branch that will add some content to the application.
Under your repository, navigate to the following file: `app/views/welcome/index.html.erb`.
......@@ -235,7 +235,7 @@ by clicking **Commit**.
![Web IDE commit](img/guide_ide_commit.png)
Once you submit the merge request, you'll see the pipeline running. This will
run all the jobs as [described previously](#deploying-the-application), as well
run all the jobs as [described previously](#deploying-the-application), as well as
a few more that run only on branches other than `master`.
![Merge request](img/guide_merge_request.png)
......@@ -278,7 +278,7 @@ and the application will be eventually deployed straight to production.
After implementing this project, you should now have a solid understanding of the basics of Auto DevOps.
We started from building and testing to deploying and monitoring an application
all within GitLab. Despite its automatic nature, Audo DevOps can also be configured
all within GitLab. Despite its automatic nature, Auto DevOps can also be configured
and customized to fit your workflow. Here are some helpful resources for further reading:
1. [Auto DevOps](index.md)
......
......@@ -8,7 +8,7 @@ between the two, for more information consult your favorite search engine.
There are two approaches to SVN to Git migration:
1. [Git/SVN Mirror](#smooth-migration-with-a-gitsvn-mirror-using-subgit) which:
1. [Git/SVN Mirror](#smooth-migration-with-a-git-svn-mirror-using-subgit) which:
- Makes the GitLab repository to mirror the SVN project.
- Git and SVN repositories are kept in sync; you can use either one.
- Smoothens the migration process and allows to manage migration risks.
......
......@@ -26,8 +26,10 @@ used:
```
Note that `%{issue_ref}` is a complex regular expression defined inside GitLab's
source code that can match a reference to 1) a local issue (`#123`),
2) a cross-project issue (`group/project#123`) or 3) a link to an issue
source code that can match references to:
1. a local issue (`#123`),
2. a cross-project issue (`group/project#123`), or
3. a link to an issue
(`https://gitlab.example.com/group/project/issues/123`).
---
......
......@@ -94,7 +94,7 @@ where you'll find its default URL.
>
> - GitLab Pages [supports any SSG](https://about.gitlab.com/2016/06/17/ssg-overview-gitlab-pages-part-3-examples-ci/), but,
if you don't find yours among the templates, you'll need
to configure your own `.gitlab-ci.yml`. Do do that, please
to configure your own `.gitlab-ci.yml`. To do that, please
read through the article [Creating and Tweaking GitLab CI/CD for GitLab Pages](getting_started_part_four.md). New SSGs are very welcome among
the [example projects](https://gitlab.com/pages). If you set
up a new one, please
......
......@@ -56,6 +56,8 @@ started:
gpg --full-gen-key
```
_NOTE: In some cases, such as Gpg4win on Windows or certain macOS versions, the command here may be `gpg --gen-key`._
This will spawn a series of questions.
1. The first question is which algorithm can be used. Select the kind you want
......
......@@ -24,7 +24,7 @@ There are various configuration options to help GitLab server administrators:
In `/etc/gitlab/gitlab.rb`:
```ruby
# Change to true to enable lfs
# Change to true to enable lfs - enabled by default if not defined
gitlab_rails['lfs_enabled'] = false
# Optionally, change the storage path location. Defaults to
......
......@@ -9,6 +9,7 @@ Uncomment and customize if you want to change the default time zone of GitLab ap
To see all available time zones, run `bundle exec rake time:zones:all`.
With Omnibus installations, run `gitlab-rake time:zones:all`.
## Changing time zone in omnibus installations
......
......@@ -97,11 +97,15 @@ module Gitlab
}
end
# This method provide a sample data generated with
# This method provides a sample data generated with
# existing project and commits to test webhooks
def build_sample(project, user)
# Use sample data if repo has no commit
# (except the case of testing the service configuration settings)
return sample_data if project.empty_repo?
ref = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{project.default_branch}"
commits = project.repository.commits(project.default_branch.to_s, limit: 3) rescue []
commits = project.repository.commits(project.default_branch.to_s, limit: 3)
build(project, user, commits.last&.id, commits.first&.id, ref, commits)
end
......
......@@ -79,16 +79,10 @@ module Gitlab
}
end
# We have to keep this here since it is still used for conflict resolution
# Conflict::File#as_json renders json diff lines in sections
def as_json(opts = nil)
{
line_code: line_code,
type: type,
old_line: old_line,
new_line: new_line,
text: text,
rich_text: rich_text || CGI.escapeHTML(text),
meta_data: meta_positions
}
DiffLineSerializer.new.represent(self)
end
private
......
......@@ -612,6 +612,18 @@ msgstr ""
msgid "An error occurred while fetching sidebar data"
msgstr ""
msgid "An error occurred while fetching stages."
msgstr ""
msgid "An error occurred while fetching the job log."
msgstr ""
msgid "An error occurred while fetching the job."
msgstr ""
msgid "An error occurred while fetching the jobs."
msgstr ""
msgid "An error occurred while fetching the pipeline."
msgstr ""
......@@ -6866,6 +6878,9 @@ msgstr[1] ""
msgid "Tags"
msgstr ""
msgid "Tags feed"
msgstr ""
msgid "Tags:"
msgstr ""
......
......@@ -94,6 +94,7 @@ module QA
autoload :LDAP, 'qa/scenario/test/integration/ldap'
autoload :Kubernetes, 'qa/scenario/test/integration/kubernetes'
autoload :Mattermost, 'qa/scenario/test/integration/mattermost'
autoload :ObjectStorage, 'qa/scenario/test/integration/object_storage'
end
module Sanity
......
......@@ -7,7 +7,7 @@ module QA
# including staging and on-premises installation.
#
class Mattermost < Test::Instance::All
tags :core, :mattermost
tags :mattermost
def perform(address, mattermost, *rspec_options)
Runtime::Scenario.define(:mattermost_address, mattermost)
......
# frozen_string_literal: true
module QA
module Scenario
module Test
module Integration
class ObjectStorage < Test::Instance
tags :object_storage
end
end
end
end
end
......@@ -5,18 +5,46 @@ module QA
describe 'Issue creation' do
let(:issue_title) { 'issue title' }
it 'user creates an issue' do
def create_issue
Runtime::Browser.visit(:gitlab, Page::Main::Login)
Page::Main::Login.act { sign_in_using_credentials }
Factory::Resource::Issue.fabricate! do |issue|
issue.title = issue_title
end
end
it 'user creates an issue' do
create_issue
Page::Menu::Side.act { click_issues }
expect(page).to have_content(issue_title)
end
context 'when using attachments in comments', :object_storage do
let(:file_to_attach) do
File.absolute_path(File.join('spec', 'fixtures', 'banana_sample.gif'))
end
it 'user comments on an issue with an attachment' do
create_issue
Page::Project::Issue::Show.perform do |show|
show.comment('See attached banana for scale', attachment: file_to_attach)
show.refresh
image_url = find('a[href$="banana_sample.gif"]')[:href]
found = show.wait(reload: false) do
show.asset_exists?(image_url)
end
expect(found).to be_truthy
end
end
end
end
end
end
# frozen_string_literal: true
module QA
context :create, :core do
context :create do
describe 'Files management' do
it 'user creates, edits and deletes a file via the Web' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
......
......@@ -5,7 +5,7 @@ cd "$(dirname "$0")/.."
# Use long options (e.g. --header instead of -H) for curl examples in documentation.
echo '=> Checking for cURL short options...'
grep --extended-regexp --recursive --color=auto 'curl (.+ )?-[^- ].*' doc/ >/dev/null 2>&1
if [ $? == 0 ]
if [ $? -eq 0 ]
then
echo '✖ ERROR: Short options for curl should not be used in documentation!
Use long options (e.g., --header instead of -H):' >&2
......
# frozen_string_literal: true
require 'spec_helper'
describe 'User views tags', :feature do
context 'rss' do
shared_examples 'has access to the tags RSS feed' do
it do
visit project_tags_path(project, format: :atom)
expect(page).to have_gitlab_http_status(200)
end
end
shared_examples 'does not have access to the tags RSS feed' do
it do
visit project_tags_path(project, format: :atom)
expect(page).to have_gitlab_http_status(401)
end
end
context 'when project public' do
let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
context 'when user signed in' do
let(:user) { create(:user) }
before do
project.add_developer(user)
sign_in(user)
visit project_tags_path(project)
end
it_behaves_like "it has an RSS button with current_user's feed token"
it_behaves_like "an autodiscoverable RSS feed with current_user's feed token"
it_behaves_like 'has access to the tags RSS feed'
end
context 'when user signed out' do
before do
visit project_tags_path(project)
end
it_behaves_like 'it has an RSS button without a feed token'
it_behaves_like 'an autodiscoverable RSS feed without a feed token'
it_behaves_like 'has access to the tags RSS feed'
end
end
context 'when project is not public' do
let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
context 'when user signed in' do
let(:user) { create(:user) }
before do
project.add_developer(user)
sign_in(user)
end
it_behaves_like 'has access to the tags RSS feed'
end
context 'when user signed out' do
it_behaves_like 'does not have access to the tags RSS feed'
end
end
end
end
{
"type": "object",
"required": ["type"],
"properties": {
"line_code": { "type": ["string", "null"] },
"type": { "type": ["string", "null"] },
"old_line": { "type": ["integer", "null"] },
"new_line": { "type": ["integer", "null"] },
"text": { "type": ["string"] },
"rich_text": { "type": ["string"] },
"meta_data": { "type": ["object", "null"] }
},
"additionalProperties": false
}
{
"required" : [
"left",
"right"
],
"properties" : {
"left": { "$ref": "diff_line.json" },
"right": { "$ref": "diff_line.json" }
},
"additionalProperties": false
}
......@@ -40,12 +40,24 @@ describe ButtonHelper do
end
context 'when user has no personal access tokens' do
it 'has a personal access token text on the dropdown description ' do
it 'has a personal access token text on the dropdown description' do
description = element.search('.dropdown-menu-inner-content').first
expect(description.inner_text).to eq 'Create a personal access token on your account to pull or push via HTTP.'
end
end
context 'when user has personal access tokens' do
before do
create(:personal_access_token, user: user)
end
it 'does not have a personal access token text on the dropdown description' do
description = element.search('.dropdown-menu-inner-content').first
expect(description).to be_nil
end
end
end
context 'when user is ldap user' do
......
......@@ -50,9 +50,12 @@ describe NamespacesHelper do
end
it 'selects the new group by default' do
# Ensure we don't select a group with the same name
create(:group, name: 'new-group', path: 'another-path')
allow(helper).to receive(:current_user).and_return(user)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: 'new-group'))
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: 'new-group', path: 'new-group'))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
......
import state from '~/jobs/store/state';
import mutations from '~/jobs/store/mutations';
import * as types from '~/jobs/store/mutation_types';
describe('Jobs Store Mutations', () => {
let stateCopy;
const html =
'I, [2018-08-17T22:57:45.707325 #1841] INFO -- : Writing /builds/ab89e95b0fa0b9272ea0c797b76908f24d36992630e9325273a4ce3.png<br>I';
beforeEach(() => {
stateCopy = state();
});
describe('REQUEST_STATUS_FAVICON', () => {
it('should set fetchingStatusFavicon to true', () => {
mutations[types.REQUEST_STATUS_FAVICON](stateCopy);
expect(stateCopy.fetchingStatusFavicon).toEqual(true);
});
});
describe('RECEIVE_STATUS_FAVICON_SUCCESS', () => {
it('should set fetchingStatusFavicon to false', () => {
mutations[types.RECEIVE_STATUS_FAVICON_SUCCESS](stateCopy);
expect(stateCopy.fetchingStatusFavicon).toEqual(false);
});
});
describe('RECEIVE_STATUS_FAVICON_ERROR', () => {
it('should set fetchingStatusFavicon to false', () => {
mutations[types.RECEIVE_STATUS_FAVICON_ERROR](stateCopy);
expect(stateCopy.fetchingStatusFavicon).toEqual(false);
});
});
describe('RECEIVE_TRACE_SUCCESS', () => {
describe('when trace has state', () => {
it('sets traceState', () => {
const stateLog =
'eyJvZmZzZXQiOjczNDQ1MSwibl9vcGVuX3RhZ3MiOjAsImZnX2NvbG9yIjpudWxsLCJiZ19jb2xvciI6bnVsbCwic3R5bGVfbWFzayI6MH0=';
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
state: stateLog,
});
expect(stateCopy.traceState).toEqual(stateLog);
});
});
describe('when traceSize is smaller than the total size', () => {
it('sets isTraceSizeVisible to true', () => {
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, { total: 51184600, size: 1231 });
expect(stateCopy.isTraceSizeVisible).toEqual(true);
});
});
describe('when traceSize is bigger than the total size', () => {
it('sets isTraceSizeVisible to false', () => {
const copy = Object.assign({}, stateCopy, { traceSize: 5118460, size: 2321312 });
mutations[types.RECEIVE_TRACE_SUCCESS](copy, { total: 511846 });
expect(copy.isTraceSizeVisible).toEqual(false);
});
});
it('sets trace, trace size and isTraceComplete', () => {
mutations[types.RECEIVE_TRACE_SUCCESS](stateCopy, {
append: true,
html,
size: 511846,
complete: true,
});
expect(stateCopy.trace).toEqual(html);
expect(stateCopy.traceSize).toEqual(511846);
expect(stateCopy.isTraceComplete).toEqual(true);
});
});
describe('STOP_POLLING_TRACE', () => {
it('sets isTraceComplete to true', () => {
mutations[types.STOP_POLLING_TRACE](stateCopy);
expect(stateCopy.isTraceComplete).toEqual(true);
});
});
describe('RECEIVE_TRACE_ERROR', () => {
it('resets trace state and sets error to true', () => {
mutations[types.RECEIVE_TRACE_ERROR](stateCopy);
expect(stateCopy.isLoadingTrace).toEqual(false);
expect(stateCopy.isTraceComplete).toEqual(true);
expect(stateCopy.hasTraceError).toEqual(true);
});
});
describe('REQUEST_JOB', () => {
it('sets isLoading to true', () => {
mutations[types.REQUEST_JOB](stateCopy);
expect(stateCopy.isLoading).toEqual(true);
});
});
describe('RECEIVE_JOB_SUCCESS', () => {
beforeEach(() => {
mutations[types.RECEIVE_JOB_SUCCESS](stateCopy, { id: 1312321 });
});
it('sets is loading to false', () => {
expect(stateCopy.isLoading).toEqual(false);
});
it('sets hasError to false', () => {
expect(stateCopy.hasError).toEqual(false);
});
it('sets job data', () => {
expect(stateCopy.job).toEqual({ id: 1312321 });
});
});
describe('RECEIVE_JOB_ERROR', () => {
it('resets job data', () => {
mutations[types.RECEIVE_JOB_ERROR](stateCopy);
expect(stateCopy.isLoading).toEqual(false);
expect(stateCopy.hasError).toEqual(true);
expect(stateCopy.job).toEqual({});
});
});
describe('SCROLL_TO_TOP', () => {
beforeEach(() => {
mutations[types.SCROLL_TO_TOP](stateCopy);
});
it('sets isTraceScrolledToBottom to false', () => {
expect(stateCopy.isTraceScrolledToBottom).toEqual(false);
});
it('sets hasBeenScrolled to true', () => {
expect(stateCopy.hasBeenScrolled).toEqual(true);
});
});
describe('SCROLL_TO_BOTTOM', () => {
beforeEach(() => {
mutations[types.SCROLL_TO_BOTTOM](stateCopy);
});
it('sets isTraceScrolledToBottom to true', () => {
expect(stateCopy.isTraceScrolledToBottom).toEqual(true);
});
it('sets hasBeenScrolled to true', () => {
expect(stateCopy.hasBeenScrolled).toEqual(true);
});
});
describe('REQUEST_STAGES', () => {
it('sets isLoadingStages to true', () => {
mutations[types.REQUEST_STAGES](stateCopy);
expect(stateCopy.isLoadingStages).toEqual(true);
});
});
describe('RECEIVE_STAGES_SUCCESS', () => {
beforeEach(() => {
mutations[types.RECEIVE_STAGES_SUCCESS](stateCopy, [{ name: 'build' }]);
});
it('sets isLoadingStages to false', () => {
expect(stateCopy.isLoadingStages).toEqual(false);
});
it('sets stages', () => {
expect(stateCopy.stages).toEqual([{ name: 'build' }]);
});
});
describe('RECEIVE_STAGES_ERROR', () => {
beforeEach(() => {
mutations[types.RECEIVE_STAGES_ERROR](stateCopy);
});
it('sets isLoadingStages to false', () => {
expect(stateCopy.isLoadingStages).toEqual(false);
});
it('resets stages', () => {
expect(stateCopy.stages).toEqual([]);
});
});
describe('REQUEST_JOBS_FOR_STAGE', () => {
it('sets isLoadingStages to true', () => {
mutations[types.REQUEST_JOBS_FOR_STAGE](stateCopy);
expect(stateCopy.isLoadingJobs).toEqual(true);
});
});
describe('RECEIVE_JOBS_FOR_STAGE_SUCCESS', () => {
beforeEach(() => {
mutations[types.RECEIVE_JOBS_FOR_STAGE_SUCCESS](stateCopy, [{ name: 'karma' }]);
});
it('sets isLoadingJobs to false', () => {
expect(stateCopy.isLoadingJobs).toEqual(false);
});
it('sets jobs', () => {
expect(stateCopy.jobs).toEqual([{ name: 'karma' }]);
});
});
describe('RECEIVE_JOBS_FOR_STAGE_ERROR', () => {
beforeEach(() => {
mutations[types.RECEIVE_JOBS_FOR_STAGE_ERROR](stateCopy);
});
it('sets isLoadingJobs to false', () => {
expect(stateCopy.isLoadingJobs).toEqual(false);
});
it('resets jobs', () => {
expect(stateCopy.jobs).toEqual([]);
});
});
});
......@@ -403,6 +403,7 @@ describe('common_utils', () => {
afterEach(() => {
document.body.removeChild(document.getElementById('favicon'));
});
it('should set page favicon to provided favicon', () => {
const faviconPath = '//custom_favicon';
commonUtils.setFavicon(faviconPath);
......@@ -479,17 +480,14 @@ describe('common_utils', () => {
});
it('should reset favicon in case of error', (done) => {
mock.onGet(BUILD_URL).networkError();
mock.onGet(BUILD_URL).replyOnce(500);
commonUtils.setCiStatusFavicon(BUILD_URL)
.then(() => {
.catch(() => {
const favicon = document.getElementById('favicon');
expect(favicon.getAttribute('href')).toEqual(faviconDataUrl);
done();
})
// Error is already caught in catch() block of setCiStatusFavicon,
// It won't throw another error for us to catch
.catch(done.fail);
});
});
it('should set page favicon to CI status favicon based on provided status', (done) => {
......
......@@ -26,4 +26,54 @@ describe ChatNotificationService do
end
end
end
describe '#execute' do
let(:chat_service) { described_class.new }
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:webhook_url) { 'https://example.gitlab.com/' }
before do
allow(chat_service).to receive_messages(
project: project,
project_id: project.id,
service_hook: true,
webhook: webhook_url
)
WebMock.stub_request(:post, webhook_url)
subject.active = true
end
context 'with a repository' do
it 'returns true' do
subject.project = project
data = Gitlab::DataBuilder::Push.build_sample(project, user)
expect(Slack::Notifier).to receive(:new)
.with(webhook_url, {})
.and_return(
double(:slack_service).as_null_object
)
expect(chat_service.execute(data)).to be true
end
end
context 'with an empty repository' do
it 'returns true' do
subject.project = create(:project, :empty_repo)
data = Gitlab::DataBuilder::Push.build_sample(subject.project, user)
expect(Slack::Notifier).to receive(:new)
.with(webhook_url, {})
.and_return(
double(:slack_service).as_null_object
)
expect(chat_service.execute(data)).to be true
end
end
end
end
......@@ -67,4 +67,21 @@ describe DiffFileEntity do
end
end
end
context '#parallel_diff_lines' do
it 'exposes parallel diff lines correctly' do
response = subject
lines = response[:parallel_diff_lines]
# make sure at least one line is present for each side
expect(lines.map { |line| line[:right] }.compact).to be_present
expect(lines.map { |line| line[:left] }.compact).to be_present
# make sure all lines are in correct format
lines.each do |parallel_line|
expect(parallel_line[:left].as_json).to match_schema('entities/diff_line') if parallel_line[:left]
expect(parallel_line[:right].as_json).to match_schema('entities/diff_line') if parallel_line[:right]
end
end
end
end
require 'spec_helper'
describe DiffLineSerializer do
let(:line) { Gitlab::Diff::Line.new('hello world', 'new', 1, nil, 1) }
let(:serializer) { described_class.new.represent(line) }
describe '#to_json' do
subject { serializer.to_json }
it 'matches the schema' do
expect(subject).to match_schema('entities/diff_line')
end
context 'when lines are parallel' do
let(:right_line) { Gitlab::Diff::Line.new('right line', 'new', 1, nil, 1) }
let(:left_line) { Gitlab::Diff::Line.new('left line', 'match', 1, nil, 1) }
let(:parallel_line) { [{ right: right_line, left: left_line }] }
let(:serializer) { described_class.new.represent(parallel_line, {}, DiffLineParallelEntity) }
it 'matches the schema' do
expect(subject).to match_schema('entities/diff_line_parallel')
end
end
end
end
......@@ -825,7 +825,7 @@ rollout 100%:
fi
if [[ -n "$(helm ls -q "^$name$")" ]]; then
helm delete "$name"
helm delete --purge "$name"
fi
}
......