Commit 2c89e169 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent bd497e35
......@@ -93,7 +93,7 @@
DOCKER_DRIVER: overlay2
DOCKER_HOST: tcp://docker:2375
cache:
key: "assets-compile:production:vendor_ruby:.yarn-cache:tmp_cache_assets_sprockets:tmp_cache_webpack:v7"
key: "assets-compile:production:vendor_ruby:.yarn-cache:tmp_cache_assets_sprockets:tmp_cache_webpack:v9"
artifacts:
name: webpack-report
expire_in: 31d
......@@ -152,7 +152,7 @@ gitlab:assets:compile pull-cache:
# we override the max_old_space_size to prevent OOM errors
NODE_OPTIONS: --max_old_space_size=3584
cache:
key: "assets-compile:v8"
key: "assets-compile:v9"
artifacts:
expire_in: 7d
paths:
......@@ -180,7 +180,7 @@ compile-assets pull-push-cache foss:
when: on_success
cache:
policy: pull-push
key: "assets-compile:v8:foss"
key: "assets-compile:v9:foss"
compile-assets pull-cache:
extends: .compile-assets-metadata
......@@ -203,7 +203,7 @@ compile-assets pull-cache foss:
when: on_success
cache:
policy: pull
key: "assets-compile:v8:foss"
key: "assets-compile:v9:foss"
.frontend-job-base:
extends:
......
......@@ -25,7 +25,7 @@
# Jobs that only need to pull cache
.default-cache:
cache:
key: "debian-stretch-ruby-2.6.5-node-12.x"
key: "debian-stretch-ruby-2.6.5-pg9.6-node-12.x"
paths:
- .go/pkg/mod
- vendor/ruby
......
......@@ -143,7 +143,7 @@ static-analysis:
script:
- scripts/static-analysis
cache:
key: "debian-stretch-ruby-2.6-and-rubocop"
key: "debian-stretch-ruby-2.6-pg9.6-rubocop"
paths:
- vendor/ruby
- tmp/rubocop_cache
......
......@@ -25,6 +25,39 @@
- ".dockerignore"
- "qa/**/*"
.review:rules:mr-and-schedule:
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
- <<: *if-canonical-dot-com-gitlab-org-group-schedule
when: on_success
.review:rules:mr-only-auto:
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
.review:rules:mr-only-manual:
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: manual
.review:rules:review-cleanup:
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: manual
- <<: *if-canonical-dot-com-gitlab-org-group-schedule
when: on_success
.review:rules:danger:
rules:
- if: '$DANGER_GITLAB_API_TOKEN && $CI_MERGE_REQUEST_IID'
when: on_success
.review-docker:
extends:
- .default-tags
......@@ -41,14 +74,10 @@
GITLAB_EDITION: "ce"
build-qa-image:
extends: .review-docker
extends:
- .review-docker
- .review:rules:mr-and-schedule
stage: prepare
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
- <<: *if-canonical-dot-com-gitlab-org-group-schedule
when: on_success
script:
- '[[ ! -d "ee/" ]] || export GITLAB_EDITION="ee"'
- export QA_MASTER_IMAGE="${CI_REGISTRY}/${CI_PROJECT_PATH}/gitlab/gitlab-${GITLAB_EDITION}-qa:master"
......@@ -58,11 +87,11 @@ build-qa-image:
- time docker build --cache-from "${QA_MASTER_IMAGE}" --tag ${QA_IMAGE} --file ./qa/Dockerfile ./
- time docker push ${QA_IMAGE}
.base-review-cleanup:
review-cleanup:
extends:
- .default-tags
- .default-retry
- .default-only
- .review:rules:review-cleanup
stage: prepare
image: registry.gitlab.com/gitlab-org/gitlab-build-images:gitlab-charts-build-base
allow_failure: true
......@@ -75,45 +104,23 @@ build-qa-image:
script:
- ruby -rrubygems scripts/review_apps/automated_cleanup.rb
schedule:review-cleanup:
extends:
- .base-review-cleanup
- .only-review-schedules
manual:review-cleanup:
extends:
- .base-review-cleanup
- .only:changes-code-qa
when: manual
.review-build-cng-base:
review-build-cng:
extends:
- .default-tags
- .default-retry
- .default-only
- .review:rules:mr-and-schedule
image: ruby:2.6-alpine
stage: review-prepare
before_script:
- source scripts/utils.sh
- install_api_client_dependencies_with_apk
- install_gitlab_gem
dependencies: []
needs:
- job: gitlab:assets:compile pull-cache
artifacts: false
script:
- BUILD_TRIGGER_TOKEN=$REVIEW_APPS_BUILD_TRIGGER_TOKEN ./scripts/trigger-build cng
review-build-cng:
extends:
- .review-build-cng-base
- .only-review
- .only:changes-code-qa
needs: ["gitlab:assets:compile pull-cache"]
schedule:review-build-cng:
extends:
- .review-build-cng-base
- .only-review-schedules
needs: ["gitlab:assets:compile pull-cache"]
.review-workflow-base:
extends:
- .default-tags
......@@ -130,8 +137,10 @@ schedule:review-build-cng:
url: https://gitlab-${CI_ENVIRONMENT_SLUG}.${REVIEW_APPS_DOMAIN}
on_stop: review-stop
.review-deploy-base:
extends: .review-workflow-base
review-deploy:
extends:
- .review-workflow-base
- .review:rules:mr-and-schedule
stage: review
allow_failure: true
before_script:
......@@ -140,7 +149,7 @@ schedule:review-build-cng:
- export GITALY_VERSION=$(<GITALY_SERVER_VERSION)
- export GITLAB_WORKHORSE_VERSION=$(<GITLAB_WORKHORSE_VERSION)
- echo "${CI_ENVIRONMENT_URL}" > review_app_url.txt
- source scripts/utils.sh
- source ./scripts/utils.sh
- install_api_client_dependencies_with_apk
- source scripts/review_apps/review-apps.sh
script:
......@@ -156,19 +165,7 @@ schedule:review-build-cng:
expire_in: 2 days
when: always
review-deploy:
extends: .review-deploy-base
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
schedule:review-deploy:
extends:
- .review-deploy-base
- .only-review-schedules
.base-review-stop:
.review-stop-base:
extends: .review-workflow-base
environment:
action: stop
......@@ -183,22 +180,18 @@ schedule:review-deploy:
- source ./scripts/review_apps/review-apps.sh
review-stop-failed-deployment:
extends: .base-review-stop
extends:
- .review-stop-base
- .review:rules:mr-only-auto
stage: prepare
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
script:
- delete_failed_release
review-stop:
extends: .base-review-stop
extends:
- .review-stop-base
- .review:rules:mr-only-manual
stage: review
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: manual
allow_failure: true
script:
- delete_release
......@@ -206,8 +199,9 @@ review-stop:
.review-qa-base:
extends: .review-docker
stage: qa
needs: ["review-deploy"]
dependencies: ["review-deploy"]
needs:
- job: review-deploy
artifacts: true
allow_failure: true
variables:
QA_ARTIFACTS_DIR: "${CI_PROJECT_DIR}/qa"
......@@ -235,29 +229,30 @@ review-stop:
when: always
review-qa-smoke:
extends: .review-qa-base
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
extends:
- .review-qa-base
- .review:rules:mr-only-auto
script:
- gitlab-qa Test::Instance::Smoke "${QA_IMAGE}" "${CI_ENVIRONMENT_URL}"
review-qa-all:
extends: .review-qa-base
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: manual
extends:
- .review-qa-base
- .review:rules:mr-only-manual
parallel: 5
script:
- export KNAPSACK_REPORT_PATH=knapsack/master_report.json
- export KNAPSACK_TEST_FILE_PATTERN=qa/specs/features/**/*_spec.rb
- gitlab-qa Test::Instance::Any "${QA_IMAGE}" "${CI_ENVIRONMENT_URL}" -- --format RspecJunitFormatter --out tmp/rspec-${CI_JOB_ID}.xml --format html --out tmp/rspec.htm --color --format documentation
.review-performance-base:
extends: .review-docker
review-performance:
extends:
- .review-docker
- .review:rules:mr-and-schedule
stage: qa
needs:
- job: review-deploy
artifacts: true
allow_failure: true
before_script:
- export CI_ENVIRONMENT_URL="$(cat review_app_url.txt)"
......@@ -275,58 +270,17 @@ review-qa-all:
reports:
performance: performance.json
review-performance:
extends: .review-performance-base
rules:
- <<: *if-canonical-gitlab-merge-request
changes: *code-qa-patterns
when: on_success
needs: ["review-deploy"]
dependencies: ["review-deploy"]
before_script:
- export CI_ENVIRONMENT_URL="$(cat review_app_url.txt)"
- echo "${CI_ENVIRONMENT_URL}"
- mkdir -p gitlab-exporter
- wget -O ./gitlab-exporter/index.js https://gitlab.com/gitlab-org/gl-performance/raw/master/index.js
- mkdir -p sitespeed-results
script:
- docker run --shm-size=1g --rm -v "$(pwd)":/sitespeed.io sitespeedio/sitespeed.io:6.3.1 --plugins.add ./gitlab-exporter --outputFolder sitespeed-results "${CI_ENVIRONMENT_URL}"
after_script:
- mv sitespeed-results/data/performance.json performance.json
artifacts:
paths:
- sitespeed-results/
reports:
performance: performance.json
schedule:review-performance:
extends:
- .review-performance-base
- .only-review-schedules
needs: ["schedule:review-deploy"]
dependencies: ["schedule:review-deploy"]
parallel-spec-reports:
extends:
- .default-tags
- .default-only
- .only-review
- .only:changes-code-qa
- .review:rules:mr-only-manual
image: ruby:2.6-alpine
stage: post-qa
dependencies: ["review-qa-all"]
allow_failure: true
variables:
NEW_PARALLEL_SPECS_REPORT: qa/report-new.html
BASE_ARTIFACT_URL: "${CI_PROJECT_URL}/-/jobs/${CI_JOB_ID}/artifacts/file/qa/"
allow_failure: true
when: manual
artifacts:
when: always
paths:
- qa/report-new.html
- qa/gitlab-qa-run-*
reports:
junit: qa/gitlab-qa-run-*/**/rspec-*.xml
script:
- apk add --update build-base libxml2-dev libxslt-dev && rm -rf /var/cache/apk/*
- gem install nokogiri --no-document
......@@ -335,20 +289,23 @@ parallel-spec-reports:
- cd -
- '[[ -f $NEW_PARALLEL_SPECS_REPORT ]] || echo "{}" > ${NEW_PARALLEL_SPECS_REPORT}'
- scripts/merge-html-reports ${NEW_PARALLEL_SPECS_REPORT} ${BASE_ARTIFACT_URL}${ARTIFACT_DIRS} qa/gitlab-qa-run-*/**/rspec.htm
artifacts:
when: always
paths:
- qa/report-new.html
- qa/gitlab-qa-run-*
reports:
junit: qa/gitlab-qa-run-*/**/rspec-*.xml
danger-review:
extends:
- .default-tags
- .default-retry
- .default-cache
- .default-only
- .except:refs-master-tags-stable-deploy
- .review:rules:danger
image: registry.gitlab.com/gitlab-org/gitlab-build-images:danger
stage: test
dependencies: []
only:
variables:
- $DANGER_GITLAB_API_TOKEN
script:
- git version
- node --version
......
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-not-canonical-namespace: &if-not-canonical-namespace
if: '$CI_PROJECT_NAMESPACE !~ /^gitlab(-org)?($|\/)/'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-not-foss: &if-not-foss
if: '$CI_PROJECT_NAME != "gitlab-foss" && $CI_PROJECT_NAME != "gitlab-ce" && $CI_PROJECT_NAME != "gitlabhq"'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-master-or-tag: &if-master-or-tag
if: '$CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_TAG'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-default-refs: &if-default-refs
if: '$CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_REF_NAME =~ /^[\d-]+-stable(-ee)?$/ || $CI_COMMIT_REF_NAME =~ /^\d+-\d+-auto-deploy-\d+$/ || $CI_COMMIT_REF_NAME =~ /^security\// || $CI_MERGE_REQUEST_IID || $CI_COMMIT_TAG'
# Make sure to update all the similar patterns in other CI config files if you modify these patterns
.code-backstage-patterns: &code-backstage-patterns
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,scss-lint}.yml"
- ".csscomb.json"
- "Dockerfile.assets"
- "*_VERSION"
- "Gemfile{,.lock}"
- "Rakefile"
- "{babel.config,jest.config}.js"
- "config.ru"
- "{package.json,yarn.lock}"
- "{,ee/}{app,bin,config,db,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
# Backstage changes
- "Dangerfile"
- "danger/**/*"
- "{,ee/}fixtures/**/*"
- "{,ee/}rubocop/**/*"
- "{,ee/}spec/**/*"
- "doc/README.md" # Some RSpec test rely on this file
# Make sure to update all the similar patterns in other CI config files if you modify these patterns
.code-backstage-qa-patterns: &code-backstage-qa-patterns
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,scss-lint}.yml"
- ".csscomb.json"
- "Dockerfile.assets"
- "*_VERSION"
- "Gemfile{,.lock}"
- "Rakefile"
- "{babel.config,jest.config}.js"
- "config.ru"
- "{package.json,yarn.lock}"
- "{,ee/}{app,bin,config,db,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
# Backstage changes
- "Dangerfile"
- "danger/**/*"
- "{,ee/}fixtures/**/*"
- "{,ee/}rubocop/**/*"
- "{,ee/}spec/**/*"
- "doc/README.md" # Some RSpec test rely on this file
# QA changes
- ".dockerignore"
- "qa/**/*"
.setup:rules:cache-gems:
rules:
- <<: *if-not-canonical-namespace
when: never
- <<: *if-master-or-tag
changes: *code-backstage-qa-patterns
when: on_success
.setup:rules:gitlab_git_test:
rules:
- <<: *if-default-refs
changes: *code-backstage-patterns
when: on_success
.setup:rules:no_ee_check:
rules:
- <<: *if-not-foss
when: never
- <<: *if-default-refs
changes: *code-backstage-patterns
when: on_success
# Insurance in case a gem needed by one of our releases gets yanked from
# rubygems.org in the future.
cache gems:
......@@ -6,11 +93,11 @@ cache gems:
- .default-retry
- .default-cache
- .default-before_script
- .only:variables-canonical-dot-com
- .only:changes-code-backstage-qa
- .setup:rules:cache-gems
stage: test
dependencies: ["setup-test-env"]
needs: ["setup-test-env"]
needs:
- job: setup-test-env
artifacts: true
variables:
SETUP_DB: "false"
script:
......@@ -18,30 +105,23 @@ cache gems:
artifacts:
paths:
- vendor/cache
only:
refs:
- master
- tags
.minimal-job:
extends:
- .default-tags
- .default-retry
- .default-only
- .only:changes-code-backstage
dependencies: []
gitlab_git_test:
extends: .minimal-job
extends:
- .minimal-job
- .setup:rules:gitlab_git_test
script:
- spec/support/prepare-gitlab-git-test-for-commit --check-for-changes
no_ee_check:
extends: .minimal-job
extends:
- .minimal-job
- .setup:rules:no_ee_check
script:
- scripts/no-ee-check
only:
variables:
- $CI_PROJECT_NAME == "gitlab-foss"
- $CI_PROJECT_NAME == "gitlab-ce" # Support former project name for forks/mirrors
- $CI_PROJECT_NAME == "gitlabhq" # Support former project name for dev
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-default-refs: &if-default-refs
if: '$CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_REF_NAME =~ /^[\d-]+-stable(-ee)?$/ || $CI_COMMIT_REF_NAME =~ /^\d+-\d+-auto-deploy-\d+$/ || $CI_COMMIT_REF_NAME =~ /^security\// || $CI_MERGE_REQUEST_IID || $CI_COMMIT_TAG'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-merge-request: &if-merge-request
if: '$CI_MERGE_REQUEST_IID'
# Make sure to update all the similar conditions in other CI config files if you modify these conditions
.if-canonical-dot-com-gitlab-schedule: &if-canonical-dot-com-gitlab-schedule
if: '$CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab" && $CI_PIPELINE_SOURCE == "schedule"'
# Make sure to update all the similar patterns in other CI config files if you modify these patterns
.code-backstage-patterns: &code-backstage-patterns
- ".gitlab/ci/**/*"
- ".{eslintignore,gitattributes,nvmrc,prettierrc,stylelintrc,yamllint}"
- ".{codeclimate,eslintrc,gitlab-ci,haml-lint,haml-lint_todo,rubocop,rubocop_todo,scss-lint}.yml"
- ".csscomb.json"
- "Dockerfile.assets"
- "*_VERSION"
- "Gemfile{,.lock}"
- "Rakefile"
- "{babel.config,jest.config}.js"
- "config.ru"
- "{package.json,yarn.lock}"
- "{,ee/}{app,bin,config,db,haml_lint,lib,locale,public,scripts,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
# Backstage changes
- "Dangerfile"
- "danger/**/*"
- "{,ee/}fixtures/**/*"
- "{,ee/}rubocop/**/*"
- "{,ee/}spec/**/*"
- "doc/README.md" # Some RSpec test rely on this file
.test-metadata:rules:retrieve-tests-metadata:
rules:
- <<: *if-default-refs
changes: *code-backstage-patterns
when: on_success
.test-metadata:rules:update-tests-metadata:
rules:
- <<: *if-canonical-dot-com-gitlab-schedule
changes: *code-backstage-patterns
when: on_success
.test-metadata:rules:flaky-examples-check:
rules:
- <<: *if-merge-request
changes: *code-backstage-patterns
when: on_success
.tests-metadata-state:
extends:
- .default-only
- .only:changes-code-backstage
variables:
TESTS_METADATA_S3_BUCKET: "gitlab-ce-cache"
before_script:
......@@ -19,7 +69,9 @@
- rspec_profiling/
retrieve-tests-metadata:
extends: .tests-metadata-state
extends:
- .tests-metadata-state
- .test-metadata:rules:retrieve-tests-metadata
stage: prepare
cache:
policy: pull
......@@ -28,7 +80,9 @@ retrieve-tests-metadata:
- retrieve_tests_metadata
update-tests-metadata:
extends: .tests-metadata-state
extends:
- .tests-metadata-state
- .test-metadata:rules:update-tests-metadata
stage: post-test
cache:
policy: push
......@@ -36,27 +90,17 @@ update-tests-metadata:
- retry gem install fog-aws mime-types activesupport rspec_profiling postgres-copy --no-document
- source scripts/rspec_helpers.sh
- update_tests_metadata
only:
refs:
- schedules
variables:
# Only update the Knapsack metadata on GitLab.com/gitlab-org/gitlab
- $CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_PATH == "gitlab-org/gitlab"
flaky-examples-check:
extends:
- .default-tags
- .default-retry
- .default-only
- .only:changes-code-backstage
- .test-metadata:rules:flaky-examples-check
image: ruby:2.6-alpine
stage: post-test
variables:
NEW_FLAKY_SPECS_REPORT: rspec_flaky/report-new.json
allow_failure: true
only:
refs:
- merge_requests
artifacts:
expire_in: 30d
paths:
......
......@@ -2,8 +2,7 @@
import ViewerSwitcher from './blob_header_viewer_switcher.vue';
import DefaultActions from './blob_header_default_actions.vue';
import BlobFilepath from './blob_header_filepath.vue';
import eventHub from '../event_hub';
import { RICH_BLOB_VIEWER, SIMPLE_BLOB_VIEWER } from './constants';
import { SIMPLE_BLOB_VIEWER } from './constants';
export default {
components: {
......@@ -26,10 +25,15 @@ export default {
required: false,
default: false,
},
activeViewerType: {
type: String,
required: false,
default: SIMPLE_BLOB_VIEWER,
},
},
data() {
return {
activeViewer: this.blob.richViewer ? RICH_BLOB_VIEWER : SIMPLE_BLOB_VIEWER,
viewer: this.hideViewerSwitcher ? null : this.activeViewerType,
};
},
computed: {
......@@ -40,19 +44,16 @@ export default {
return !this.hideDefaultActions;
},
},
created() {
if (this.showViewerSwitcher) {
eventHub.$on('switch-viewer', this.setActiveViewer);
}
},
beforeDestroy() {
if (this.showViewerSwitcher) {
eventHub.$off('switch-viewer', this.setActiveViewer);
}
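// Emit `viewer-changed` whenever the active viewer changes and the switcher is rendered.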
watch: {
viewer(newVal, oldVal) {
if (!this.hideViewerSwitcher && newVal !== oldVal) {
this.$emit('viewer-changed', newVal);
}
},
},
methods: {
setActiveViewer(viewer) {
this.activeViewer = viewer;
proxyCopyRequest() {
this.$emit('copy');
},
},
};
......@@ -66,11 +67,16 @@ export default {
</blob-filepath>
<div class="file-actions d-none d-sm-block">
<viewer-switcher v-if="showViewerSwitcher" :blob="blob" :active-viewer="activeViewer" />
<viewer-switcher v-if="showViewerSwitcher" v-model="viewer" />
<slot name="actions"></slot>
<default-actions v-if="showDefaultActions" :blob="blob" :active-viewer="activeViewer" />
<default-actions
v-if="showDefaultActions"
:raw-path="blob.rawPath"
:active-viewer="viewer"
@copy="proxyCopyRequest"
/>
</div>
</div>
</template>
......@@ -7,7 +7,6 @@ import {
RICH_BLOB_VIEWER,
SIMPLE_BLOB_VIEWER,
} from './constants';
import eventHub from '../event_hub';
export default {
components: {
......@@ -19,8 +18,8 @@ export default {
GlTooltip: GlTooltipDirective,
},
props: {
blob: {
type: Object,
rawPath: {
type: String,
required: true,
},
activeViewer: {
......@@ -30,11 +29,8 @@ export default {
},
},
computed: {
rawUrl() {
return this.blob.rawPath;
},
downloadUrl() {
return `${this.blob.rawPath}?inline=false`;
return `${this.rawPath}?inline=false`;
},
copyDisabled() {
return this.activeViewer === RICH_BLOB_VIEWER;
......@@ -42,7 +38,7 @@ export default {
},
methods: {
requestCopyContents() {
eventHub.$emit('copy');
this.$emit('copy');
},
},
BTN_COPY_CONTENTS_TITLE,
......@@ -65,7 +61,7 @@ export default {
v-gl-tooltip.hover
:aria-label="$options.BTN_RAW_TITLE"
:title="$options.BTN_RAW_TITLE"
:href="rawUrl"
:href="rawPath"
target="_blank"
>
<gl-icon name="doc-code" :size="14" />
......
......@@ -6,7 +6,6 @@ import {
SIMPLE_BLOB_VIEWER,
SIMPLE_BLOB_VIEWER_TITLE,
} from './constants';
import eventHub from '../event_hub';
export default {
components: {
......@@ -18,11 +17,7 @@ export default {
GlTooltip: GlTooltipDirective,
},
props: {
blob: {
type: Object,
required: true,
},
activeViewer: {
value: {
type: String,
default: SIMPLE_BLOB_VIEWER,
required: false,
......@@ -30,16 +25,16 @@ export default {
},
computed: {
isSimpleViewer() {
return this.activeViewer === SIMPLE_BLOB_VIEWER;
return this.value === SIMPLE_BLOB_VIEWER;
},
isRichViewer() {
return this.activeViewer === RICH_BLOB_VIEWER;
return this.value === RICH_BLOB_VIEWER;
},
},
methods: {
switchToViewer(viewer) {
if (viewer !== this.activeViewer) {
eventHub.$emit('switch-viewer', viewer);
if (viewer !== this.value) {
this.$emit('input', viewer);
}
},
},
......
import Vue from 'vue';
export default new Vue();
import Vue from 'vue';
import VueRouter from 'vue-router';
import IdeRouter from '~/ide/ide_router_extension';
import { joinPaths } from '~/lib/utils/url_utility';
import flash from '~/flash';
import store from './stores';
import { __ } from '~/locale';
Vue.use(VueRouter);
Vue.use(IdeRouter);
/**
* Routes below /-/ide/:
......@@ -33,7 +33,7 @@ const EmptyRouterComponent = {
},
};
const router = new VueRouter({
const router = new IdeRouter({
mode: 'history',
base: joinPaths(gon.relative_url_root || '', '/-/ide/'),
routes: [
......
import VueRouter from 'vue-router';
import { escapeFileUrl } from '~/lib/utils/url_utility';
// To allow special characters (like "#", for example) in branch names, we
// need to encode all locations before they are processed by the History API.
// Otherwise, paths get mangled and the router receives an incorrect branch id.
// The only way to do this consistently and in a more or less future-proof
// manner is, unfortunately, to monkey-patch VueRouter or, as suggested here,
// achieve the same more reliably by subclassing VueRouter and overriding the
// methods used in the Web IDE.
//
// More context: https://gitlab.com/gitlab-org/gitlab/issues/35473
export default class IDERouter extends VueRouter {
push(location, onComplete, onAbort) {
super.push(escapeFileUrl(location), onComplete, onAbort);
}
resolve(to, current, append) {
return super.resolve(escapeFileUrl(to), current, append);
}
}
......@@ -194,12 +194,14 @@ export function redirectTo(url) {
return window.location.assign(url);
}
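// Encode special characters in a file URL while keeping '/' path separators intact.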
export const escapeFileUrl = fileUrl => encodeURIComponent(fileUrl).replace(/%2F/g, '/');
export function webIDEUrl(route = undefined) {
let returnUrl = `${gon.relative_url_root || ''}/-/ide/`;
if (route) {
returnUrl += `project${route.replace(new RegExp(`^${gon.relative_url_root || ''}`), '')}`;
}
return returnUrl;
return escapeFileUrl(returnUrl);
}
/**
......@@ -313,8 +315,6 @@ export const setUrlParams = (params, url = window.location.href, clearParams = f
return urlObj.toString();
};
export const escapeFileUrl = fileUrl => encodeURIComponent(fileUrl).replace(/%2F/g, '/');
export function urlIsDifferent(url, compare = String(window.location)) {
return url !== compare;
}
import $ from 'jquery';
import Chart from 'chart.js';
import { barChartOptions, pieChartOptions } from '~/lib/utils/chart_utils';
import Vue from 'vue';
import { __ } from '~/locale';
import { GlColumnChart } from '@gitlab/ui/dist/charts';
import SeriesDataMixin from './series_data_mixin';
document.addEventListener('DOMContentLoaded', () => {
const projectChartData = JSON.parse(document.getElementById('projectChartData').innerHTML);
const languagesContainer = document.getElementById('js-languages-chart');
const monthContainer = document.getElementById('js-month-chart');
const weekdayContainer = document.getElementById('js-weekday-chart');
const hourContainer = document.getElementById('js-hour-chart');
const barChart = (selector, data) => {
// get selector by context
const ctx = selector.get(0).getContext('2d');
// pointing parent container to make chart.js inherit its width
const container = $(selector).parent();
selector.attr('width', $(container).width());
// Scale fonts if window width lower than 768px (iPad portrait)
const shouldAdjustFontSize = window.innerWidth < 768;
return new Chart(ctx, {
type: 'bar',
data,
options: barChartOptions(shouldAdjustFontSize),
});
};
const pieChart = (context, data) => {
const options = pieChartOptions();
return new Chart(context, {
type: 'pie',
data,
options,
});
};
const chartData = data => ({
labels: Object.keys(data),
datasets: [
{
backgroundColor: 'rgba(220,220,220,0.5)',
borderColor: 'rgba(220,220,220,1)',
borderWidth: 1,
data: Object.values(data),
},
],
});
const LANGUAGE_CHART_HEIGHT = 300;
const reorderWeekDays = (weekDays, firstDayOfWeek = 0) => {
if (firstDayOfWeek === 0) {
......@@ -58,28 +26,115 @@ document.addEventListener('DOMContentLoaded', () => {
}, {});
};
const hourData = chartData(projectChartData.hour);
barChart($('#hour-chart'), hourData);
const weekDays = reorderWeekDays(projectChartData.weekDays, gon.first_day_of_week);
const dayData = chartData(weekDays);
barChart($('#weekday-chart'), dayData);
// eslint-disable-next-line no-new
new Vue({
el: languagesContainer,
components: {
GlColumnChart,
},
data() {
return {
chartData: JSON.parse(languagesContainer.dataset.chartData),
};
},
computed: {
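// Convert the chart data entries into the [label, value] pairs expected by GlColumnChart.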
seriesData() {
return { full: this.chartData.map(d => [d.label, d.value]) };
},
},
render(h) {
return h(GlColumnChart, {
props: {
data: this.seriesData,
xAxisTitle: __('Used programming language'),
yAxisTitle: __('Percentage'),
xAxisType: 'category',
},
attrs: {
height: LANGUAGE_CHART_HEIGHT,
},
});
},
});
const monthData = chartData(projectChartData.month);
barChart($('#month-chart'), monthData);
// eslint-disable-next-line no-new
new Vue({
el: monthContainer,
components: {
GlColumnChart,
},
mixins: [SeriesDataMixin],
data() {
return {
chartData: JSON.parse(monthContainer.dataset.chartData),
};
},
render(h) {
return h(GlColumnChart, {
props: {
data: this.seriesData,
xAxisTitle: __('Day of month'),
yAxisTitle: __('No. of commits'),
xAxisType: 'category',
},
});
},
});
const data = {
datasets: [
{
data: projectChartData.languages.map(x => x.value),
backgroundColor: projectChartData.languages.map(x => x.color),
hoverBackgroundColor: projectChartData.languages.map(x => x.highlight),
// eslint-disable-next-line no-new
new Vue({
el: weekdayContainer,
components: {
GlColumnChart,
},
data() {
return {
chartData: JSON.parse(weekdayContainer.dataset.chartData),
};
},
computed: {
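// Reorder week days to start on the user's configured first day, then build [day, commit count] pairs.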
seriesData() {
const weekDays = reorderWeekDays(this.chartData, gon.first_day_of_week);
const data = Object.keys(weekDays).reduce((acc, key) => {
acc.push([key, weekDays[key]]);
return acc;
}, []);
return { full: data };
},
],
labels: projectChartData.languages.map(x => x.label),
};
const ctx = $('#languages-chart')
.get(0)
.getContext('2d');
pieChart(ctx, data);
},
render(h) {
return h(GlColumnChart, {
props: {
data: this.seriesData,
xAxisTitle: __('Weekday'),
yAxisTitle: __('No. of commits'),
xAxisType: 'category',
},
});
},
});
// eslint-disable-next-line no-new
new Vue({
el: hourContainer,
components: {
GlColumnChart,
},
mixins: [SeriesDataMixin],
data() {
return {
chartData: JSON.parse(hourContainer.dataset.chartData),
};
},
render(h) {
return h(GlColumnChart, {
props: {
data: this.seriesData,
xAxisTitle: __('Hour (UTC)'),
yAxisTitle: __('No. of commits'),
xAxisType: 'category',
},
});
},
});
});
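// Shared mixin exposing `seriesData`: converts a { label: value } object into the
// { full: [[label, value], ...] } format used by GlColumnChart.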
export default {
computed: {
seriesData() {
const data = Object.keys(this.chartData).reduce((acc, key) => {
acc.push([key, this.chartData[key]]);
return acc;
}, []);
return { full: data };
},
},
};
# frozen_string_literal: true
module Projects
module Alerting
class NotificationsController < Projects::ApplicationController
respond_to :json
skip_before_action :verify_authenticity_token
skip_before_action :project
prepend_before_action :repository, :project_without_auth
def create
token = extract_alert_manager_token(request)
result = notify_service.execute(token)
head(response_status(result))
end
private
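# The project is looked up by full path without the usual auth filters;
# the request is authorized later via the alerts service token.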
def project_without_auth
@project ||= Project
.find_by_full_path("#{params[:namespace_id]}/#{params[:project_id]}")
end
def extract_alert_manager_token(request)
Doorkeeper::OAuth::Token.from_bearer_authorization(request)
end
def notify_service
Projects::Alerting::NotifyService
.new(project, current_user, notification_payload)
end
def response_status(result)
return :ok if result.success?
result.http_status
end
def notification_payload
params.permit![:notification]
end
end
end
end
......@@ -27,7 +27,7 @@ module BlobHelper
"#{current_user.namespace.full_path}/#{project.path}"
end
segments = [ide_path, 'project', project_path, 'edit', ref]
segments = [ide_path, 'project', project_path, 'edit', encode_ide_path(ref)]
segments.concat(['-', encode_ide_path(path)]) if path.present?
File.join(segments)
end
......
......@@ -77,7 +77,11 @@ class ContainerRepository < ApplicationRecord
end
def delete_tag_by_digest(digest)
client.delete_repository_tag(self.path, digest)
client.delete_repository_tag_by_digest(self.path, digest)
end
def delete_tag_by_name(name)
client.delete_repository_tag_by_name(self.path, name)
end
def self.build_from_path(path)
......
......@@ -138,6 +138,7 @@ class Project < ApplicationRecord
has_many :boards
# Project services
has_one :alerts_service
has_one :campfire_service
has_one :discord_service
has_one :drone_ci_service
......@@ -2330,6 +2331,10 @@ class Project < ApplicationRecord
protected_branches.limit(limit)
end
def alerts_service_activated?
false
end
private
def closest_namespace_setting(name)
......
# frozen_string_literal: true
require 'securerandom'
class AlertsService < Service
has_one :data, class_name: 'AlertsServiceData', autosave: true,
inverse_of: :service, foreign_key: :service_id
attribute :token, :string
delegate :token, :token=, :token_changed?, :token_was, to: :data
validates :token, presence: true, if: :activated?
before_validation :prevent_token_assignment
before_validation :ensure_token, if: :activated?
def url
url_helpers.project_alerts_notify_url(project, format: :json)
end
def json_fields
super + %w(token)
end
def editable?
false
end
def show_active_box?
false
end
def can_test?
false
end
def title
_('Alerts endpoint')
end
def description
_('Receive alerts on GitLab from any source')
end
def detailed_description
description
end
def self.to_param
'alerts'
end
def self.supported_events
%w()
end
def data
super || build_data
end
private
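# Discard any externally assigned token value; tokens are only ever generated server-side.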
def prevent_token_assignment
self.token = token_was if token.present? && token_changed?
end
def ensure_token
self.token = generate_token if token.blank?
end
def generate_token
SecureRandom.hex
end
def url_helpers
Gitlab::Routing.url_helpers
end
end
# frozen_string_literal: true
require 'securerandom'
class AlertsServiceData < ApplicationRecord
belongs_to :service, class_name: 'AlertsService'
validates :service, presence: true
attr_encrypted :token,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-gcm'
end
# frozen_string_literal: true
module Projects
module Alerting
class NotifyService < BaseService
include Gitlab::Utils::StrongMemoize
def execute(token)
return forbidden unless alerts_service_activated?
return unauthorized unless valid_token?(token)
process_incident_issues
ServiceResponse.success
rescue Gitlab::Alerting::NotificationPayloadParser::BadPayloadError
bad_request
end
private
delegate :alerts_service, :alerts_service_activated?, to: :project
def process_incident_issues
IncidentManagement::ProcessAlertWorker
.perform_async(project.id, parsed_payload)
end
def parsed_payload
Gitlab::Alerting::NotificationPayloadParser.call(params.to_h)
end
def valid_token?(token)
token == alerts_service.token
end
def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: 400)
end
def unauthorized
ServiceResponse.error(message: 'Unauthorized', http_status: 401)
end
def forbidden
ServiceResponse.error(message: 'Forbidden', http_status: 403)
end
end
end
end
......@@ -14,12 +14,25 @@ module Projects
private
# Delete tags by name with a single DELETE request. This is only supported
# by the GitLab Container Registry fork. See
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/23325 for details.
def fast_delete(container_repository, tag_names)
deleted_tags = tag_names.select do |name|
container_repository.delete_tag_by_name(name)
end
deleted_tags.any? ? success(deleted: deleted_tags) : error('could not delete tags')
end
# Replace a tag on the registry with a dummy tag.
# This is a hack as the registry doesn't support deleting individual
# tags. This code effectively pushes a dummy image and assigns the tag to it.
# This way, when the tag is deleted, only the dummy image is affected.
# This is used to preserve compatibility with third-party registries that
# don't support fast delete.
# See https://gitlab.com/gitlab-org/gitlab/issues/15737 for a discussion
def smart_delete(container_repository, tag_names)
def slow_delete(container_repository, tag_names)
# generates the blobs for the dummy image
dummy_manifest = container_repository.client.generate_empty_manifest(container_repository.path)
return error('could not generate manifest') if dummy_manifest.nil?
......@@ -36,6 +49,15 @@ module Projects
end
end
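# Choose a deletion strategy: fast delete when the feature flag is enabled and the
# registry supports tag deletion, otherwise fall back to the slow path.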
def smart_delete(container_repository, tag_names)
fast_delete_enabled = Feature.enabled?(:container_registry_fast_tag_delete, default_enabled: true)
if fast_delete_enabled && container_repository.client.supports_tag_delete?
fast_delete(container_repository, tag_names)
else
slow_delete(container_repository, tag_names)
end
end
# update the manifests of the tags with the new dummy image
def replace_tag_manifests(container_repository, dummy_manifest, tag_names)
deleted_tags = {}
......
......@@ -2,7 +2,8 @@
module Projects
class LsifDataService
attr_reader :file, :project, :path, :commit_id
attr_reader :file, :project, :path, :commit_id,
:docs, :doc_ranges, :ranges, :def_refs
CACHE_EXPIRE_IN = 1.hour
......@@ -14,19 +15,18 @@ module Projects
end
def execute
docs, doc_ranges, ranges =
fetch_data.values_at('docs', 'doc_ranges', 'ranges')
doc_id = doc_id_from(docs)
fetch_data!
doc_ranges[doc_id]&.map do |range_id|
line_data, column_data = ranges[range_id]['loc']
location, ref_id = ranges[range_id].values_at('loc', 'ref_id')
line_data, column_data = location
{
start_line: line_data.first,
end_line: line_data.last,
start_char: column_data.first,
end_char: column_data.last
end_char: column_data.last,
definition_url: definition_url_for(def_refs[ref_id])
}
end
end
......@@ -47,8 +47,17 @@ module Projects
end
end
def doc_id_from(docs)
docs.reduce(nil) do |doc_id, (id, doc_path)|
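# Populate the docs, doc_ranges, ranges and def_refs readers from the processed LSIF data.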
def fetch_data!
data = fetch_data
@docs = data['docs']
@doc_ranges = data['doc_ranges']
@ranges = data['ranges']
@def_refs = data['def_refs']
end
def doc_id
@doc_id ||= docs.reduce(nil) do |doc_id, (id, doc_path)|
next doc_id unless doc_path =~ /#{path}$/
if doc_id.nil? || docs[doc_id].size > doc_path.size
......@@ -58,5 +67,24 @@ module Projects
doc_id
end
end
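# Directory portion of the matched document path, obtained by stripping the requested relative path.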
def dir_absolute_path
@dir_absolute_path ||= docs[doc_id]&.delete_suffix(path)
end
def definition_url_for(ref_id)
return unless range = ranges[ref_id]
def_doc_id, location = range.values_at('doc_id', 'loc')
localized_doc_url = docs[def_doc_id].delete_prefix(dir_absolute_path)
# location is stored as [[start_line, end_line], [start_char, end_char]]
start_line = location.first.first
line_anchor = "L#{start_line + 1}"
definition_ref_path = [commit_id, localized_doc_url].join('/')
Gitlab::Routing.url_helpers.project_blob_path(project, definition_ref_path, anchor: line_anchor)
end
end
end
......@@ -7,20 +7,7 @@
%p
= _("Measured in bytes of code. Excludes generated and vendored code.")
.row
.col-md-4
%ul.bordered-list
- @languages.each do |language|
%li
%span{ style: "color: #{language[:color]}" }
= icon('circle')
&nbsp;
= language[:label]
.float-right
= language[:value]
\%
.col-md-8
%canvas#languages-chart{ height: 400 }
#js-languages-chart{ data: { chart_data: @languages.to_json.html_safe } }
.repo-charts
.sub-header-block.border-top
......@@ -60,27 +47,18 @@
%p.slead
= _("Commits per day of month")
%div
%canvas#month-chart
#js-month-chart{ data: { chart_data: @commits_per_month.to_json.html_safe } }
.row
.col-md-6
.col-md-6
%p.slead
= _("Commits per weekday")
%div
%canvas#weekday-chart
#js-weekday-chart{ data: { chart_data: @commits_per_week_days.to_json.html_safe } }
.row
.col-md-6
.col-md-6
%p.slead
= _("Commits per day hour (UTC)")
%div
%canvas#hour-chart
-# haml-lint:disable InlineJavaScript
%script#projectChartData{ type: "application/json" }
- projectChartData = {};
- projectChartData['hour'] = @commits_per_time
- projectChartData['weekDays'] = @commits_per_week_days
- projectChartData['month'] = @commits_per_month
- projectChartData['languages'] = @languages
= projectChartData.to_json.html_safe
#js-hour-chart{ data: { chart_data: @commits_per_time.to_json.html_safe } }
---
title: Improve performance of the Container Registry delete tags API
merge_request: 23325
author:
type: performance
---
title: 'WebIDE: Support # in branch names'
merge_request: 24717
author:
type: changed
---
title: Add more accurate way of counting remaining background migrations before upgrading
merge_request:
author:
type: fixed
......@@ -12,6 +12,8 @@
if Gitlab::Runtime.puma? && !Rails.env.test?
require 'rack/timeout/base'
Rack::Timeout::Logger.level = Logger::ERROR
Gitlab::Application.configure do |config|
config.middleware.insert_before(Rack::Runtime, Rack::Timeout,
service_timeout: ENV.fetch('GITLAB_RAILS_RACK_TIMEOUT', 60).to_i,
......
......@@ -323,6 +323,8 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
end
post 'alerts/notify', to: 'alerting/notifications#create'
resources :pipelines, only: [:index, :new, :create, :show, :destroy] do
collection do
resource :pipelines_settings, path: 'settings', only: [:show, :update]
......
......@@ -11,6 +11,8 @@ Updating Geo nodes involves performing:
Depending on which version of Geo you are updating to/from, there may be
different steps.
- [Updating to GitLab 12.7](version_specific_updates.md#updating-to-gitlab-127)
- [Updating to GitLab 12.2](version_specific_updates.md#updating-to-gitlab-122)
- [Updating to GitLab 12.1](version_specific_updates.md#updating-to-gitlab-121)
- [Updating to GitLab 10.8](version_specific_updates.md#updating-to-gitlab-108)
- [Updating to GitLab 10.6](version_specific_updates.md#updating-to-gitlab-106)
......
......@@ -116,14 +116,14 @@ following command:
**For Omnibus installations**
```shell
sudo gitlab-rails runner -e production 'puts Sidekiq::Queue.new("background_migration").size'
sudo gitlab-rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
```
**For installations from source**
```
cd /home/git/gitlab
sudo -u git -H bundle exec rails runner -e production 'puts Sidekiq::Queue.new("background_migration").size'
sudo -u git -H bundle exec rails runner -e production 'puts Gitlab::BackgroundMigration.remaining'
```
## Upgrading to a new major version
......
......@@ -6,6 +6,8 @@ require 'digest'
module ContainerRegistry
class Client
include Gitlab::Utils::StrongMemoize
attr_accessor :uri
DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE = 'application/vnd.docker.distribution.manifest.v2+json'
......@@ -35,10 +37,25 @@ module ContainerRegistry
response.headers['docker-content-digest'] if response.success?
end
def delete_repository_tag(name, reference)
result = faraday.delete("/v2/#{name}/manifests/#{reference}")
def delete_repository_tag_by_digest(name, reference)
delete_if_exists("/v2/#{name}/manifests/#{reference}")
end
result.success? || result.status == 404
def delete_repository_tag_by_name(name, reference)
delete_if_exists("/v2/#{name}/tags/reference/#{reference}")
end
# Check if the registry supports tag deletion. This is only supported by the
# GitLab registry fork. The fastest and safest way to check this is to send
# an OPTIONS request to /v2/<name>/tags/reference/<tag>, using a random
# repository name and tag (the registry won't check if they exist).
# Registries that support tag deletion will reply with a 200 OK and include
# the DELETE method in the Allow header. Others reply with a 404 Not Found.
def supports_tag_delete?
strong_memoize(:supports_tag_delete) do
response = faraday.run_request(:options, '/v2/name/tags/reference/tag', '', {})
response.success? && response.headers['allow']&.include?('DELETE')
end
end
def upload_raw_blob(path, blob)
......@@ -86,9 +103,7 @@ module ContainerRegistry
end
def delete_blob(name, digest)
result = faraday.delete("/v2/#{name}/blobs/#{digest}")
result.success? || result.status == 404
delete_if_exists("/v2/#{name}/blobs/#{digest}")
end
def put_tag(name, reference, manifest)
......@@ -163,6 +178,12 @@ module ContainerRegistry
conn.adapter :net_http
end
end
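# Issue a DELETE request, treating 404 as success so that deleting an
# already-removed resource is still considered successful.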
def delete_if_exists(path)
result = faraday.delete(path)
result.success? || result.status == 404
end
end
end
......
......@@ -118,7 +118,7 @@ module ContainerRegistry
def unsafe_delete
return unless digest
client.delete_repository_tag(repository.path, digest)
client.delete_repository_tag_by_digest(repository.path, digest)
end
end
end
......@@ -58,6 +58,14 @@ module Gitlab
migration_class_for(class_name).new.perform(*arguments)
end
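# Count of background migration jobs still to be run: jobs scheduled for later
# plus jobs currently waiting in the queue.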
def self.remaining
scheduled = Sidekiq::ScheduledSet.new.count do |job|
job.queue == self.queue
end
scheduled + Sidekiq::Queue.new(self.queue).size
end
def self.exists?(migration_class, additional_queues = [])
enqueued = Sidekiq::Queue.new(self.queue)
scheduled = Sidekiq::ScheduledSet.new
......
......@@ -5996,6 +5996,9 @@ msgstr ""
msgid "Date range cannot exceed %{maxDateRange} days."
msgstr ""
msgid "Day of month"
msgstr ""
msgid "DayTitle|F"
msgstr ""
......@@ -10004,6 +10007,9 @@ msgstr ""
msgid "Hook was successfully updated."
msgstr ""
msgid "Hour (UTC)"
msgstr ""
msgid "Housekeeping"
msgstr ""
......@@ -12803,6 +12809,9 @@ msgstr ""
msgid "No, not interested right now"
msgstr ""
msgid "No. of commits"
msgstr ""
msgid "Nobody has starred this repository yet"
msgstr ""
......@@ -13531,6 +13540,9 @@ msgstr ""
msgid "People without permission will never get a notification."
msgstr ""
msgid "Percentage"
msgstr ""
msgid "Perform advanced options such as changing path, transferring, or removing the group."
msgstr ""
......@@ -20797,6 +20809,9 @@ msgstr ""
msgid "Used by members to sign in to your group in GitLab"
msgstr ""
msgid "Used programming language"
msgstr ""
msgid "Used to help configure your identity provider"
msgstr ""
......@@ -21486,6 +21501,9 @@ msgstr ""
msgid "Wednesday"
msgstr ""
msgid "Weekday"
msgstr ""
msgid "Weeks"
msgstr ""
......
# frozen_string_literal: true
require 'spec_helper'
describe Projects::Alerting::NotificationsController do
let_it_be(:project) { create(:project) }
let_it_be(:environment) { create(:environment, project: project) }
describe 'POST #create' do
let(:service_response) { ServiceResponse.success }
let(:notify_service) { instance_double(Projects::Alerting::NotifyService, execute: service_response) }
around do |example|
ForgeryProtection.with_forgery_protection { example.run }
end
before do
allow(Projects::Alerting::NotifyService).to receive(:new).and_return(notify_service)
end
def make_request(body = {})
post :create, params: project_params, body: body.to_json, as: :json
end
context 'when notification service succeeds' do
let(:payload) do
{
title: 'Alert title',
hosts: 'https://gitlab.com'
}
end
let(:permitted_params) { ActionController::Parameters.new(payload).permit! }
it 'responds with ok' do
make_request
expect(response).to have_gitlab_http_status(:ok)
end
it 'does not pass excluded parameters to the notify service' do
make_request(payload)
expect(Projects::Alerting::NotifyService)
.to have_received(:new)
.with(project, nil, permitted_params)
end
end
context 'when notification service fails' do
let(:service_response) { ServiceResponse.error(message: 'Unauthorized', http_status: 401) }
it 'responds with the service response' do
make_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'bearer token' do
context 'when set' do
it 'extracts bearer token' do
request.headers['HTTP_AUTHORIZATION'] = 'Bearer some token'
expect(notify_service).to receive(:execute).with('some token')
make_request
end
it 'passes nil if it cannot extract a non-bearer token' do
request.headers['HTTP_AUTHORIZATION'] = 'some token'
expect(notify_service).to receive(:execute).with(nil)
make_request
end
end
context 'when missing' do
it 'passes nil' do
expect(notify_service).to receive(:execute).with(nil)
make_request
end
end
end
end
def project_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end
end
......@@ -44,6 +44,16 @@ FactoryBot.define do
end
end
factory :alerts_service do
project
type { 'AlertsService' }
active { true }
trait :inactive do
active { false }
end
end
factory :drone_ci_service do
project
active { true }
......
......@@ -22,20 +22,12 @@ describe 'Project Graph', :js do
end
end
shared_examples 'page should have languages graphs' do
it 'renders languages' do
expect(page).to have_content(/Ruby 66.* %/)
expect(page).to have_content(/JavaScript 22.* %/)
end
end
context 'commits graph' do
before do
visit commits_project_graph_path(project, 'master')
end
it_behaves_like 'page should have commits graphs'
it_behaves_like 'page should have languages graphs'
end
context 'languages graph' do
......@@ -44,7 +36,6 @@ describe 'Project Graph', :js do
end
it_behaves_like 'page should have commits graphs'
it_behaves_like 'page should have languages graphs'
end
context 'charts graph' do
......@@ -53,7 +44,6 @@ describe 'Project Graph', :js do
end
it_behaves_like 'page should have commits graphs'
it_behaves_like 'page should have languages graphs'
end
context 'chart graph with HTML escaped branch name' do
......
......@@ -12,13 +12,12 @@ exports[`Blob Header Default Actions rendering matches the snapshot 1`] = `
class="file-actions d-none d-sm-block"
>
<viewer-switcher-stub
activeviewer="rich"
blob="[object Object]"
value="simple"
/>
<default-actions-stub
activeviewer="rich"
blob="[object Object]"
activeviewer="simple"
rawpath="/flightjs/flight/snippets/51/raw"
/>
</div>
</div>
......
......@@ -8,7 +8,6 @@ import {
} from '~/blob/components/constants';
import { GlButtonGroup, GlButton } from '@gitlab/ui';
import { Blob } from './mock_data';
import eventHub from '~/blob/event_hub';
describe('Blob Header Default Actions', () => {
let wrapper;
......@@ -16,10 +15,10 @@ describe('Blob Header Default Actions', () => {
let buttons;
const hrefPrefix = 'http://localhost';
function createComponent(blobProps = {}, propsData = {}) {
function createComponent(propsData = {}) {
wrapper = mount(BlobHeaderActions, {
propsData: {
blob: Object.assign({}, Blob, blobProps),
rawPath: Blob.rawPath,
...propsData,
},
});
......@@ -60,12 +59,9 @@ describe('Blob Header Default Actions', () => {
});
it('renders "Copy file contents" button as disables if the viewer is Rich', () => {
createComponent(
{},
{
activeViewer: RICH_BLOB_VIEWER,
},
);
createComponent({
activeViewer: RICH_BLOB_VIEWER,
});
buttons = wrapper.findAll(GlButton);
expect(buttons.at(0).attributes('disabled')).toBeTruthy();
......@@ -74,10 +70,10 @@ describe('Blob Header Default Actions', () => {
describe('functionally', () => {
it('emits an event when a Copy Contents button is clicked', () => {
jest.spyOn(eventHub, '$emit');
jest.spyOn(wrapper.vm, '$emit');
buttons.at(0).vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('copy');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('copy');
});
});
});
......@@ -3,7 +3,6 @@ import BlobHeader from '~/blob/components/blob_header.vue';
import ViewerSwitcher from '~/blob/components/blob_header_viewer_switcher.vue';
import DefaultActions from '~/blob/components/blob_header_default_actions.vue';
import BlobFilepath from '~/blob/components/blob_header_filepath.vue';
import eventHub from '~/blob/event_hub';
import { Blob } from './mock_data';
......@@ -21,10 +20,6 @@ describe('Blob Header Default Actions', () => {
});
}
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
......@@ -96,37 +91,48 @@ describe('Blob Header Default Actions', () => {
describe('functionality', () => {
const newViewer = 'Foo Bar';
const activeViewerType = 'Alpha Beta';
it('listens to "switch-view" event when viewer switcher is shown and updates activeViewer', () => {
expect(wrapper.vm.showViewerSwitcher).toBe(true);
eventHub.$emit('switch-viewer', newViewer);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.activeViewer).toBe(newViewer);
});
});
it('does not update active viewer if the switcher is not shown', () => {
const activeViewer = 'Alpha Beta';
const factory = (hideViewerSwitcher = false) => {
createComponent(
{},
{},
{
data() {
return {
activeViewer,
};
},
},
{
hideViewerSwitcher: true,
activeViewerType,
hideViewerSwitcher,
},
);
};
it('by default sets viewer data based on activeViewerType', () => {
factory();
expect(wrapper.vm.viewer).toBe(activeViewerType);
});
it('sets viewer to null if the viewer switcher should be hidden', () => {
factory(true);
expect(wrapper.vm.viewer).toBe(null);
});
it('watches the changes in viewer data and emits event when the change is registered', () => {
factory();
jest.spyOn(wrapper.vm, '$emit');
wrapper.vm.viewer = newViewer;
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.$emit).toHaveBeenCalledWith('viewer-changed', newViewer);
});
});
it('does not emit event if the switcher is not rendered', () => {
factory(true);
expect(wrapper.vm.showViewerSwitcher).toBe(false);
eventHub.$emit('switch-viewer', newViewer);
jest.spyOn(wrapper.vm, '$emit');
wrapper.vm.viewer = newViewer;
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.activeViewer).toBe(activeViewer);
expect(wrapper.vm.$emit).not.toHaveBeenCalled();
});
});
});
......
......@@ -7,18 +7,13 @@ import {
SIMPLE_BLOB_VIEWER_TITLE,
} from '~/blob/components/constants';
import { GlButtonGroup, GlButton } from '@gitlab/ui';
import { Blob } from './mock_data';
import eventHub from '~/blob/event_hub';
describe('Blob Header Viewer Switcher', () => {
let wrapper;
function createComponent(blobProps = {}, propsData = {}) {
function createComponent(propsData = {}) {
wrapper = mount(BlobHeaderViewerSwitcher, {
propsData: {
blob: Object.assign({}, Blob, blobProps),
...propsData,
},
propsData,
});
}
......@@ -29,7 +24,7 @@ describe('Blob Header Viewer Switcher', () => {
describe('initialization', () => {
it('is initialized with simple viewer as active', () => {
createComponent();
expect(wrapper.vm.activeViewer).toBe(SIMPLE_BLOB_VIEWER);
expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
});
});
......@@ -60,42 +55,42 @@ describe('Blob Header Viewer Switcher', () => {
let simpleBtn;
let richBtn;
function factory(propsOptions = {}) {
createComponent({}, propsOptions);
function factory(propsData = {}) {
createComponent(propsData);
buttons = wrapper.findAll(GlButton);
simpleBtn = buttons.at(0);
richBtn = buttons.at(1);
jest.spyOn(eventHub, '$emit');
jest.spyOn(wrapper.vm, '$emit');
}
it('does not switch the viewer if the selected one is already active', () => {
factory();
expect(wrapper.vm.activeViewer).toBe(SIMPLE_BLOB_VIEWER);
expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
simpleBtn.vm.$emit('click');
expect(wrapper.vm.activeViewer).toBe(SIMPLE_BLOB_VIEWER);
expect(eventHub.$emit).not.toHaveBeenCalled();
expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
expect(wrapper.vm.$emit).not.toHaveBeenCalled();
});
it('emits an event when a Rich Viewer button is clicked', () => {
factory();
expect(wrapper.vm.activeViewer).toBe(SIMPLE_BLOB_VIEWER);
expect(wrapper.vm.value).toBe(SIMPLE_BLOB_VIEWER);
richBtn.vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(eventHub.$emit).toHaveBeenCalledWith('switch-viewer', RICH_BLOB_VIEWER);
expect(wrapper.vm.$emit).toHaveBeenCalledWith('input', RICH_BLOB_VIEWER);
});
});
it('emits an event when a Simple Viewer button is clicked', () => {
factory({
activeViewer: RICH_BLOB_VIEWER,
value: RICH_BLOB_VIEWER,
});
simpleBtn.vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(eventHub.$emit).toHaveBeenCalledWith('switch-viewer', SIMPLE_BLOB_VIEWER);
expect(wrapper.vm.$emit).toHaveBeenCalledWith('input', SIMPLE_BLOB_VIEWER);
});
});
});
......
import VueRouter from 'vue-router';
import IdeRouter from '~/ide/ide_router_extension';
jest.mock('vue-router');
describe('IDE overrides of VueRouter', () => {
const paths = branch => [
`${branch}`,
`/${branch}`,
`/${branch}/-/`,
`/edit/${branch}`,
`/edit/${branch}/-/`,
`/blob/${branch}`,
`/blob/${branch}/-/`,
`/blob/${branch}/-/src/merge_requests/2`,
`/blob/${branch}/-/src/blob/`,
`/tree/${branch}/-/src/blob/`,
`/tree/${branch}/-/src/tree/`,
];
let router;
beforeEach(() => {
VueRouter.mockClear();
router = new IdeRouter({
mode: 'history',
});
});
it.each`
path | expected
${'#-test'} | ${'%23-test'}
${'#test'} | ${'%23test'}
${'test#'} | ${'test%23'}
${'test-#'} | ${'test-%23'}
${'test-#-hash'} | ${'test-%23-hash'}
${'test/hash#123'} | ${'test/hash%23123'}
`('finds project path when route is $path', ({ path, expected }) => {
paths(path).forEach(route => {
const expectedPath = route.replace(path, expected);
router.push(route);
expect(VueRouter.prototype.push).toHaveBeenCalledWith(expectedPath, undefined, undefined);
router.resolve(route);
expect(VueRouter.prototype.resolve).toHaveBeenCalledWith(expectedPath, undefined, undefined);
});
});
});
......@@ -28,6 +28,12 @@ describe('URL utility', () => {
gon.relative_url_root = '';
});
it('escapes special characters', () => {
expect(urlUtils.webIDEUrl('/gitlab-org/gitlab-#-foss/merge_requests/1')).toBe(
'/-/ide/project/gitlab-org/gitlab-%23-foss/merge_requests/1',
);
});
describe('without relative_url_root', () => {
it('returns IDE path with route', () => {
expect(urlUtils.webIDEUrl('/gitlab-org/gitlab-foss/merge_requests/1')).toBe(
......
......@@ -244,8 +244,8 @@ describe BlobHelper do
it 'escapes special characters' do
Rails.application.routes.default_url_options[:script_name] = nil
expect(helper.ide_edit_path(project, "testing/#hashes", "readme.md#test")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/#hashes/-/readme.md%23test")
expect(helper.ide_edit_path(project, "testing/#hashes", "src#/readme.md#test")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/#hashes/-/src%23/readme.md%23test")
expect(helper.ide_edit_path(project, "testing/#hashes", "readme.md#test")).to eq("/-/ide/project/#{project.full_path}/edit/testing/%23hashes/-/readme.md%23test")
expect(helper.ide_edit_path(project, "testing/#hashes", "src#/readme.md#test")).to eq("/-/ide/project/#{project.full_path}/edit/testing/%23hashes/-/src%23/readme.md%23test")
end
it 'does not escape "/" character' do
......
......@@ -146,4 +146,57 @@ describe ContainerRegistry::Client do
expect(subject).to eq 'sha256:123'
end
end
describe '#delete_repository_tag_by_name' do
subject { client.delete_repository_tag_by_name('group/test', 'a') }
context 'when the tag exists' do
before do
stub_request(:delete, "http://container-registry/v2/group/test/tags/reference/a")
.to_return(status: 200, body: "")
end
it { is_expected.to be_truthy }
end
context 'when the tag does not exist' do
before do
stub_request(:delete, "http://container-registry/v2/group/test/tags/reference/a")
.to_return(status: 404, body: "")
end
it { is_expected.to be_truthy }
end
context 'when an error occurs' do
before do
stub_request(:delete, "http://container-registry/v2/group/test/tags/reference/a")
.to_return(status: 500, body: "")
end
it { is_expected.to be_falsey }
end
end
describe '#supports_tag_delete?' do
subject { client.supports_tag_delete? }
context 'when the server supports tag deletion' do
before do
stub_request(:options, "http://container-registry/v2/name/tags/reference/tag")
.to_return(status: 200, body: "", headers: { 'Allow' => 'DELETE' })
end
it { is_expected.to be_truthy }
end
context 'when the server does not support tag deletion' do
before do
stub_request(:options, "http://container-registry/v2/name/tags/reference/tag")
.to_return(status: 404, body: "")
end
it { is_expected.to be_falsey }
end
end
end
......@@ -165,6 +165,32 @@ describe Gitlab::BackgroundMigration do
end
end
describe '.remaining', :redis do
context 'when there are jobs remaining' do
let(:queue) { Array.new(12) }
before do
allow(Sidekiq::Queue).to receive(:new)
.with(described_class.queue)
.and_return(Array.new(12))
Sidekiq::Testing.disable! do
BackgroundMigrationWorker.perform_in(10.minutes, 'Foo')
end
end
it 'returns the enqueued jobs plus the scheduled jobs' do
expect(described_class.remaining).to eq(13)
end
end
context 'when there are no jobs remaining' do
it 'returns zero' do
expect(described_class.remaining).to be_zero
end
end
end
describe '.exists?' do
context 'when there are enqueued jobs present' do
let(:queue) do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe ChatName do
set(:chat_name) { create(:chat_name) }
let_it_be(:chat_name) { create(:chat_name) }
subject { chat_name }
it { is_expected.to belong_to(:service) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe ChatTeam do
set(:chat_team) { create(:chat_team) }
let_it_be(:chat_team) { create(:chat_team) }
subject { chat_team }
# Associations
......
......@@ -3,9 +3,9 @@
require 'spec_helper'
describe Ci::Bridge do
set(:project) { create(:project) }
set(:target_project) { create(:project, name: 'project', namespace: create(:namespace, name: 'my')) }
set(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:project) { create(:project) }
let_it_be(:target_project) { create(:project, name: 'project', namespace: create(:namespace, name: 'my')) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let(:bridge) do
create(:ci_bridge, :variables, status: :created,
......
......@@ -3,11 +3,11 @@
require 'spec_helper'
describe Ci::BuildMetadata do
set(:user) { create(:user) }
set(:group) { create(:group) }
set(:project) { create(:project, :repository, group: group, build_timeout: 2000) }
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group, build_timeout: 2000) }
set(:pipeline) do
let_it_be(:pipeline) do
create(:ci_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch,
......
......@@ -3,11 +3,11 @@
require 'spec_helper'
describe Ci::Build do
set(:user) { create(:user) }
set(:group) { create(:group) }
set(:project) { create(:project, :repository, group: group) }
let_it_be(:user) { create(:user) }
let_it_be(:group, reload: true) { create(:group) }
let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
set(:pipeline) do
let_it_be(:pipeline, reload: true) do
create(:ci_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch,
......@@ -3612,7 +3612,7 @@ describe Ci::Build do
end
describe '.matches_tag_ids' do
set(:build) { create(:ci_build, project: project, user: user) }
let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids }
subject { described_class.where(id: build).matches_tag_ids(tag_ids) }
......@@ -3659,7 +3659,7 @@ describe Ci::Build do
end
describe '.matches_tags' do
set(:build) { create(:ci_build, project: project, user: user) }
let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
subject { described_class.where(id: build).with_any_tags }
......@@ -3685,7 +3685,7 @@ describe Ci::Build do
end
describe 'pages deployments' do
set(:build) { create(:ci_build, project: project, user: user) }
let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
context 'when job is "pages"' do
before do
......@@ -3852,7 +3852,7 @@ describe Ci::Build do
end
describe '#artifacts_metadata_entry' do
set(:build) { create(:ci_build, project: project) }
let_it_be(:build) { create(:ci_build, project: project) }
let(:path) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' }
around do |example|
......@@ -3952,7 +3952,7 @@ describe Ci::Build do
end
describe '#supported_runner?' do
set(:build) { create(:ci_build) }
let_it_be(:build) { create(:ci_build) }
subject { build.supported_runner?(runner_features) }
......
......@@ -5,7 +5,7 @@ require 'spec_helper'
describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
set(:build) { create(:ci_build, :running) }
let_it_be(:build) { create(:ci_build, :running) }
let(:chunk_index) { 0 }
let(:data_store) { :redis }
let(:raw_data) { nil }
......@@ -24,7 +24,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'FastDestroyAll' do
let(:parent) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: parent) }
let(:build) { create(:ci_build, :running, :trace_live, pipeline: pipeline, project: parent) }
let!(:build) { create(:ci_build, :running, :trace_live, pipeline: pipeline, project: parent) }
let(:subjects) { build.trace_chunks }
describe 'Forbid #destroy and #destroy_all' do
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Ci::Processable do
set(:project) { create(:project) }
set(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
describe '#aggregated_needs_names' do
let(:with_aggregated_needs) { pipeline.processables.select_with_aggregated_needs(project) }
......
......@@ -38,8 +38,8 @@ describe Ci::Runner do
end
context 'runner_type validations' do
set(:group) { create(:group) }
set(:project) { create(:project) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let(:group_runner) { create(:ci_runner, :group, groups: [group]) }
let(:project_runner) { create(:ci_runner, :project, projects: [project]) }
let(:instance_runner) { create(:ci_runner, :instance) }
......@@ -322,7 +322,7 @@ describe Ci::Runner do
end
describe '#can_pick?' do
set(:pipeline) { create(:ci_pipeline) }
let_it_be(:pipeline) { create(:ci_pipeline) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:runner_project) { build.project }
let(:runner) { create(:ci_runner, :project, projects: [runner_project], tag_list: tag_list, run_untagged: run_untagged) }
......
......@@ -85,7 +85,7 @@ describe ContainerRepository do
context 'when action succeeds' do
it 'returns status that indicates success' do
expect(repository.client)
.to receive(:delete_repository_tag)
.to receive(:delete_repository_tag_by_digest)
.twice
.and_return(true)
......@@ -96,7 +96,7 @@ describe ContainerRepository do
context 'when action fails' do
it 'returns status that indicates failure' do
expect(repository.client)
.to receive(:delete_repository_tag)
.to receive(:delete_repository_tag_by_digest)
.twice
.and_return(false)
......@@ -105,6 +105,36 @@ describe ContainerRepository do
end
end
describe '#delete_tag_by_name' do
let(:repository) do
create(:container_repository, name: 'my_image',
tags: { latest: '123', rc1: '234' },
project: project)
end
context 'when action succeeds' do
it 'returns status that indicates success' do
expect(repository.client)
.to receive(:delete_repository_tag_by_name)
.with(repository.path, "latest")
.and_return(true)
expect(repository.delete_tag_by_name('latest')).to be_truthy
end
end
context 'when action fails' do
it 'returns status that indicates failure' do
expect(repository.client)
.to receive(:delete_repository_tag_by_name)
.with(repository.path, "latest")
.and_return(false)
expect(repository.delete_tag_by_name('latest')).to be_falsey
end
end
end
describe '#location' do
context 'when registry is running on a custom port' do
before do
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe DiffViewer::ServerSide do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
let(:commit) { project.commit_by(oid: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d') }
let!(:diff_file) { commit.diffs.diff_file_with_new_path('files/ruby/popen.rb') }
......
......@@ -4,10 +4,10 @@ require 'spec_helper'
describe EventCollection do
describe '#to_a' do
set(:group) { create(:group) }
set(:project) { create(:project_empty_repo, group: group) }
set(:projects) { Project.where(id: project.id) }
set(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project_empty_repo, group: group) }
let_it_be(:projects) { Project.where(id: project.id) }
let_it_be(:user) { create(:user) }
context 'with project events' do
before do
......
......@@ -3,9 +3,9 @@
require 'spec_helper'
describe Guest do
set(:public_project) { create(:project, :public) }
set(:private_project) { create(:project, :private) }
set(:internal_project) { create(:project, :internal) }
let_it_be(:public_project, reload: true) { create(:project, :public) }
let_it_be(:private_project) { create(:project, :private) }
let_it_be(:internal_project) { create(:project, :internal) }
describe '.can_pull?' do
context 'when project is private' do
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe ListUserPreference do
set(:user) { create(:user) }
set(:list) { create(:list) }
let_it_be(:user) { create(:user) }
let_it_be(:list) { create(:list) }
before do
list.update_preferences_for(user, { collapsed: true })
......
......@@ -352,9 +352,9 @@ describe PagesDomain do
end
context 'configuration updates when attributes change' do
set(:project1) { create(:project) }
set(:project2) { create(:project) }
set(:domain) { create(:pages_domain) }
let_it_be(:project1) { create(:project) }
let_it_be(:project2) { create(:project) }
let_it_be(:domain) { create(:pages_domain) }
where(:attribute, :old_value, :new_value, :update_expected) do
now = Time.now
......@@ -402,8 +402,8 @@ describe PagesDomain do
end
context 'TLS configuration' do
set(:domain_without_tls) { create(:pages_domain, :without_certificate, :without_key) }
set(:domain) { create(:pages_domain) }
let_it_be(:domain_without_tls) { create(:pages_domain, :without_certificate, :without_key) }
let_it_be(:domain) { create(:pages_domain) }
let(:cert1) { domain.certificate }
let(:cert2) { cert1 + ' ' }
......
# frozen_string_literal: true
require 'spec_helper'
describe AlertsService do
let_it_be(:project) { create(:project) }
let(:service_params) { { project: project, active: active } }
let(:active) { true }
let(:service) { described_class.new(service_params) }
shared_context 'when active' do
let(:active) { true }
end
shared_context 'when inactive' do
let(:active) { false }
end
shared_context 'when persisted' do
before do
service.save!
service.reload
end
end
describe '#url' do
include Gitlab::Routing
subject { service.url }
it { is_expected.to eq(project_alerts_notify_url(project, format: :json)) }
end
describe '#json_fields' do
subject { service.json_fields }
it { is_expected.to eq(%w(active token)) }
end
describe '#as_json' do
subject { service.as_json(only: service.json_fields) }
it { is_expected.to eq('active' => true, 'token' => nil) }
end
describe '#token' do
shared_context 'reset token' do
before do
service.token = ''
service.valid?
end
end
shared_context 'assign token' do |token|
before do
service.token = token
service.valid?
end
end
shared_examples 'valid token' do
it { is_expected.to match(/\A\h{32}\z/) }
end
shared_examples 'no token' do
it { is_expected.to be_blank }
end
subject { service.token }
context 'when active' do
include_context 'when active'
context 'when resetting' do
let!(:previous_token) { service.token }
include_context 'reset token'
it_behaves_like 'valid token'
it { is_expected.not_to eq(previous_token) }
end
context 'when assigning' do
include_context 'assign token', 'random token'
it_behaves_like 'valid token'
end
end
context 'when inactive' do
include_context 'when inactive'
context 'when resetting' do
let!(:previous_token) { service.token }
include_context 'reset token'
it_behaves_like 'no token'
end
end
context 'when persisted' do
include_context 'when persisted'
it_behaves_like 'valid token'
end
end
end
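For reference, the 32-hex-digit pattern asserted by the 'valid token' shared example above is the shape produced by SecureRandom.hex(16). A tiny illustration, assuming nothing about the service's actual token code:

require 'securerandom'

token = SecureRandom.hex(16)     # 32 hexadecimal characters
puts token.match?(/\A\h{32}\z/)  # => true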
......@@ -37,9 +37,9 @@ describe MicrosoftTeamsService do
end
describe "#execute" do
let(:user) { create(:user) }
let(:user) { create(:user) }
set(:project) { create(:project, :repository, :wiki_repo) }
let_it_be(:project) { create(:project, :repository, :wiki_repo) }
before do
allow(chat_service).to receive_messages(
......
......@@ -169,7 +169,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
context 'cluster belongs to projects group' do
set(:group) { create(:group) }
let_it_be(:group) { create(:group) }
let(:project) { create(:prometheus_project, group: group) }
let(:cluster) { create(:cluster_for_group, :with_installed_helm, groups: [group]) }
......
......@@ -3901,7 +3901,7 @@ describe Project do
end
context 'legacy storage' do
set(:project) { create(:project, :repository, :legacy_storage) }
let_it_be(:project) { create(:project, :repository, :legacy_storage) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
......@@ -4000,7 +4000,7 @@ describe Project do
end
context 'hashed storage' do
set(:project) { create(:project, :repository, skip_disk_validation: true) }
let_it_be(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
......@@ -4090,7 +4090,7 @@ describe Project do
end
describe '#has_ci?' do
set(:project) { create(:project) }
let_it_be(:project, reload: true) { create(:project) }
let(:repository) { double }
before do
......@@ -4134,7 +4134,7 @@ describe Project do
Feature.get(:force_autodevops_on_by_default).enable_percentage_of_actors(0)
end
set(:project) { create(:project) }
let_it_be(:project, reload: true) { create(:project) }
subject { project.auto_devops_enabled? }
......@@ -4269,7 +4269,7 @@ describe Project do
end
describe '#has_auto_devops_implicitly_enabled?' do
set(:project) { create(:project) }
let_it_be(:project, reload: true) { create(:project) }
context 'when disabled in settings' do
before do
......@@ -4330,7 +4330,7 @@ describe Project do
end
describe '#has_auto_devops_implicitly_disabled?' do
set(:project) { create(:project) }
let_it_be(:project, reload: true) { create(:project) }
before do
allow(Feature).to receive(:enabled?).and_call_original
......@@ -4408,7 +4408,7 @@ describe Project do
end
describe '#api_variables' do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
it 'exposes API v4 URL' do
expect(project.api_variables.first[:key]).to eq 'CI_API_V4_URL'
......@@ -4605,7 +4605,7 @@ describe Project do
end
describe '#write_repository_config' do
set(:project) { create(:project, :repository) }
let_it_be(:project) { create(:project, :repository) }
it 'writes full path in .git/config when key is missing' do
project.write_repository_config
......@@ -4696,7 +4696,7 @@ describe Project do
end
describe '#has_active_hooks?' do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
it { expect(project.has_active_hooks?).to be_falsey }
......@@ -4723,7 +4723,7 @@ describe Project do
end
describe '#has_active_services?' do
set(:project) { create(:project) }
let_it_be(:project) { create(:project) }
it { expect(project.has_active_services?).to be_falsey }
......@@ -5009,8 +5009,8 @@ describe Project do
describe '#members_among' do
let(:users) { create_list(:user, 3) }
set(:group) { create(:group) }
set(:project) { create(:project, namespace: group) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
before do
project.add_guest(users.first)
......@@ -5584,6 +5584,14 @@ describe Project do
end
end
describe '#alerts_service_activated?' do
let!(:project) { create(:project) }
subject { project.alerts_service_activated? }
it { is_expected.to be_falsey }
end
def rugged_config
rugged_repo(project.repository).config
end
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe Releases::Source do
set(:project) { create(:project, :repository, name: 'finance-cal') }
let_it_be(:project) { create(:project, :repository, name: 'finance-cal') }
let(:tag_name) { 'v1.0' }
describe '.all' do
......
......@@ -60,7 +60,8 @@ describe API::LsifData do
'end_char' => 18,
'end_line' => 8,
'start_char' => 13,
'start_line' => 8
'start_line' => 8,
'definition_url' => project_blob_path(project, "#{commit.id}/morestrings/reverse.go", anchor: 'L5')
})
end
......
# frozen_string_literal: true
require 'spec_helper'
describe Projects::Alerting::NotifyService do
let_it_be(:project, reload: true) { create(:project) }
shared_examples 'does not process incident issues' do |http_status:|
it 'does not process issues' do
expect(IncidentManagement::ProcessAlertWorker)
.not_to receive(:perform_async)
expect(subject.status).to eq(:error)
expect(subject.http_status).to eq(http_status)
end
end
describe '#execute' do
let(:token) { 'invalid-token' }
let(:starts_at) { Time.now.change(usec: 0) }
let(:service) { described_class.new(project, nil, payload) }
let(:payload_raw) do
{
'title' => 'alert title',
'start_time' => starts_at.rfc3339
}
end
let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
subject { service.execute(token) }
it_behaves_like 'does not process incident issues', http_status: 403
end
end
......@@ -41,7 +41,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
let(:params) { {} }
it 'does not remove anything' do
expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag)
expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_digest)
is_expected.to include(status: :success, deleted: [])
end
......@@ -156,7 +156,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
def expect_delete(digest)
expect_any_instance_of(ContainerRegistry::Client)
.to receive(:delete_repository_tag)
.to receive(:delete_repository_tag_by_digest)
.with(repository.path, digest) { true }
end
end
......@@ -23,43 +23,51 @@ describe Projects::LsifDataService do
end
context 'for main.go' do
let(:path_prefix) { "/#{project.full_path}/-/blob/#{commit_id}" }
it 'returns lsif ranges for the file' do
expect(service.execute).to eq([
{
end_char: 9,
end_line: 6,
start_char: 5,
start_line: 6
start_line: 6,
definition_url: "#{path_prefix}/main.go#L7"
},
{
end_char: 36,
end_line: 3,
start_char: 1,
start_line: 3
start_line: 3,
definition_url: "#{path_prefix}/main.go#L4"
},
{
end_char: 12,
end_line: 7,
start_char: 1,
start_line: 7
start_line: 7,
definition_url: "#{path_prefix}/main.go#L4"
},
{
end_char: 20,
end_line: 7,
start_char: 13,
start_line: 7
start_line: 7,
definition_url: "#{path_prefix}/morestrings/reverse.go#L11"
},
{
end_char: 12,
end_line: 8,
start_char: 1,
start_line: 8
start_line: 8,
definition_url: "#{path_prefix}/main.go#L4"
},
{
end_char: 18,
end_line: 8,
start_char: 13,
start_line: 8
start_line: 8,
definition_url: "#{path_prefix}/morestrings/reverse.go#L5"
}
])
end
......@@ -73,7 +81,8 @@ describe Projects::LsifDataService do
end_char: 2,
end_line: 11,
start_char: 1,
start_line: 11
start_line: 11,
definition_url: "/#{project.full_path}/-/blob/#{commit_id}/morestrings/reverse.go#L12"
})
end
end
......@@ -87,7 +96,7 @@ describe Projects::LsifDataService do
end
end
describe '#doc_id_from' do
describe '#doc_id' do
context 'when the passed path matches multiple files' do
let(:path) { 'check/main.go' }
let(:docs) do
......@@ -100,7 +109,9 @@ describe Projects::LsifDataService do
end
it 'fetches the document with the shortest absolute path' do
expect(service.__send__(:doc_id_from, docs)).to eq(3)
service.instance_variable_set(:@docs, docs)
expect(service.__send__(:doc_id)).to eq(3)
end
end
end
......
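A hypothetical illustration of the assertion above; the docs hash here is made-up sample data standing in for the truncated let(:docs) block, not the real fixture. When several stored document paths end with the requested path, #doc_id is expected to return the id of the shortest absolute path.

docs = {
  1 => 'app/check/main.go',
  2 => 'cmd/check/main.go',
  3 => 'check/main.go'
}
path = 'check/main.go'

doc_id = docs
  .select { |_id, doc_path| doc_path.end_with?(path) }
  .min_by { |_id, doc_path| doc_path.length }
  &.first

puts doc_id # => 3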