Commit 2070a2e9 authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-05-29

# Conflicts:
#	doc/ci/autodeploy/index.md
#	doc/user/project/integrations/kubernetes.md
#	locale/gitlab.pot

[ci skip]
parents 21419046 c5adf04c
......@@ -182,7 +182,7 @@ Team labels specify what team is responsible for this issue.
Assigning a team label makes sure issues get the attention of the appropriate
people.
The current team labels are ~Build, ~"CI/CD", ~Discussion, ~Documentation, ~Quality,
The current team labels are ~Distribution, ~"CI/CD", ~Discussion, ~Documentation, ~Quality,
~Geo, ~Gitaly, ~Monitoring, ~Platform, ~Release, ~"Security Products" and ~"UX".
The descriptions on the [labels page][labels-page] explain what falls under the
......
import bp from '../../../breakpoints';
import { slugify } from '../../../lib/utils/text_utility';
import { parseQueryStringIntoObject } from '../../../lib/utils/common_utils';
import { mergeUrlParams, redirectTo } from '../../../lib/utils/url_utility';
export default class Wikis {
constructor() {
......@@ -28,7 +30,12 @@ export default class Wikis {
if (slug.length > 0) {
const wikisPath = slugInput.getAttribute('data-wikis-path');
window.location.href = `${wikisPath}/${slug}`;
// If the wiki is empty, we need to merge the current URL params to keep the "create" view.
const params = parseQueryStringIntoObject(window.location.search.substr(1));
const url = mergeUrlParams(params, `${wikisPath}/${slug}`);
redirectTo(url);
e.preventDefault();
}
}
......
......@@ -14,6 +14,8 @@ class Projects::WikisController < Projects::ApplicationController
def show
@page = @project_wiki.find_page(params[:id], params[:version_id])
view_param = @project_wiki.empty? ? params[:view] : 'create'
if @page
render 'show'
elsif file = @project_wiki.find_file(params[:id], params[:version_id])
......@@ -26,12 +28,12 @@ class Projects::WikisController < Projects::ApplicationController
disposition: 'inline',
filename: file.name
)
else
return render('empty') unless can?(current_user, :create_wiki, @project)
elsif can?(current_user, :create_wiki, @project) && view_param == 'create'
@page = build_page(title: params[:id])
render 'edit'
else
render 'empty'
end
end
......
......@@ -39,25 +39,15 @@ class GroupProjectsFinder < ProjectsFinder
end
def collection_with_user
if group.users.include?(current_user)
if only_shared?
[shared_projects]
elsif only_owned?
[owned_projects]
else
[shared_projects, owned_projects]
end
if only_shared?
[shared_projects.public_or_visible_to_user(current_user)]
elsif only_owned?
[owned_projects.public_or_visible_to_user(current_user)]
else
if only_shared?
[shared_projects.public_or_visible_to_user(current_user)]
elsif only_owned?
[owned_projects.public_or_visible_to_user(current_user)]
else
[
owned_projects.public_or_visible_to_user(current_user),
shared_projects.public_or_visible_to_user(current_user)
]
end
[
owned_projects.public_or_visible_to_user(current_user),
shared_projects.public_or_visible_to_user(current_user)
]
end
end
......
......@@ -11,6 +11,7 @@ module NavHelper
class_name = page_gutter_class
class_name << 'page-with-contextual-sidebar' if defined?(@left_sidebar) && @left_sidebar
class_name << 'page-with-icon-sidebar' if collapsed_sidebar? && @left_sidebar
class_name -= ['right-sidebar-expanded'] if defined?(@right_sidebar) && !@right_sidebar
class_name
end
......
......@@ -24,12 +24,9 @@ class InternalId < ActiveRecord::Base
#
# The operation locks the record and gathers a `ROW SHARE` lock (in PostgreSQL).
# As such, the increment is atomic and safe to be called concurrently.
#
# If a `maximum_iid` is passed in, this overrides the incremented value if it's
# greater than that. This can be used to correct the increment value if necessary.
def increment_and_save!(maximum_iid)
def increment_and_save!
lock!
self.last_value = [(last_value || 0) + 1, (maximum_iid || 0) + 1].max
self.last_value = (last_value || 0) + 1
save!
last_value
end
......@@ -93,16 +90,7 @@ class InternalId < ActiveRecord::Base
# and increment its last value
#
# Note this will acquire a ROW SHARE lock on the InternalId record
# Note we always calculate the maximum iid present here and
# pass it in to correct the InternalId entry if it's last_value is off.
#
# This can happen in a transition phase where both `AtomicInternalId` and
# `NonatomicInternalId` code runs (e.g. during a deploy).
#
# This is subject to be cleaned up with the 10.8 release:
# https://gitlab.com/gitlab-org/gitlab-ce/issues/45389.
(lookup || create_record).increment_and_save!(maximum_iid)
(lookup || create_record).increment_and_save!
end
end
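For orientation, here is a hedged sketch of how a model-level caller typically reaches `increment_and_save!` through the lookup path above; the `InternalId.generate_next` entry point and its argument order are assumed from the surrounding specs and are not shown in this hunk.
# Illustrative only: obtaining the next iid for an issue via InternalId.
# `generate_next` and its exact signature are assumptions, not part of this diff.
iid = InternalId.generate_next(
  issue,                                    # subject that owns the counter
  { project: issue.project },               # scope the counter is unique within
  :issues,                                  # usage enum value
  ->(issue) { issue.project.issues.size }   # init, only used when no InternalId row exists yet
)
# Internally this resolves to (lookup || create_record).increment_and_save!,
# which holds a row-level lock so concurrent callers each receive a distinct value.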
......@@ -128,15 +116,11 @@ class InternalId < ActiveRecord::Base
InternalId.create!(
**scope,
usage: usage_value,
last_value: maximum_iid
last_value: init.call(subject) || 0
)
end
rescue ActiveRecord::RecordNotUnique
lookup
end
def maximum_iid
@maximum_iid ||= init.call(subject) || 0
end
end
end
......@@ -11,7 +11,7 @@ module ObjectStorage
ObjectStorageUnavailable = Class.new(StandardError)
DIRECT_UPLOAD_TIMEOUT = 4.hours
TMP_UPLOAD_PATH = 'tmp/upload'.freeze
TMP_UPLOAD_PATH = 'tmp/uploads'.freeze
module Store
LOCAL = 1
......
......@@ -6,7 +6,7 @@
%section.settings#secret-variables.no-animate{ class: ('expanded' if expanded) }
.settings-header
%h4
= _('Secret variables')
= _('Variables')
= link_to icon('question-circle'), help_page_path('ci/variables/README', anchor: 'secret-variables'), target: '_blank', rel: 'noopener noreferrer'
%button.btn.btn-default.js-settings-toggle{ type: "button" }
= expanded ? _('Collapse') : _('Expand')
......
......@@ -42,7 +42,7 @@
%section.settings.no-animate{ class: ('expanded' if expanded) }
.settings-header
%h4
= _('Secret variables')
= _('Variables')
= link_to icon('question-circle'), help_page_path('ci/variables/README', anchor: 'secret-variables'), target: '_blank', rel: 'noopener noreferrer'
%button.btn.js-settings-toggle{ type: 'button' }
= expanded ? 'Collapse' : 'Expand'
......
- page_title _("Wiki")
- @right_sidebar = false
%h3.page-title= s_("Wiki|Empty page")
%hr
.error_message
= s_("WikiEmptyPageError|You are not allowed to create wiki pages")
= render 'shared/empty_states/wikis'
- layout_path = 'shared/empty_states/wikis_layout'
- if can?(current_user, :create_wiki, @project)
- create_path = project_wiki_path(@project, params[:id], { view: 'create' })
- create_link = link_to s_('WikiEmpty|Create your first page'), create_path, class: 'btn btn-new', title: s_('WikiEmpty|Create your first page')
= render layout: layout_path, locals: { image_path: 'illustrations/wiki_login_empty.svg' } do
%h4
= s_('WikiEmpty|The wiki lets you write documentation for your project')
%p.text-left
= s_("WikiEmpty|A wiki is where you can store all the details about your project. This can include why you've created it, it's principles, how to use it, and so on.")
= create_link
- elsif can?(current_user, :read_issue, @project)
- issues_link = link_to s_('WikiEmptyIssueMessage|issue tracker'), project_issues_path(@project)
- new_issue_link = link_to s_('WikiEmpty|Suggest wiki improvement'), new_project_issue_path(@project), class: 'btn btn-new', title: s_('WikiEmptyIssueMessage|Suggest wiki improvement')
= render layout: layout_path, locals: { image_path: 'illustrations/wiki_logout_empty.svg' } do
%h4
= s_('WikiEmpty|This project has no wiki pages')
%p.text-left
= s_('WikiEmptyIssueMessage|You must be a project member in order to add wiki pages. If you have suggestions for how to improve the wiki for this project, consider opening an issue in the %{issues_link}.').html_safe % { issues_link: issues_link }
= new_issue_link
- else
= render layout: layout_path, locals: { image_path: 'illustrations/wiki_logout_empty.svg' } do
%h4
= s_('WikiEmpty|This project has no wiki pages')
%p
= s_('WikiEmpty|You must be a project member in order to add wiki pages.')
.row.empty-state
.col-xs-12
.svg-content
= image_tag image_path
.col-xs-12
.text-content.text-center
= yield
---
title: Add helpful messages to empty wiki view
merge_request: 19007
author:
type: other
---
title: Remove double-checked internal id generation.
merge_request: 19181
author:
type: performance
---
title: Expose artifacts_expire_at field for job entity in api
merge_request: 18872
author: Semyon Pupkov
type: added
---
title: Add background migration for filling nullified file_store columns
merge_request: 18557
author:
type: performance
---
title: Improve performance of GroupsController#show
merge_request:
author:
type: performance
---
title: Fix FreeBSD being unable to upload artifacts due to wrong tmp path
merge_request: 19148
author:
type: fixed
---
title: Log Workhorse queue duration for Grape API calls
merge_request:
author:
type: other
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class MergeRequestsTargetIdIidStatePartialIndex < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
INDEX_NAME = 'index_merge_requests_on_target_project_id_and_iid_opened'
disable_ddl_transaction!
def up
# On GitLab.com this index will take up roughly 5 MB of space.
add_concurrent_index(
:merge_requests,
[:target_project_id, :iid],
where: "state = 'opened'",
name: INDEX_NAME
)
end
def down
remove_concurrent_index_by_name(:merge_requests, INDEX_NAME)
end
end
class FillFileStore < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class JobArtifact < ActiveRecord::Base
include EachBatch
self.table_name = 'ci_job_artifacts'
BATCH_SIZE = 10_000
def self.params_for_background_migration
yield self.where(file_store: nil), 'FillFileStoreJobArtifact', 5.minutes, BATCH_SIZE
end
end
class LfsObject < ActiveRecord::Base
include EachBatch
self.table_name = 'lfs_objects'
BATCH_SIZE = 10_000
def self.params_for_background_migration
yield self.where(file_store: nil), 'FillFileStoreLfsObject', 5.minutes, BATCH_SIZE
end
end
class Upload < ActiveRecord::Base
include EachBatch
self.table_name = 'uploads'
self.inheritance_column = :_type_disabled # Disable STI
BATCH_SIZE = 10_000
def self.params_for_background_migration
yield self.where(store: nil), 'FillStoreUpload', 5.minutes, BATCH_SIZE
end
end
def up
# NOTE: Schedule background migrations that fill NULL values with 1 (ObjectStorage::Store::LOCAL) in the `file_store` and `store` columns
#
# Here are the target columns
# - ci_job_artifacts.file_store
# - lfs_objects.file_store
# - uploads.store
FillFileStore::JobArtifact.params_for_background_migration do |relation, class_name, delay_interval, batch_size|
queue_background_migration_jobs_by_range_at_intervals(relation,
class_name,
delay_interval,
batch_size: batch_size)
end
FillFileStore::LfsObject.params_for_background_migration do |relation, class_name, delay_interval, batch_size|
queue_background_migration_jobs_by_range_at_intervals(relation,
class_name,
delay_interval,
batch_size: batch_size)
end
FillFileStore::Upload.params_for_background_migration do |relation, class_name, delay_interval, batch_size|
queue_background_migration_jobs_by_range_at_intervals(relation,
class_name,
delay_interval,
batch_size: batch_size)
end
end
def down
# noop
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180521171529) do
ActiveRecord::Schema.define(version: 20180524132016) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -1641,6 +1641,7 @@ ActiveRecord::Schema.define(version: 20180521171529) do
add_index "merge_requests", ["source_project_id", "source_branch"], name: "index_merge_requests_on_source_project_id_and_source_branch", using: :btree
add_index "merge_requests", ["target_branch"], name: "index_merge_requests_on_target_branch", using: :btree
add_index "merge_requests", ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid", unique: true, using: :btree
add_index "merge_requests", ["target_project_id", "iid"], name: "index_merge_requests_on_target_project_id_and_iid_opened", where: "((state)::text = 'opened'::text)", using: :btree
add_index "merge_requests", ["target_project_id", "merge_commit_sha", "id"], name: "index_merge_requests_on_tp_id_and_merge_commit_sha_and_id", using: :btree
add_index "merge_requests", ["title"], name: "index_merge_requests_on_title", using: :btree
add_index "merge_requests", ["title"], name: "index_merge_requests_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
......
# Web terminals
> [Introduced][ce-7690] in GitLab 8.15. Only project masters and owners can
access web terminals.
>
[Introduced](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/7690)
in GitLab 8.15. Only project masters and owners can access web terminals.
With the introduction of the [Kubernetes project service][kubservice], GitLab
gained the ability to store and use credentials for a Kubernetes cluster. One
of the things it uses these credentials for is providing access to
[web terminals](../../ci/environments.html#web-terminals) for environments.
With the introduction of the [Kubernetes integration](../../user/project/clusters/index.md),
GitLab gained the ability to store and use credentials for a Kubernetes cluster.
One of the things it uses these credentials for is providing access to
[web terminals](../../ci/environments.md#web-terminals) for environments.
## How it works
......@@ -80,6 +81,3 @@ Terminal sessions use long-lived connections; by default, these may last
forever. You can configure a maximum session time in the Admin area of your
GitLab instance if you find this undesirable from a scalability or security
point of view.
[ce-7690]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/7690
[kubservice]: ../../user/project/integrations/kubernetes.md
......@@ -29,7 +29,8 @@ For installations from source you'll have to install and configure it yourself.
Prometheus and its exporters are on by default, starting with GitLab 9.0.
Prometheus will run as the `gitlab-prometheus` user and listen on
`http://localhost:9090`. Each exporter will be automatically be set up as a
`http://localhost:9090`. By default Prometheus is only accessible from the GitLab server itself.
Each exporter will be automatically set up as a
monitoring target for Prometheus, unless individually disabled.
To disable Prometheus and all of its exporters, as well as any added in the future:
......@@ -44,14 +45,16 @@ To disable Prometheus and all of its exporters, as well as any added in the futu
1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
take effect
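As a hedged illustration of the earlier edit step in this procedure, the `gitlab.rb` line usually looks like the following; the exact key name can differ between Omnibus GitLab versions, so treat this as a sketch rather than the canonical setting.
# /etc/gitlab/gitlab.rb — sketch only; verify the key against your Omnibus GitLab version.
prometheus_monitoring['enable'] = false   # disables Prometheus and all bundled exporters
After saving, `sudo gitlab-ctl reconfigure` applies the change, as the step above notes.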
## Changing the port Prometheus listens on
## Changing the port and address Prometheus listens on
>**Note:**
The following change was added in [GitLab Omnibus 8.17][1261]. Although possible,
it's not recommended to change the default address and port Prometheus listens
it's not recommended to change the port Prometheus listens
on as this might affect or conflict with other services running on the GitLab
server. Proceed at your own risk.
To access Prometheus from outside the GitLab server, you will need to
set an FQDN or IP in `prometheus['listen_address']`.
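For example, that edit might look like this in `/etc/gitlab/gitlab.rb`; the address and port shown are illustrative values, while `prometheus['listen_address']` is the setting named above.
# /etc/gitlab/gitlab.rb — example values only.
prometheus['listen_address'] = '0.0.0.0:9090'   # listen on all interfaces, port 9090
Follow it with `sudo gitlab-ctl reconfigure` for the change to take effect.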
To change the address/port that Prometheus listens on:
1. Edit `/etc/gitlab/gitlab.rb`
......@@ -80,9 +83,9 @@ You can visit `http://localhost:9090` for the dashboard that Prometheus offers b
>**Note:**
If SSL has been enabled on your GitLab instance, you may not be able to access
Prometheus on the same browser as GitLab due to [HSTS][hsts]. We plan to
Prometheus on the same browser as GitLab if using the same FQDN due to [HSTS][hsts]. We plan to
[provide access via GitLab][multi-user-prometheus], but in the interim there are
some workarounds: using a separate browser for Prometheus, resetting HSTS, or
some workarounds: using a separate FQDN, using the server IP, using a separate browser for Prometheus, resetting HSTS, or
having [Nginx proxy it][nginx-custom-config].
The performance data collected by Prometheus can be viewed directly in the
......
......@@ -38,6 +38,7 @@ Example of response
"size": 1000
},
"finished_at": "2015-12-24T17:54:27.895Z",
"artifacts_expire_at": "2016-01-23T17:54:27.895Z"
"id": 7,
"name": "teaspoon",
"pipeline": {
......@@ -81,6 +82,7 @@ Example of response
"created_at": "2015-12-24T15:51:21.727Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:24.921Z",
"artifacts_expire_at": "2016-01-23T17:54:24.921Z",
"id": 6,
"name": "rspec:other",
"pipeline": {
......@@ -152,6 +154,7 @@ Example of response
"size": 1000
},
"finished_at": "2015-12-24T17:54:27.895Z",
"artifacts_expire_at": "2016-01-23T17:54:27.895Z"
"id": 7,
"name": "teaspoon",
"pipeline": {
......@@ -195,6 +198,7 @@ Example of response
"created_at": "2015-12-24T15:51:21.727Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:24.921Z",
"artifacts_expire_at": "2016-01-23T17:54:24.921Z"
"id": 6,
"name": "rspec:other",
"pipeline": {
......@@ -261,6 +265,7 @@ Example of response
"created_at": "2015-12-24T15:51:21.880Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:31.198Z",
"artifacts_expire_at": "2016-01-23T17:54:31.198Z",
"id": 8,
"name": "rubocop",
"pipeline": {
......
<<<<<<< HEAD
# Auto Deploy
> [Introduced][mr-8135] in GitLab 8.15.
......@@ -127,3 +128,6 @@ If you have installed GitLab using a different method:
[kube-deploy]: https://gitlab.com/gitlab-examples/kubernetes-deploy "Kubernetes deploy example project"
[container-registry]: https://docs.gitlab.com/ce/user/project/container_registry.html
[postgresql]: https://www.postgresql.org/
=======
This document was moved to [another location](../../topics/autodevops/index.md#auto-deploy).
>>>>>>> upstream/master
......@@ -24,7 +24,7 @@ Environments are like tags for your CI jobs, describing where code gets deployed
Deployments are created when [jobs] deploy versions of code to environments,
so every environment can have one or more deployments. GitLab keeps track of
your deployments, so you always know what is currently being deployed on your
servers. If you have a deployment service such as [Kubernetes][kubernetes-service]
servers. If you have a deployment service such as [Kubernetes][kube]
enabled for your project, you can use it to assist with your deployments, and
can even access a [web terminal](#web-terminals) for your environment from within GitLab!
......@@ -605,7 +605,7 @@ Web terminals were added in GitLab 8.15 and are only available to project
masters and owners.
If you deploy to your environments with the help of a deployment service (e.g.,
the [Kubernetes service][kubernetes-service]), GitLab can open
the [Kubernetes integration][kube]), GitLab can open
a terminal session to your environment! This is a very powerful feature that
allows you to debug issues without leaving the comfort of your web browser. To
enable it, just follow the instructions given in the service integration
......@@ -672,7 +672,6 @@ Below are some links you may find interesting:
[Pipelines]: pipelines.md
[jobs]: yaml/README.md#jobs
[yaml]: yaml/README.md
[kubernetes-service]: ../user/project/integrations/kubernetes.md
[environments]: #environments
[deployments]: #deployments
[permissions]: ../user/permissions.md
......@@ -684,5 +683,5 @@ Below are some links you may find interesting:
[gitlab-flow]: ../workflow/gitlab_flow.md
[gitlab runner]: https://docs.gitlab.com/runner/
[git-strategy]: yaml/README.md#git-strategy
[kube]: ../user/project/integrations/kubernetes.md
[kube]: ../user/project/clusters/index.md
[prom]: ../user/project/integrations/prometheus.md
......@@ -236,8 +236,8 @@ are set in the build environment. These variables are only defined for
[deployment jobs](../environments.md). Please consult the documentation of
the project services that you are using to learn which variables they define.
An example project service that defines deployment variables is
[Kubernetes Service](../../user/project/integrations/kubernetes.md#deployment-variables).
An example project service that defines deployment variables is the
[Kubernetes integration](../../user/project/clusters/index.md#deployment-variables).
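As a hedged sketch of what "defining deployment variables" means on the Ruby side, a project service or integration generally contributes them through a method shaped roughly like the one below; the class, method body, and variable keys are illustrative assumptions, not taken from this diff.
# Sketch only: the general shape of a service that exposes deployment variables to CI jobs.
class ExampleDeploymentService < Service
  def deployment_variables
    [
      { key: 'EXAMPLE_NAMESPACE', value: namespace, public: true },
      { key: 'EXAMPLE_API_URL',   value: api_url,   public: true }
    ]
  end
end
# CI then makes these variables available only to jobs that define an environment (deployment jobs).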
## Debug tracing
......
<<<<<<< HEAD
---
last_updated: 2017-12-28
---
......@@ -131,3 +132,6 @@ the deployment variables above, ensuring any pods you create are labelled with
`app=$CI_ENVIRONMENT_SLUG`. GitLab will do the rest!
[ee]: https://about.gitlab.com/products/
=======
This document was moved to [another location](../clusters/index.md).
>>>>>>> upstream/master
......@@ -41,7 +41,6 @@ Click on the service links to see further configuration instructions and details
| [JIRA](jira.md) | JIRA issue tracker |
| [Jenkins](../../../integration/jenkins.md) | An extendable open source continuous integration server |
| JetBrains TeamCity CI | A continuous integration and build server |
| [Kubernetes](kubernetes.md) _(Has been deprecated in GitLab 10.3)_ | A containerized deployment service |
| [Mattermost slash commands](mattermost_slash_commands.md) | Mattermost chat and ChatOps slash commands |
| [Mattermost Notifications](mattermost.md) | Receive event notifications in Mattermost |
| [Microsoft teams](microsoft_teams.md) | Receive notifications for actions that happen on GitLab into a room on Microsoft Teams using Office 365 Connectors |
......
......@@ -15,7 +15,8 @@ module API
include: [
GrapeLogging::Loggers::FilterParameters.new,
GrapeLogging::Loggers::ClientEnv.new,
Gitlab::GrapeLogging::Loggers::UserLogger.new
Gitlab::GrapeLogging::Loggers::UserLogger.new,
Gitlab::GrapeLogging::Loggers::QueueDurationLogger.new
]
allow_access_with_scope :api
......
......@@ -1048,6 +1048,7 @@ module API
class Job < JobBasic
expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
expose :runner, with: Runner
expose :artifacts_expire_at
end
class JobBasicWithProject < JobBasic
......
......@@ -69,7 +69,8 @@ module Banzai
{ group: [:owners, :group_members] },
:invited_groups,
:project_members,
:project_feature
:project_feature,
:route
]
}
),
......
# frozen_string_literal: true
# rubocop:disable Metrics/AbcSize
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class FillFileStoreJobArtifact
class JobArtifact < ActiveRecord::Base
self.table_name = 'ci_job_artifacts'
end
def perform(start_id, stop_id)
FillFileStoreJobArtifact::JobArtifact
.where(file_store: nil)
.where(id: (start_id..stop_id))
.update_all(file_store: 1)
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Metrics/AbcSize
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class FillFileStoreLfsObject
class LfsObject < ActiveRecord::Base
self.table_name = 'lfs_objects'
end
def perform(start_id, stop_id)
FillFileStoreLfsObject::LfsObject
.where(file_store: nil)
.where(id: (start_id..stop_id))
.update_all(file_store: 1)
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Metrics/AbcSize
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class FillStoreUpload
class Upload < ActiveRecord::Base
self.table_name = 'uploads'
self.inheritance_column = :_type_disabled
end
def perform(start_id, stop_id)
FillStoreUpload::Upload
.where(store: nil)
.where(id: (start_id..stop_id))
.update_all(store: 1)
end
end
end
end
# This grape_logging module (https://github.com/aserafin/grape_logging) makes it
# possible to log how much time an API request was queued by Workhorse.
module Gitlab
module GrapeLogging
module Loggers
class QueueDurationLogger < ::GrapeLogging::Loggers::Base
attr_accessor :start_time
def before
@start_time = Time.now
end
def parameters(request, _)
proxy_start = request.env['HTTP_GITLAB_WORKHORSE_PROXY_START'].presence
return {} unless proxy_start && start_time
# The proxy start header is in nanoseconds and start_time in seconds; convert both to
# milliseconds and take the difference to get how long the request was queued in gitlab-workhorse
duration = (start_time.to_f * 1_000 - proxy_start.to_f / 1_000_000).round(2)
{ 'queue_duration': duration }
end
end
end
end
end
......@@ -8,8 +8,13 @@ msgid ""
msgstr ""
"Project-Id-Version: gitlab 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
<<<<<<< HEAD
"POT-Creation-Date: 2018-05-23 10:28-0500\n"
"PO-Revision-Date: 2018-05-23 10:28-0500\n"
=======
"POT-Creation-Date: 2018-05-23 07:40-0500\n"
"PO-Revision-Date: 2018-05-23 07:40-0500\n"
>>>>>>> upstream/master
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
......@@ -4364,7 +4369,7 @@ msgstr ""
msgid "Seconds to wait for a storage access attempt"
msgstr ""
msgid "Secret variables"
msgid "Variables"
msgstr ""
msgid "Security report"
......@@ -5521,7 +5526,31 @@ msgstr ""
msgid "WikiEdit|There is already a page with the same title in that path."
msgstr ""
msgid "WikiEmptyPageError|You are not allowed to create wiki pages"
msgid "WikiEmptyIssueMessage|Suggest wiki improvement"
msgstr ""
msgid "WikiEmptyIssueMessage|You must be a project member in order to add wiki pages. If you have suggestions for how to improve the wiki for this project, consider opening an issue in the %{issues_link}."
msgstr ""
msgid "WikiEmptyIssueMessage|issue tracker"
msgstr ""
msgid "WikiEmpty|A wiki is where you can store all the details about your project. This can include why you've created it, it's principles, how to use it, and so on."
msgstr ""
msgid "WikiEmpty|Create your first page"
msgstr ""
msgid "WikiEmpty|Suggest wiki improvement"
msgstr ""
msgid "WikiEmpty|The wiki lets you write documentation for your project"
msgstr ""
msgid "WikiEmpty|This project has no wiki pages"
msgstr ""
msgid "WikiEmpty|You must be a project member in order to add wiki pages."
msgstr ""
msgid "WikiHistoricalPage|This is an old version of this page."
......@@ -5590,9 +5619,6 @@ msgstr ""
msgid "Wiki|Edit Page"
msgstr ""
msgid "Wiki|Empty page"
msgstr ""
msgid "Wiki|More Pages"
msgstr ""
......
......@@ -7,7 +7,7 @@ module QA # rubocop:disable Naming/FileName
view 'app/views/projects/settings/ci_cd/show.html.haml' do
element :runners_settings, 'Runners settings'
element :secret_variables, 'Secret variables'
element :secret_variables, 'Variables'
end
def expand_runners_settings(&block)
......@@ -17,7 +17,7 @@ module QA # rubocop:disable Naming/FileName
end
def expand_secret_variables(&block)
expand_section('Secret variables') do
expand_section('Variables') do
Settings::SecretVariables.perform(&block)
end
end
......
......@@ -17,17 +17,16 @@ module QA
get request.url, { params: { username: Runtime::User.name } }
expect_status(200)
expect(json_body).to be_an Array
expect(json_body.size).to eq(1)
expect(json_body.first[:username]).to eq Runtime::User.name
expect(json_body).to contain_exactly(
a_hash_including(username: Runtime::User.name)
)
end
scenario 'submit request with an invalid user name' do
get request.url, { params: { username: SecureRandom.hex(10) } }
expect_status(200)
expect(json_body).to be_an Array
expect(json_body.size).to eq(0)
expect(json_body).to eq([])
end
end
......
......@@ -19,6 +19,7 @@ feature 'Projects > Wiki > User previews markdown changes', :js do
visit project_path(project)
find('.shortcuts-wiki').click
click_link "Create your first page"
end
context "while creating a new wiki page" do
......
......@@ -8,6 +8,7 @@ describe "User creates wiki page" do
sign_in(user)
visit(project_wikis_path(project))
click_link "Create your first page"
end
context "when wiki is empty" do
......
......@@ -11,6 +11,7 @@ describe 'User updates wiki page' do
context 'when wiki is empty' do
before do
visit(project_wikis_path(project))
click_link "Create your first page"
end
context 'in a user namespace' do
......
require 'spec_helper'
describe 'User views empty wiki' do
let(:user) { create(:user) }
shared_examples 'empty wiki and accessible issues' do
it 'show "issue tracker" message' do
visit(project_wikis_path(project))
element = page.find('.row.empty-state')
expect(element).to have_content('This project has no wiki pages')
expect(element).to have_link("issue tracker", href: project_issues_path(project))
expect(element).to have_link("Suggest wiki improvement", href: new_project_issue_path(project))
end
end
shared_examples 'empty wiki and non-accessible issues' do
it 'does not show "issue tracker" message' do
visit(project_wikis_path(project))
element = page.find('.row.empty-state')
expect(element).to have_content('This project has no wiki pages')
expect(element).to have_no_link('Suggest wiki improvement')
end
end
context 'when user is logged out and issue tracker is public' do
let(:project) { create(:project, :public, :wiki_repo) }
it_behaves_like 'empty wiki and accessible issues'
end
context 'when user is logged in and not a member' do
let(:project) { create(:project, :public, :wiki_repo) }
before do
sign_in(user)
end
it_behaves_like 'empty wiki and accessible issues'
end
context 'when issue tracker is private' do
let(:project) { create(:project, :public, :wiki_repo, :issues_private) }
it_behaves_like 'empty wiki and non-accessible issues'
end
context 'when issue tracker is disabled' do
let(:project) { create(:project, :public, :wiki_repo, :issues_disabled) }
it_behaves_like 'empty wiki and non-accessible issues'
end
context 'when user is logged in and a member' do
let(:project) { create(:project, :public, :wiki_repo) }
before do
sign_in(user)
project.add_developer(user)
end
it 'show "create first page" message' do
visit(project_wikis_path(project))
element = page.find('.row.empty-state')
element.click_link 'Create your first page'
expect(page).to have_button('Create page')
end
end
end
......@@ -18,6 +18,7 @@ describe 'User views a wiki page' do
context 'when wiki is empty' do
before do
visit(project_wikis_path(project))
click_link "Create your first page"
click_on('New page')
......@@ -140,6 +141,7 @@ describe 'User views a wiki page' do
visit(project_path(project))
find('.shortcuts-wiki').click
click_link "Create your first page"
expect(page).to have_content('Home · Create Page')
end
......
require 'spec_helper'
describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
subject { described_class.new }
describe ".parameters" do
let(:start_time) { Time.new(2018, 01, 01) }
describe 'when no proxy time is available' do
let(:mock_request) { OpenStruct.new(env: {}) }
it 'returns an empty hash' do
expect(subject.parameters(mock_request, nil)).to eq({})
end
end
describe 'when a proxy time is available' do
let(:mock_request) do
OpenStruct.new(
env: {
'HTTP_GITLAB_WORKHORSE_PROXY_START' => (start_time - 1.hour).to_i * (10**9)
}
)
end
it 'returns the correct duration in ms' do
Timecop.freeze(start_time) do
subject.before
expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration': 1.hour.to_f * 1000 })
end
end
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180424151928_fill_file_store')
describe FillFileStore, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:builds) { table(:ci_builds) }
let(:job_artifacts) { table(:ci_job_artifacts) }
let(:lfs_objects) { table(:lfs_objects) }
let(:uploads) { table(:uploads) }
before do
namespaces.create!(id: 123, name: 'gitlab1', path: 'gitlab1')
projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
builds.create!(id: 1)
##
# Create rows that have a nullified `file_store` column
job_artifacts.create!(project_id: 123, job_id: 1, file_type: 1, file_store: nil)
lfs_objects.create!(oid: 123, size: 10, file: 'file_name', file_store: nil)
uploads.create!(size: 10, path: 'path', uploader: 'uploader', mount_point: 'file_name', store: nil)
end
it 'correctly migrates nullified file_store/store column' do
expect(job_artifacts.where(file_store: nil).count).to eq(1)
expect(lfs_objects.where(file_store: nil).count).to eq(1)
expect(uploads.where(store: nil).count).to eq(1)
expect(job_artifacts.where(file_store: 1).count).to eq(0)
expect(lfs_objects.where(file_store: 1).count).to eq(0)
expect(uploads.where(store: 1).count).to eq(0)
migrate!
expect(job_artifacts.where(file_store: nil).count).to eq(0)
expect(lfs_objects.where(file_store: nil).count).to eq(0)
expect(uploads.where(store: nil).count).to eq(0)
expect(job_artifacts.where(file_store: 1).count).to eq(1)
expect(lfs_objects.where(file_store: 1).count).to eq(1)
expect(uploads.where(store: 1).count).to eq(1)
end
end
......@@ -5,7 +5,7 @@ describe InternalId do
let(:usage) { :issues }
let(:issue) { build(:issue, project: project) }
let(:scope) { { project: project } }
let(:init) { ->(s) { s.project.issues.maximum(:iid) } }
let(:init) { ->(s) { s.project.issues.size } }
context 'validations' do
it { is_expected.to validate_presence_of(:usage) }
......@@ -39,29 +39,6 @@ describe InternalId do
end
end
context 'with an InternalId record present and existing issues with a higher internal id' do
# This can happen if the old NonatomicInternalId is still in use
before do
issues = Array.new(rand(1..10)).map { create(:issue, project: project) }
issue = issues.last
issue.iid = issues.map { |i| i.iid }.max + 1
issue.save
end
let(:maximum_iid) { project.issues.map { |i| i.iid }.max }
it 'updates last_value to the maximum internal id present' do
subject
expect(described_class.find_by(project: project, usage: described_class.usages[usage.to_s]).last_value).to eq(maximum_iid + 1)
end
it 'returns next internal id correctly' do
expect(subject).to eq(maximum_iid + 1)
end
end
context 'with concurrent inserts on table' do
it 'looks up the record if it was created concurrently' do
args = { **scope, usage: described_class.usages[usage.to_s] }
......@@ -104,8 +81,7 @@ describe InternalId do
describe '#increment_and_save!' do
let(:id) { create(:internal_id) }
let(:maximum_iid) { nil }
subject { id.increment_and_save!(maximum_iid) }
subject { id.increment_and_save! }
it 'returns incremented iid' do
value = id.last_value
......@@ -126,14 +102,5 @@ describe InternalId do
expect(subject).to eq(1)
end
end
context 'with maximum_iid given' do
let(:id) { create(:internal_id, last_value: 1) }
let(:maximum_iid) { id.last_value + 10 }
it 'returns maximum_iid instead' do
expect(subject).to eq(12)
end
end
end
end
......@@ -13,7 +13,10 @@ describe API::Jobs do
ref: project.default_branch)
end
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let!(:job) do
create(:ci_build, :success, pipeline: pipeline,
artifacts_expire_at: 1.day.since)
end
let(:user) { create(:user) }
let(:api_user) { user }
......@@ -44,6 +47,7 @@ describe API::Jobs do
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(Time.parse(json_response.first['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
end
it 'returns pipeline data' do
......@@ -129,6 +133,7 @@ describe API::Jobs do
it 'returns correct values' do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(Time.parse(json_response.first['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
end
it 'returns pipeline data' do
......@@ -202,6 +207,7 @@ describe API::Jobs do
expect(Time.parse(json_response['created_at'])).to be_like_time(job.created_at)
expect(Time.parse(json_response['started_at'])).to be_like_time(job.started_at)
expect(Time.parse(json_response['finished_at'])).to be_like_time(job.finished_at)
expect(Time.parse(json_response['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
expect(json_response['duration']).to eq(job.duration)
end
......
......@@ -1227,7 +1227,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
before do
fog_connection.directories.get('artifacts').files.create(
key: 'tmp/upload/12312300',
key: 'tmp/uploads/12312300',
body: 'content'
)
......
......@@ -1117,7 +1117,7 @@ describe 'Git LFS API and storage' do
context 'with valid remote_id' do
before do
fog_connection.directories.get('lfs-objects').files.create(
key: 'tmp/upload/12312300',
key: 'tmp/uploads/12312300',
body: 'content'
)
end
......
......@@ -630,7 +630,7 @@ describe ObjectStorage do
let!(:fog_file) do
fog_connection.directories.get('uploads').files.create(
key: 'tmp/upload/test/123123',
key: 'tmp/uploads/test/123123',
body: 'content'
)
end
......
......@@ -1274,9 +1274,9 @@ boom@5.x.x:
dependencies:
hoek "4.x.x"
bootstrap@4.1:
version "4.1.0"
resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-4.1.0.tgz#110b05c31a236d56dbc9adcda6dd16f53738a28a"
bootstrap@~4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-4.1.1.tgz#3aec85000fa619085da8d2e4983dfd67cf2114cb"
boxen@^1.2.1:
version "1.3.0"
......