Commit 232e0a31 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 00fa950a
......@@ -40,7 +40,10 @@ Attach the screenshot and HTML snapshot of the page from the job's artifacts:
/due in 2 weeks
<!-- Base labels. -->
/label ~Quality ~QA ~bug ~S1
/label ~Quality ~QA ~test
<!-- Test failure type label, please use just one.-->
/label ~"failure::broken-test" ~"failure::flaky-test" ~"failure::stale-test" ~"failure::test-environment" ~"failure::investigating"
<!--
Choose the stage that appears in the test path, e.g. ~"devops::create" for
......
import Vue from 'vue';
import pdfLab from '../../pdf/index.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import PdfViewer from './pdf_viewer.vue';
/**
 * Mounts the PDF viewer application on the `#js-pdf-viewer` element.
 *
 * The endpoint of the PDF to display is read from the element's
 * `data-endpoint` attribute and passed to the PdfViewer component,
 * which owns all loading/error state and markup.
 *
 * NOTE: the previous revision left the old options-based component
 * (components/data/methods/template) interleaved with the new
 * render-function mount, nesting `render` inside `components` — an
 * invalid Vue options object. This resolves to the render-based mount.
 *
 * @returns {Vue} the mounted Vue instance
 */
export default () => {
  const el = document.getElementById('js-pdf-viewer');

  return new Vue({
    el,
    render(createElement) {
      return createElement(PdfViewer, {
        props: {
          // URL of the PDF file to render.
          pdf: el.dataset.endpoint,
        },
      });
    },
  });
};
<script>
// Single-file component wrapping the shared PdfLab viewer with loading
// and error states. The PDF endpoint is supplied via the `pdf` prop.
import PdfLab from '../../pdf/index.vue';
import { GlLoadingIcon } from '@gitlab/ui';

export default {
  components: {
    PdfLab,
    GlLoadingIcon,
  },
  props: {
    // URL of the PDF document to render.
    pdf: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      // Error payload reported by PdfLab; false until `pdflaberror` fires.
      error: false,
      // True once any load/decode error occurred.
      loadError: false,
      // True while PdfLab is still fetching/rendering the document.
      loading: true,
    };
  },
  methods: {
    // Fired by PdfLab once the document has rendered.
    onLoad() {
      this.loading = false;
    },
    // Fired by PdfLab when loading or decoding fails.
    // NOTE(review): this always sets `loadError`, so the template's
    // "decoding" (v-else) branch appears unreachable — confirm intent.
    onError(error) {
      this.loading = false;
      this.loadError = true;
      this.error = error;
    },
  },
};
</script>

<template>
  <div class="js-pdf-viewer container-fluid md prepend-top-default append-bottom-default">
    <!-- Spinner shown until PdfLab reports load or error. -->
    <div v-if="loading && !error" class="text-center loading">
      <gl-loading-icon class="mt-5" size="lg" />
    </div>
    <pdf-lab v-if="!loadError" :pdf="pdf" @pdflabload="onLoad" @pdflaberror="onError" />
    <!-- Error messages, branch selected by the kind of failure. -->
    <p v-if="error" class="text-center">
      <span v-if="loadError" ref="loadError">
        {{ __('An error occurred while loading the file. Please try again later.') }}
      </span>
      <span v-else>{{ __('An error occurred while decoding the file.') }}</span>
    </p>
  </div>
</template>
......@@ -8,6 +8,7 @@ import axios from './axios_utils';
import { getLocationHash } from './url_utility';
import { convertToCamelCase, convertToSnakeCase } from './text_utility';
import { isObject } from './type_utility';
import { isFunction } from 'lodash';
export const getPagePath = (index = 0) => {
const page = $('body').attr('data-page') || '';
......@@ -667,30 +668,34 @@ export const spriteIcon = (icon, className = '') => {
};
/**
 * @callback ConversionFunction
 * @param {string} prop
 */

/**
 * This function takes a conversion function as the first parameter
 * and applies this function to each prop in the provided object.
 *
 * This method also supports additional params in `options` object
 *
 * @param {ConversionFunction} conversionFunction - Function to apply to each prop of the object.
 * @param {Object} obj - Object to be converted.
 * @param {Object} options - Object containing additional options.
 * @param {boolean} options.deep - Flag to allow deep object converting
 * @param {string[]} options.dropKeys - List of properties to discard while building new object
 * @param {string[]} options.ignoreKeyNames - List of properties to leave intact while building new object
 * @returns {Object|Array} new object/array with converted property names
 */
export const convertObjectProps = (conversionFunction, obj = {}, options = {}) => {
  // Guard: a missing converter or a null object yields an empty object.
  if (!isFunction(conversionFunction) || obj === null) {
    return {};
  }

  const { deep = false, dropKeys = [], ignoreKeyNames = [] } = options;

  const isObjParameterArray = Array.isArray(obj);
  const initialValue = isObjParameterArray ? [] : {};

  return Object.keys(obj).reduce((acc, prop) => {
    const val = obj[prop];

    // Drop properties from new object if
    // there are any mentioned in options
    if (dropKeys.indexOf(prop) > -1) {
      return acc;
    }

    // Skip converting properties in new object
    // if there are any mentioned in options
    if (ignoreKeyNames.indexOf(prop) > -1) {
      acc[prop] = val;
      return acc;
    }

    if (deep && (isObject(val) || Array.isArray(val))) {
      if (isObjParameterArray) {
        // Array indices are kept as-is; only nested values are converted.
        acc[prop] = convertObjectProps(conversionFunction, val, options);
      } else {
        acc[conversionFunction(prop)] = convertObjectProps(conversionFunction, val, options);
      }
    } else {
      acc[conversionFunction(prop)] = val;
    }

    return acc;
  }, initialValue);
};
/**
 * This method takes in object with snake_case property names
 * and returns a new object with camelCase property names
 *
 * Reasoning for this method is to ensure consistent property
 * naming conventions across JS code.
 *
 * This method also supports additional params in `options` object
 *
 * @param {Object} obj - Object to be converted.
 * @param {Object} options - Object containing additional options.
 * @param {boolean} options.deep - Flag to allow deep object converting
 * @param {string[]} options.dropKeys - List of properties to discard while building new object
 * @param {string[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
 * @returns {Object} new object with camelCase property names
 */
export const convertObjectPropsToCamelCase = (obj = {}, options = {}) =>
  convertObjectProps(convertToCamelCase, obj, options);
/**
 * Converts all the object keys to snake case
 *
 * This method also supports additional params in `options` object
 *
 * @param {Object} obj - Object to be converted.
 * @param {Object} options - Object containing additional options.
 * @param {boolean} options.deep - Flag to allow deep object converting
 * @param {string[]} options.dropKeys - List of properties to discard while building new object
 * @param {string[]} options.ignoreKeyNames - List of properties to leave intact (not converted) while building new object
 * @returns {Object} new object with snake_case property names
 */
export const convertObjectPropsToSnakeCase = (obj = {}, options = {}) =>
  convertObjectProps(convertToSnakeCase, obj, options);
/**
 * Builds the absolute path of a bundled asset, honoring the optional
 * asset host and relative URL root configured on the global `gon` object.
 *
 * @param {string} imgUrl - Asset filename/path relative to `/assets/`.
 * @returns {string} the fully-qualified asset URL.
 */
export const imagePath = imgUrl => {
  const assetHost = gon.asset_host || '';
  const relativeRoot = gon.relative_url_root || '';
  return `${assetHost}${relativeRoot}/assets/${imgUrl}`;
};
......
......@@ -7,7 +7,7 @@ module Groups
before_action :authorize_admin_group!
before_action :authorize_update_max_artifacts_size!, only: [:update]
before_action do
push_frontend_feature_flag(:new_variables_ui, @group)
push_frontend_feature_flag(:new_variables_ui, @group, default_enabled: true)
end
before_action :define_variables, only: [:show, :create_deploy_token]
......
......@@ -21,7 +21,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
before_action only: [:show] do
push_frontend_feature_flag(:diffs_batch_load, @project, default_enabled: true)
push_frontend_feature_flag(:deploy_from_footer, @project, default_enabled: true)
push_frontend_feature_flag(:single_mr_diff_view, @project)
push_frontend_feature_flag(:single_mr_diff_view, @project, default_enabled: true)
push_frontend_feature_flag(:suggest_pipeline) if experiment_enabled?(:suggest_pipeline)
end
......
......@@ -6,7 +6,7 @@ module Projects
before_action :authorize_admin_pipeline!
before_action :define_variables
before_action do
push_frontend_feature_flag(:new_variables_ui, @project)
push_frontend_feature_flag(:new_variables_ui, @project, default_enabled: true)
end
def show
......
......@@ -28,7 +28,8 @@ module Ci
license_scanning: 'gl-license-scanning-report.json',
performance: 'performance.json',
metrics: 'metrics.txt',
lsif: 'lsif.json'
lsif: 'lsif.json',
dotenv: '.env'
}.freeze
INTERNAL_TYPES = {
......@@ -43,6 +44,7 @@ module Ci
metrics_referee: :gzip,
network_referee: :gzip,
lsif: :gzip,
dotenv: :gzip,
# All these file formats use `raw` as we need to store them uncompressed
# for Frontend to fetch the files and do analysis
......@@ -118,7 +120,8 @@ module Ci
metrics: 12, ## EE-specific
metrics_referee: 13, ## runner referees
network_referee: 14, ## runner referees
lsif: 15 # LSIF data for code navigation
lsif: 15, # LSIF data for code navigation
dotenv: 16
}
enum file_format: {
......
......@@ -4,11 +4,14 @@ module Ci
# Per-job CI variable. Variables can originate internally (`internal`)
# or be parsed out of a dotenv report artifact (`dotenv`).
#
# NOTE: the previous revision kept both the old unconditional key-uniqueness
# validation and the new conditional one; keeping both would re-enforce
# uniqueness for dotenv variables and defeat the exemption, so only the
# conditional validation is retained.
class JobVariable < ApplicationRecord
  extend Gitlab::Ci::Model
  include NewHasVariable
  # Enables efficient batch creation (used when importing dotenv variables).
  include BulkInsertSafe

  belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id

  alias_attribute :secret_value, :value

  # App-level uniqueness check is skipped for dotenv-sourced variables.
  # NOTE(review): the DB keeps a unique index on (key, job_id) — confirm
  # how dotenv bulk inserts are expected to interact with it.
  validates :key, uniqueness: { scope: :job_id }, unless: :dotenv_source?

  enum source: { internal: 0, dotenv: 1 }, _suffix: true
end
end
......@@ -67,14 +67,14 @@ class DiffFileEntity < DiffFileBaseEntity
private
def parallel_diff_view?(options, diff_file)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)
# If we're not rendering inline, we must be rendering parallel
!inline_diff_view?(options, diff_file)
end
def inline_diff_view?(options, diff_file)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)
# If nothing is present, inline will be the default.
options.fetch(:diff_view, :inline).to_sym == :inline
......
......@@ -10,10 +10,24 @@ module Ci
].freeze
# Creates job artifacts from an uploaded file, optionally parsing them
# (e.g. dotenv reports) before persisting.
#
# @param job the job the artifact belongs to
# @param artifacts_file the uploaded artifact payload
# @param params [Hash] upload params; 'artifact_type' selects the file type
# @param metadata_file optional metadata upload (nil when absent)
# @return [Hash] service result hash; :status is :success on success
def execute(job, artifacts_file, params, metadata_file: nil)
  # Idempotency: identical content (matched by SHA256) was already uploaded.
  return success if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)

  artifact, artifact_metadata = build_artifact(job, artifacts_file, params, metadata_file)
  # Parsing failures (e.g. invalid dotenv) abort before anything is persisted.
  result = parse_artifact(job, artifact)
  return result unless result[:status] == :success

  persist_artifact(job, artifact, artifact_metadata)
end
private
def build_artifact(job, artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.job_artifacts.build(
artifact = Ci::JobArtifact.new(
job_id: job.id,
project: job.project,
file: artifacts_file,
file_type: params['artifact_type'],
......@@ -21,34 +35,51 @@ module Ci
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
if metadata_file
job.job_artifacts.build(
project: job.project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
expire_in: expire_in)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
project: job.project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
expire_in: expire_in)
end
[artifact, artifact_metadata]
end
def parse_artifact(job, artifact)
unless Feature.enabled?(:ci_synchronous_artifact_parsing, job.project, default_enabled: true)
return success
end
if job.update(artifacts_expire_in: expire_in)
success
else
error(job.errors.messages, :bad_request)
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(job, artifact)
else success
end
end
rescue ActiveRecord::RecordNotUnique => error
return success if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)
def persist_artifact(job, artifact, artifact_metadata)
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
end
success
rescue ActiveRecord::RecordNotUnique => error
track_exception(error, job, params)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
track_exception(error, job, params)
error(error.message, :service_unavailable)
rescue => error
error(error.message, :bad_request)
end
private
def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
......@@ -63,5 +94,9 @@ module Ci
uploading_type: params['artifact_type']
)
end
def parse_dotenv_artifact(job, artifact)
Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
end
end
end
# frozen_string_literal: true

module Ci
  # Parses a dotenv report artifact and stores each KEY=VALUE pair as a
  # dotenv-sourced Ci::JobVariable on the artifact's job.
  class ParseDotenvArtifactService < ::BaseService
    # Hard limits guarding against oversized or abusive reports.
    MAX_ACCEPTABLE_DOTENV_SIZE = 5.kilobytes
    MAX_ACCEPTABLE_VARIABLES_COUNT = 10

    SizeLimitError = Class.new(StandardError)
    ParserError = Class.new(StandardError)

    # Validates, parses and bulk-inserts the variables.
    #
    # @param artifact a dotenv-type Ci::JobArtifact
    # @return [Hash] success, or error(:bad_request) on size/parse/validity failure
    def execute(artifact)
      validate!(artifact)

      variables = parse!(artifact)
      Ci::JobVariable.bulk_insert!(variables)

      success
    rescue SizeLimitError, ParserError, ActiveRecord::RecordInvalid => error
      Gitlab::ErrorTracking.track_exception(error, job_id: artifact.job_id)
      error(error.message, :bad_request)
    end

    private

    # Raises unless the artifact is a dotenv file strictly below the size limit.
    def validate!(artifact)
      unless artifact&.dotenv?
        raise ArgumentError, 'Artifact is not dotenv file type'
      end

      unless artifact.file.size < MAX_ACCEPTABLE_DOTENV_SIZE
        raise SizeLimitError,
          "Dotenv Artifact Too Big. Maximum Allowable Size: #{MAX_ACCEPTABLE_DOTENV_SIZE}"
      end
    end

    # Builds (unsaved) Ci::JobVariable records from every line of every blob.
    def parse!(artifact)
      variables = []

      artifact.each_blob do |blob|
        blob.each_line do |line|
          key, value = scan_line!(line)

          variables << Ci::JobVariable.new(job_id: artifact.job_id,
            source: :dotenv, key: key, value: value)
        end
      end

      # The count is checked after the full parse; total work is still
      # bounded by the size check in validate! above.
      if variables.size > MAX_ACCEPTABLE_VARIABLES_COUNT
        raise SizeLimitError,
          "Dotenv files cannot have more than #{MAX_ACCEPTABLE_VARIABLES_COUNT} variables"
      end

      variables
    end

    # Splits a line into [key, value], stripping whitespace in place.
    # NOTE(review): `(.*)=` is greedy, so for `A=B=C` the key becomes `A=B`
    # and the value `C` (split at the LAST '=') — confirm intended behavior.
    def scan_line!(line)
      result = line.scan(/^(.*)=(.*)$/).last
      raise ParserError, 'Invalid Format' if result.nil?

      result.each(&:strip!)
    end
  end
end
......@@ -5,7 +5,7 @@
- link_start = '<a href="%{url}">'.html_safe % { url: help_page_path('ci/variables/README', anchor: 'protected-variables') }
= s_('Environment variables are configured by your administrator to be %{link_start}protected%{link_end} by default').html_safe % { link_start: link_start, link_end: '</a>'.html_safe }
- if Feature.enabled?(:new_variables_ui, @project || @group)
- if Feature.enabled?(:new_variables_ui, @project || @group, default_enabled: true)
- is_group = !@group.nil?
#js-ci-project-variables{ data: { endpoint: save_endpoint, project_id: @project&.id || '', group: is_group.to_s, maskable_regex: ci_variable_maskable_regex} }
......
---
title: Support DotEnv Variables through report type artifact
merge_request: 26247
author:
type: added
---
title: Diffs load each view style separately, on demand
merge_request: 24821
author:
type: performance
---
title: Update UI for project and group settings CI variables
merge_request: 26901
author:
type: added
# frozen_string_literal: true

# Adds a `source` column to ci_job_variables distinguishing internally
# created variables (default) from ones parsed out of dotenv report artifacts.
class AddRuntimeCreatedToCiJobVariables < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  DEFAULT_SOURCE = 0 # Equivalent to Ci::JobVariable.internal_source

  def up
    # Helper backfills existing rows with the default without long locks
    # (hence disable_ddl_transaction! above).
    add_column_with_default(:ci_job_variables, :source, :integer, limit: 2, default: DEFAULT_SOURCE, allow_null: false)
  end

  def down
    remove_column(:ci_job_variables, :source)
  end
end
......@@ -780,6 +780,7 @@ ActiveRecord::Schema.define(version: 2020_03_12_163407) do
t.string "encrypted_value_iv"
t.bigint "job_id", null: false
t.integer "variable_type", limit: 2, default: 1, null: false
t.integer "source", limit: 2, default: 0, null: false
t.index ["job_id"], name: "index_ci_job_variables_on_job_id"
t.index ["key", "job_id"], name: "index_ci_job_variables_on_key_and_job_id", unique: true
end
......
......@@ -156,6 +156,70 @@ Starting with GitLab 9.3, the environment URL is exposed to the Runner via
- `.gitlab-ci.yml`.
- The external URL from the environment if not defined in `.gitlab-ci.yml`.
#### Set dynamic environment URLs after a job finishes
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/17066) in GitLab 12.9.
In a job script, you can specify a static [environment URL](#using-the-environment-url).
However, there may be times when you want a dynamic URL. For example,
if you deploy a Review App to an external hosting
service that generates a random URL per deployment, like `https://94dd65b.amazonaws.com/qa-lambda-1234567`,
you don't know the URL before the deployment script finishes.
If you want to use the environment URL in GitLab, you would have to update it manually.
To address this problem, you can configure a deployment job to report back a set of
variables, including the URL that was dynamically generated by the external service.
GitLab supports [dotenv](https://github.com/bkeepers/dotenv) file as the format,
and expands the `environment:url` value with variables defined in the dotenv file.
To use this feature, specify the
[`artifacts:reports:dotenv`](yaml/README.md#artifactsreportsdotenv) keyword in `.gitlab-ci.yml`.
##### Example of setting dynamic environment URLs
The following example shows a Review App that creates a new environment
per merge request. The `review` job is triggered by every push, and
creates or updates an environment named `review/your-branch-name`.
The environment URL is set to `$DYNAMIC_ENVIRONMENT_URL`:
```yaml
review:
script:
- DYNAMIC_ENVIRONMENT_URL=$(deploy-script) # In script, get the environment URL.
- echo "DYNAMIC_ENVIRONMENT_URL=$DYNAMIC_ENVIRONMENT_URL" >> deploy.env # Add the value to a dotenv file.
artifacts:
reports:
dotenv: deploy.env # Report back dotenv file to rails.
environment:
name: review/$CI_COMMIT_REF_SLUG
url: $DYNAMIC_ENVIRONMENT_URL # and set the variable produced in script to `environment:url`
on_stop: stop_review
stop_review:
script:
- ./teardown-environment
when: manual
environment:
name: review/$CI_COMMIT_REF_SLUG
action: stop
```
As soon as the `review` job finishes, GitLab updates the `review/your-branch-name`
environment's URL.
It parses the report artifact `deploy.env`, registers the listed variables as runtime-created,
uses them to expand `environment:url: $DYNAMIC_ENVIRONMENT_URL`, and sets the result as the environment URL.
You can also specify a static part of the URL at `environment:url:`, such as
`https://$DYNAMIC_ENVIRONMENT_URL`. If the value of `DYNAMIC_ENVIRONMENT_URL` is
`123.awesome.com`, the final result will be `https://123.awesome.com`.
The assigned URL for the `review/your-branch-name` environment is visible in the UI.
[See where the environment URL is displayed](#using-the-environment-url).
> **Notes:**
>
> - `stop_review` doesn't generate a dotenv report artifact, so it won't recognize the `DYNAMIC_ENVIRONMENT_URL` variable. Therefore you should not set `environment:url:` in the `stop_review` job.
> - If the environment URL is not valid (for example, the URL is malformed), the system doesn't update the environment URL.
### Configuring manual deployments
Adding `when: manual` to an automatically executed job's configuration converts it to
......
---
disqus_identifier: 'https://docs.gitlab.com/ee/ci/pipelines.html'
type: reference
---
......
---
disqus_identifier: 'https://docs.gitlab.com/ee/user/project/pipelines/job_artifacts.html'
type: reference, howto
---
......
---
disqus_identifier: 'https://docs.gitlab.com/ee/user/project/pipelines/schedules.html'
type: reference, howto
---
......
---
disqus_identifier: 'https://docs.gitlab.com/ee/user/project/pipelines/settings.html'
type: reference, howto
---
......
......@@ -2264,6 +2264,25 @@ concatenated into a single file. Use a filename pattern (`junit: rspec-*.xml`),
an array of filenames (`junit: [rspec-1.xml, rspec-2.xml, rspec-3.xml]`), or a
combination thereof (`junit: [rspec.xml, test-results/TEST-*.xml]`).
##### `artifacts:reports:dotenv`
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/17066) in GitLab 12.9. Requires GitLab Runner 11.5 and later.
The `dotenv` report collects a set of environment variables as artifacts.
The collected variables are registered as runtime-created variables of the job,
which is useful to [set dynamic environment URLs after a job finishes](../environments.md#set-dynamic-environment-urls-after-a-job-finishes).
It is not available for download through the web interface.
There are several limitations on top of the [original dotenv rules](https://github.com/motdotla/dotenv#rules).
- The variable key can contain only letters, digits and underscore ('_').
- The size of dotenv file must be smaller than 5 kilobytes.
- The number of variables must be less than 10.
- It doesn't support variable substitution in the dotenv file itself.
- It doesn't support empty lines and comments (`#`) in dotenv file.
- It doesn't support quote escape, spaces in a quote, a new line expansion in a quote, in dotenv file.
##### `artifacts:reports:codequality` **(STARTER)**
> Introduced in GitLab 11.5. Requires GitLab Runner 11.5 and above.
......
......@@ -39,6 +39,8 @@ To select your issue template for use within Incident Management:
GitLab can react to the alerts that your applications and services may be
triggering by automatically creating issues, and alerting developers via email.
The emails will be sent to [owners and maintainers](../permissions.md) of the project and will contain details on the alert as well as a link to see more information.
### Prometheus alerts
Prometheus alerts can be set up in both:
......
......@@ -11,7 +11,10 @@ module Gitlab
include ::Gitlab::Config::Entry::Validatable
include ::Gitlab::Config::Entry::Attributable
ALLOWED_KEYS = %i[junit codequality sast dependency_scanning container_scanning dast performance license_management license_scanning metrics lsif].freeze
ALLOWED_KEYS =
%i[junit codequality sast dependency_scanning container_scanning
dast performance license_management license_scanning metrics lsif
dotenv].freeze
attributes ALLOWED_KEYS
......@@ -31,6 +34,7 @@ module Gitlab
validates :license_scanning, array_of_strings_or_string: true
validates :metrics, array_of_strings_or_string: true
validates :lsif, array_of_strings_or_string: true
validates :dotenv, array_of_strings_or_string: true
end
end
......
......@@ -1777,6 +1777,9 @@ msgstr ""
msgid "An error occurred while committing your changes."
msgstr ""
msgid "An error occurred while decoding the file."
msgstr ""
msgid "An error occurred while deleting the approvers group"
msgstr ""
......@@ -1918,6 +1921,9 @@ msgstr ""
msgid "An error occurred while loading the file."
msgstr ""
msgid "An error occurred while loading the file. Please try again later."
msgstr ""
msgid "An error occurred while loading the merge request changes."
msgstr ""
......
......@@ -149,6 +149,16 @@ FactoryBot.define do
end
end
trait :dotenv do
file_type { :dotenv }
file_format { :gzip }
after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/build.env.gz'), 'application/x-gzip')
end
end
trait :correct_checksum do
after(:build) do |artifact, evaluator|
artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest
......
......@@ -6,5 +6,9 @@ FactoryBot.define do
value { 'VARIABLE_VALUE' }
job factory: :ci_build
trait :dotenv_source do
source { :dotenv }
end
end
end
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
import { FIXTURES_PATH } from 'spec/test_constants';
import renderPDF from '~/blob/pdf';
import component from '~/blob/pdf/pdf_viewer.vue';
import PdfLab from '~/pdf/index.vue';
const testPDF = `${FIXTURES_PATH}/blob/pdf/test.pdf`;
describe('PDF renderer', () => {
let viewer;
let app;
let wrapper;
const checkLoaded = done => {
if (app.loading) {
setTimeout(() => {
checkLoaded(done);
}, 100);
} else {
done();
}
const mountComponent = () => {
wrapper = shallowMount(component, {
propsData: {
pdf: testPDF,
},
});
};
preloadFixtures('static/pdf_viewer.html');
const findLoading = () => wrapper.find(GlLoadingIcon);
const findPdfLab = () => wrapper.find(PdfLab);
const findLoadError = () => wrapper.find({ ref: 'loadError' });
beforeEach(() => {
loadFixtures('static/pdf_viewer.html');
viewer = document.getElementById('js-pdf-viewer');
viewer.dataset.endpoint = testPDF;
mountComponent();
});
it('shows loading icon', () => {
renderPDF();
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
expect(document.querySelector('.loading')).not.toBeNull();
it('shows loading icon', () => {
expect(findLoading().exists()).toBe(true);
});
describe('successful response', () => {
beforeEach(done => {
app = renderPDF();
checkLoaded(done);
beforeEach(() => {
findPdfLab().vm.$emit('pdflabload');
});
it('does not show loading icon', () => {
expect(document.querySelector('.loading')).toBeNull();
expect(findLoading().exists()).toBe(false);
});
it('renders the PDF', () => {
expect(document.querySelector('.pdf-viewer')).not.toBeNull();
});
it('renders the PDF page', () => {
expect(document.querySelector('.pdf-page')).not.toBeNull();
expect(findPdfLab().exists()).toBe(true);
});
});
describe('error getting file', () => {
beforeEach(done => {
viewer.dataset.endpoint = 'invalid/path/to/file.pdf';
app = renderPDF();
checkLoaded(done);
beforeEach(() => {
findPdfLab().vm.$emit('pdflaberror', 'foo');
});
it('does not show loading icon', () => {
expect(document.querySelector('.loading')).toBeNull();
expect(findLoading().exists()).toBe(false);
});
it('shows error message', () => {
expect(document.querySelector('.md').textContent.trim()).toBe(
expect(findLoadError().text()).toBe(
'An error occurred while loading the file. Please try again later.',
);
});
......
<div class="file-content" data-endpoint="/test" id="js-pdf-viewer"></div>
import Vue from 'vue';
import { GlobalWorkerOptions } from 'pdfjs-dist/build/pdf';
import workerSrc from 'pdfjs-dist/build/pdf.worker.min';
import { FIXTURES_PATH } from 'spec/test_constants';
import PDFLab from '~/pdf/index.vue';
const pdf = `${FIXTURES_PATH}/blob/pdf/test.pdf`;
GlobalWorkerOptions.workerSrc = workerSrc;
const Component = Vue.extend(PDFLab);
describe('PDF component', () => {
......
import Vue from 'vue';
import pdfjsLib from 'pdfjs-dist/build/pdf';
import workerSrc from 'pdfjs-dist/build/pdf.worker.min';
import pdfjsLib from 'pdfjs-dist/webpack';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { FIXTURES_PATH } from 'spec/test_constants';
......@@ -14,7 +13,6 @@ describe('Page component', () => {
let testPage;
beforeEach(done => {
pdfjsLib.GlobalWorkerOptions.workerSrc = workerSrc;
pdfjsLib
.getDocument(testPDF)
.promise.then(pdf => pdf.getPage(1))
......
......@@ -44,6 +44,7 @@ describe Gitlab::Ci::Config::Entry::Reports do
:license_scanning | 'gl-license-scanning-report.json'
:performance | 'performance.json'
:lsif | 'lsif.json'
:dotenv | 'build.dotenv'
end
with_them do
......
......@@ -7,6 +7,11 @@ describe PagesDomain do
subject(:pages_domain) { described_class.new }
# Locking in date due to cert expiration date https://gitlab.com/gitlab-org/gitlab/-/issues/210557#note_304749257
around do |example|
Timecop.travel(Time.new(2020, 3, 12)) { example.run }
end
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:serverless_domain_clusters) }
......
......@@ -1937,6 +1937,49 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
end
end
# Dotenv artifacts are parsed at upload time so their key/value pairs become
# job variables; these examples cover the accepted and rejected formats.
context 'when artifact_type is dotenv' do
  context 'when artifact_format is gzip' do
    let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
    let(:params) { { artifact_type: :dotenv, artifact_format: :gzip } }

    it 'stores dotenv file' do
      upload_artifacts(file_upload, headers_with_token, params)

      expect(response).to have_gitlab_http_status(:created)
      expect(job.reload.job_artifacts_dotenv).not_to be_nil
    end

    # The fixture is expected to yield exactly two variables once parsed.
    it 'parses dotenv file' do
      expect do
        upload_artifacts(file_upload, headers_with_token, params)
      end.to change { job.job_variables.count }.from(0).to(2)
    end

    context 'when parse error happens' do
      # Deliberately not a dotenv payload, so parsing should fail.
      let(:file_upload) { fixture_file_upload('spec/fixtures/ci_build_artifacts_metadata.gz') }

      it 'returns an error' do
        upload_artifacts(file_upload, headers_with_token, params)

        expect(response).to have_gitlab_http_status(:bad_request)
        expect(json_response['message']).to eq('Invalid Format')
      end
    end
  end

  context 'when artifact_format is raw' do
    # Only gzip is accepted for dotenv artifacts; raw uploads are rejected
    # and nothing is stored.
    let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
    let(:params) { { artifact_type: :dotenv, artifact_format: :raw } }

    it 'returns an error' do
      upload_artifacts(file_upload, headers_with_token, params)

      expect(response).to have_gitlab_http_status(:bad_request)
      expect(job.reload.job_artifacts_dotenv).to be_nil
    end
  end
end
end
context 'when artifacts already exist for the job' do
......
......@@ -121,6 +121,42 @@ describe Ci::CreateJobArtifactsService do
end
end
# Uploading a dotenv artifact should synchronously invoke the parse service
# (behind the ci_synchronous_artifact_parsing feature flag).
context 'when artifact type is dotenv' do
  let(:artifacts_file) do
    file_to_upload('spec/fixtures/build.env.gz', sha256: artifacts_sha256)
  end

  let(:params) do
    {
      'artifact_type' => 'dotenv',
      'artifact_format' => 'gzip'
    }
  end

  it 'calls parse service' do
    # `expect_any_instance_of` does not yield a block, so the original
    # `expect_any_instance_of(...) do |service| ... end` silently never set
    # the expectation; `expect_next_instance_of` yields the instance the
    # service under test creates.
    expect_next_instance_of(Ci::ParseDotenvArtifactService) do |service|
      expect(service).to receive(:execute).once.and_call_original
    end

    expect(subject[:status]).to eq(:success)
    expect(job.job_variables.as_json).to contain_exactly(
      hash_including('key' => 'KEY1', 'value' => 'VAR1', 'source' => 'dotenv'),
      hash_including('key' => 'KEY2', 'value' => 'VAR2', 'source' => 'dotenv'))
  end

  context 'when ci_synchronous_artifact_parsing feature flag is disabled' do
    before do
      stub_feature_flags(ci_synchronous_artifact_parsing: false)
    end

    it 'does not call parse service' do
      expect(Ci::ParseDotenvArtifactService).not_to receive(:new)

      expect(subject[:status]).to eq(:success)
    end
  end
end
shared_examples 'rescues object storage error' do |klass, message, expected_message|
it "handles #{klass}" do
allow_next_instance_of(JobArtifactUploader) do |uploader|
......
# frozen_string_literal: true

require 'spec_helper'

# Specs for the service that parses a job's dotenv artifact into job variables.
describe Ci::ParseDotenvArtifactService do
  let_it_be(:project) { create(:project) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
  let(:build) { create(:ci_build, pipeline: pipeline, project: project) }
  # Second constructor argument (user) is irrelevant here, hence nil.
  let(:service) { described_class.new(project, nil) }

  describe '#execute' do
    subject { service.execute(artifact) }

    context 'when build has a dotenv artifact' do
      let!(:artifact) { create(:ci_job_artifact, :dotenv, job: build) }

      # The :dotenv fixture is expected to contain KEY1=VAR1 and KEY2=VAR2.
      it 'parses the artifact' do
        expect(subject[:status]).to eq(:success)

        expect(build.job_variables.as_json).to contain_exactly(
          hash_including('key' => 'KEY1', 'value' => 'VAR1'),
          hash_including('key' => 'KEY2', 'value' => 'VAR2'))
      end
context 'when parse error happens' do
before do
allow(service).to receive(:scan_line!) { raise described_class::ParserError.new('Invalid Format') }
end
it 'returns error' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
.with(described_class::ParserError, job_id: build.id)
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to eq('Invalid Format')
expect(subject[:http_status]).to eq(:bad_request)
end
end
context 'when artifact size is too big' do
before do
allow(artifact.file).to receive(:size) { 10.kilobytes }
end
it 'returns error' do
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to eq("Dotenv Artifact Too Big. Maximum Allowable Size: #{described_class::MAX_ACCEPTABLE_DOTENV_SIZE}")
expect(subject[:http_status]).to eq(:bad_request)
end
end
      # The remaining examples stub `each_blob` so each can exercise the
      # parser against a specific inline payload.
      context 'when artifact has the specified blob' do
        before do
          allow(artifact).to receive(:each_blob).and_yield(blob)
        end

        # Whitespace around the key is trimmed before validation.
        context 'when a white space trails the key' do
          let(:blob) { 'KEY1 =VAR1' }

          it 'trims the trailing space' do
            subject

            expect(build.job_variables.as_json).to contain_exactly(
              hash_including('key' => 'KEY1', 'value' => 'VAR1'))
          end
        end

        # No pair separator on the line, so the whole run is treated as one
        # key, which then fails key validation.
        context 'when multiple key/value pairs exist in one line' do
          let(:blob) { 'KEY1=VAR1KEY2=VAR1' }

          it 'returns error' do
            expect(subject[:status]).to eq(:error)
            expect(subject[:message]).to eq("Validation failed: Key can contain only letters, digits and '_'.")
            expect(subject[:http_status]).to eq(:bad_request)
          end
        end

        # Keys are restricted to letters, digits and '_'; values are not.
        context 'when key contains UNICODE' do
          let(:blob) { '🛹=skateboard' }

          it 'returns error' do
            expect(subject[:status]).to eq(:error)
            expect(subject[:message]).to eq("Validation failed: Key can contain only letters, digits and '_'.")
            expect(subject[:http_status]).to eq(:bad_request)
          end
        end

        context 'when value contains UNICODE' do
          let(:blob) { 'skateboard=🛹' }

          it 'parses the dotenv data' do
            subject

            expect(build.job_variables.as_json).to contain_exactly(
              hash_including('key' => 'skateboard', 'value' => '🛹'))
          end
        end

        context 'when key contains a space' do
          let(:blob) { 'K E Y 1=VAR1' }

          it 'returns error' do
            expect(subject[:status]).to eq(:error)
            expect(subject[:message]).to eq("Validation failed: Key can contain only letters, digits and '_'.")
            expect(subject[:http_status]).to eq(:bad_request)
          end
        end

        context 'when value contains a space' do
          let(:blob) { 'KEY1=V A R 1' }

          it 'parses the dotenv data' do
            subject

            expect(build.job_variables.as_json).to contain_exactly(
              hash_including('key' => 'KEY1', 'value' => 'V A R 1'))
          end
        end
context 'when value is double quoated' do
let(:blob) { 'KEY1="VAR1"' }
it 'parses the value as-is' do
subject
expect(build.job_variables.as_json).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => '"VAR1"'))
end
end
context 'when value is single quoated' do
let(:blob) { "KEY1='VAR1'" }
it 'parses the value as-is' do
subject
expect(build.job_variables.as_json).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => "'VAR1'"))
end
end
context 'when value has white spaces in double quote' do
let(:blob) { 'KEY1=" VAR1 "' }
it 'parses the value as-is' do
subject
expect(build.job_variables.as_json).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => '" VAR1 "'))
end
end
context 'when key is missing' do
let(:blob) { '=VAR1' }
it 'returns error' do
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to match(/Key can't be blank/)
expect(subject[:http_status]).to eq(:bad_request)
end
end
context 'when value is missing' do
let(:blob) { 'KEY1=' }
it 'parses the dotenv data' do
subject
expect(build.job_variables.as_json).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => ''))
end
end
context 'when it is not dotenv format' do
let(:blob) { "{ 'KEY1': 'VAR1' }" }
it 'returns error' do
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to eq('Invalid Format')
expect(subject[:http_status]).to eq(:bad_request)
end
end
context 'when more than limitated variables are specified in dotenv' do
let(:blob) do
StringIO.new.tap do |s|
(described_class::MAX_ACCEPTABLE_VARIABLES_COUNT + 1).times do |i|
s << "KEY#{i}=VAR#{i}\n"
end
end.string
end
it 'returns error' do
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to eq("Dotenv files cannot have more than #{described_class::MAX_ACCEPTABLE_VARIABLES_COUNT} variables")
expect(subject[:http_status]).to eq(:bad_request)
end
end
context 'when variables are cross-referenced in dotenv' do
let(:blob) do
<<~EOS
KEY1=VAR1
KEY2=${KEY1}_Test
EOS
end
it 'does not support variable expansion in dotenv parser' do
subject
expect(build.job_variables.as_json).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR1'),
hash_including('key' => 'KEY2', 'value' => '${KEY1}_Test'))
end
end
context 'when there is an empty line' do
let(:blob) do
<<~EOS
KEY1=VAR1
KEY2=VAR2
EOS
end
it 'does not support empty line in dotenv parser' do
subject
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to eq('Invalid Format')
expect(subject[:http_status]).to eq(:bad_request)
end
end
context 'when there is a comment' do
let(:blob) do
<<~EOS
KEY1=VAR1 # This is variable
EOS
end
it 'does not support comment in dotenv parser' do
subject
expect(build.job_variables.as_json).to contain_exactly(
hash_including('key' => 'KEY1', 'value' => 'VAR1 # This is variable'))
end
end
end
end
context 'when build does not have a dotenv artifact' do
let!(:artifact) { }
it 'raises an error' do
expect { subject }.to raise_error(ArgumentError)
end
end
end
end
......@@ -36,7 +36,7 @@ describe Ci::RetryBuildService do
job_artifacts_performance job_artifacts_lsif
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee needs].freeze
job_artifacts_network_referee job_artifacts_dotenv needs].freeze
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
......
......@@ -177,6 +177,26 @@ describe Deployments::AfterCreateService do
it { is_expected.to eq('http://review/host') }
end
# Variables produced at runtime (dotenv source) must be available when the
# deployment expands the dynamic environment URL.
context 'when job variables are generated during runtime' do
  let(:job) do
    create(:ci_build,
      :with_deployment,
      pipeline: pipeline,
      environment: 'review/$CI_COMMIT_REF_NAME',
      project: project,
      job_variables: [job_variable],
      # URL references $DYNAMIC_ENV_URL, supplied by the job variable below.
      options: { environment: { name: 'review/$CI_COMMIT_REF_NAME', url: 'http://$DYNAMIC_ENV_URL' } })
  end

  let(:job_variable) do
    build(:ci_job_variable, :dotenv_source, key: 'DYNAMIC_ENV_URL', value: 'abc.test.com')
  end

  it 'expands the environment URL from the dynamic variable' do
    is_expected.to eq('http://abc.test.com')
  end
end
context 'when yaml environment does not have url' do
let(:job) { create(:ci_build, :with_deployment, pipeline: pipeline, environment: 'staging', project: project) }
......
......@@ -7,6 +7,11 @@ describe PagesDomainSslRenewalCronWorker do
subject(:worker) { described_class.new }
# Locking in date due to cert expiration date https://gitlab.com/gitlab-org/gitlab/-/issues/210557#note_304749257
around do |example|
Timecop.travel(Time.new(2020, 3, 12)) { example.run }
end
before do
stub_lets_encrypt_settings
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment