Commit 8fd76a75 authored by Shinya Maeda

Merge branch 'live-trace-v2' into live-trace-v2-efficient-destroy-all

parents 07375df1 8b47980e
@@ -9,6 +9,10 @@ terms.
 [DCO + License](https://gitlab.com/gitlab-org/dco/blob/master/README.md)
 
+All Documentation content that resides under the [doc/ directory](/doc) of this
+repository is licensed under Creative Commons:
+[CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/).
+
 _This notice should stay as the first item in the CONTRIBUTING.md file._
 
 ---
......
@@ -33,7 +33,7 @@ gem 'grape-route-helpers', '~> 2.1.0'
 gem 'faraday', '~> 0.12'
 
 # Authentication libraries
-gem 'devise', '~> 4.2'
+gem 'devise', '~> 4.4'
 gem 'doorkeeper', '~> 4.3'
 gem 'doorkeeper-openid_connect', '~> 1.3'
 gem 'omniauth', '~> 1.8'
@@ -41,7 +41,7 @@ gem 'omniauth-auth0', '~> 2.0.0'
 gem 'omniauth-azure-oauth2', '~> 0.0.9'
 gem 'omniauth-cas3', '~> 1.1.4'
 gem 'omniauth-facebook', '~> 4.0.0'
-gem 'omniauth-github', '~> 1.1.1'
+gem 'omniauth-github', '~> 1.3'
 gem 'omniauth-gitlab', '~> 1.0.2'
 gem 'omniauth-google-oauth2', '~> 0.5.3'
 gem 'omniauth-kerberos', '~> 0.3.0', group: :kerberos
@@ -90,7 +90,7 @@ gem 'github-linguist', '~> 5.3.3', require: 'linguist'
 
 # API
 gem 'grape', '~> 1.0'
-gem 'grape-entity', '~> 0.6.0'
+gem 'grape-entity', '~> 0.7.1'
 gem 'rack-cors', '~> 1.0.0', require: 'rack/cors'
 
 # Disable strong_params so that Mash does not respond to :permitted?
......
@@ -143,7 +143,7 @@ GEM
    connection_pool (2.2.1)
    crack (0.4.3)
      safe_yaml (~> 1.0.0)
-    crass (1.0.3)
+    crass (1.0.4)
    creole (0.5.0)
    css_parser (1.5.0)
      addressable
@@ -162,10 +162,10 @@ GEM
    descendants_tracker (0.0.4)
      thread_safe (~> 0.3, >= 0.3.1)
    device_detector (1.0.0)
-    devise (4.2.0)
+    devise (4.4.3)
      bcrypt (~> 3.0)
      orm_adapter (~> 0.1)
-      railties (>= 4.1.0, < 5.1)
+      railties (>= 4.1.0, < 6.0)
      responders
      warden (~> 1.2.3)
    devise-two-factor (3.0.0)
@@ -366,8 +366,8 @@ GEM
      rack (>= 1.3.0)
      rack-accept
      virtus (>= 1.0.0)
-    grape-entity (0.6.0)
-      activesupport
+    grape-entity (0.7.1)
+      activesupport (>= 4.0)
      multi_json (>= 1.3.2)
    grape-route-helpers (2.1.0)
      activesupport
@@ -546,9 +546,9 @@ GEM
      omniauth (~> 1.2)
    omniauth-facebook (4.0.0)
      omniauth-oauth2 (~> 1.2)
-    omniauth-github (1.1.2)
-      omniauth (~> 1.0)
-      omniauth-oauth2 (~> 1.1)
+    omniauth-github (1.3.0)
+      omniauth (~> 1.5)
+      omniauth-oauth2 (>= 1.4.0, < 2.0)
    omniauth-gitlab (1.0.2)
      omniauth (~> 1.0)
      omniauth-oauth2 (~> 1.0)
@@ -646,7 +646,7 @@ GEM
      pry (>= 0.9.10)
    public_suffix (3.0.2)
    pyu-ruby-sasl (0.0.3.3)
-    rack (1.6.9)
+    rack (1.6.10)
    rack-accept (0.4.5)
      rack (>= 0.4)
    rack-attack (4.4.1)
@@ -694,7 +694,7 @@ GEM
    rainbow (2.2.2)
      rake
    raindrops (0.18.0)
-    rake (12.3.0)
+    rake (12.3.1)
    rb-fsevent (0.10.2)
    rb-inotify (0.9.10)
      ffi (>= 0.5.0, < 2)
@@ -735,8 +735,9 @@ GEM
      declarative-option (< 0.2.0)
      uber (< 0.2.0)
    request_store (1.3.1)
-    responders (2.3.0)
-      railties (>= 4.2.0, < 5.1)
+    responders (2.4.0)
+      actionpack (>= 4.2.0, < 5.3)
+      railties (>= 4.2.0, < 5.3)
    rest-client (2.0.2)
      http-cookie (>= 1.0.2, < 2.0)
      mime-types (>= 1.16, < 4.0)
@@ -966,7 +967,7 @@ GEM
      descendants_tracker (~> 0.0, >= 0.0.3)
      equalizer (~> 0.0, >= 0.0.9)
    vmstat (2.3.0)
-    warden (1.2.6)
+    warden (1.2.7)
      rack (>= 1.0)
    webmock (2.3.2)
      addressable (>= 2.3.6)
@@ -1028,7 +1029,7 @@ DEPENDENCIES
  deckar01-task_list (= 2.0.0)
  default_value_for (~> 3.0.0)
  device_detector
-  devise (~> 4.2)
+  devise (~> 4.4)
  devise-two-factor (~> 3.0.0)
  diffy (~> 3.1.0)
  doorkeeper (~> 4.3)
@@ -1072,7 +1073,7 @@ DEPENDENCIES
  google-protobuf (= 3.5.1)
  gpgme
  grape (~> 1.0)
-  grape-entity (~> 0.6.0)
+  grape-entity (~> 0.7.1)
  grape-route-helpers (~> 2.1.0)
  grape_logging (~> 1.7)
  grpc (~> 1.11.0)
@@ -1113,7 +1114,7 @@ DEPENDENCIES
  omniauth-azure-oauth2 (~> 0.0.9)
  omniauth-cas3 (~> 1.1.4)
  omniauth-facebook (~> 4.0.0)
-  omniauth-github (~> 1.1.1)
+  omniauth-github (~> 1.3)
  omniauth-gitlab (~> 1.0.2)
  omniauth-google-oauth2 (~> 0.5.3)
  omniauth-kerberos (~> 0.3.0)
......
@@ -162,6 +162,7 @@ GEM
    activerecord (>= 3.2.0, < 5.2)
    descendants_tracker (0.0.4)
      thread_safe (~> 0.3, >= 0.3.1)
+    device_detector (1.0.1)
    devise (4.4.1)
      bcrypt (~> 3.0)
      orm_adapter (~> 0.1)
@@ -375,7 +376,7 @@ GEM
      rake
    grape_logging (1.7.0)
      grape
-    grpc (1.10.0)
+    grpc (1.11.0)
      google-protobuf (~> 3.1)
      googleapis-common-protos-types (~> 1.0.0)
      googleauth (>= 0.5.1, < 0.7)
@@ -554,9 +555,6 @@ GEM
      jwt (>= 1.5)
      omniauth (>= 1.1.1)
      omniauth-oauth2 (>= 1.5)
-    omniauth-jwt (0.0.2)
-      jwt
-      omniauth (~> 1.1)
    omniauth-kerberos (0.3.0)
      omniauth-multipassword
      timfel-krb5-auth (~> 0.8)
@@ -1033,6 +1031,7 @@ DEPENDENCIES
  database_cleaner (~> 1.5.0)
  deckar01-task_list (= 2.0.0)
  default_value_for (~> 3.0.5)
+  device_detector
  devise (~> 4.2)
  devise-two-factor (~> 3.0.0)
  diffy (~> 3.1.0)
@@ -1080,7 +1079,7 @@ DEPENDENCIES
  grape-entity (~> 0.6.0)
  grape-route-helpers (~> 2.1.0)
  grape_logging (~> 1.7)
-  grpc (~> 1.10.0)
+  grpc (~> 1.11.0)
  haml_lint (~> 0.26.0)
  hamlit (~> 2.6.1)
  hashie-forbidden_attributes
@@ -1121,7 +1120,6 @@ DEPENDENCIES
  omniauth-github (~> 1.1.1)
  omniauth-gitlab (~> 1.0.2)
  omniauth-google-oauth2 (~> 0.5.3)
-  omniauth-jwt (~> 0.0.2)
  omniauth-kerberos (~> 0.3.0)
  omniauth-oauth2-generic (~> 0.2.2)
  omniauth-saml (~> 1.10)
......
@@ -5,3 +5,8 @@ Permission is hereby granted, free of charge, to any person obtaining a copy of
 The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+---
+
+All Documentation content that resides under the doc/ directory of this
+repository is licensed under Creative Commons: CC BY-SA 4.0.
@@ -43,6 +43,7 @@
     <div class="environments-container">
       <loading-icon
+        class="prepend-top-default"
         label="Loading environments"
         v-if="isLoading"
         size="3"
......
 <script>
-import playIconSvg from 'icons/_icon_play.svg';
+import Icon from '~/vue_shared/components/icon.vue';
 import eventHub from '../event_hub';
 import loadingIcon from '../../vue_shared/components/loading_icon.vue';
 import tooltip from '../../vue_shared/directives/tooltip';
@@ -8,9 +8,9 @@
   directives: {
     tooltip,
   },
   components: {
     loadingIcon,
+    Icon,
   },
   props: {
     actions: {
@@ -19,20 +19,16 @@
       default: () => [],
     },
   },
   data() {
     return {
-      playIconSvg,
       isLoading: false,
     };
   },
   computed: {
     title() {
       return 'Deploy to...';
     },
   },
   methods: {
     onClickAction(endpoint) {
       this.isLoading = true;
@@ -65,7 +61,10 @@
       :disabled="isLoading"
     >
       <span>
-        <span v-html="playIconSvg"></span>
+        <icon
+          name="play"
+          :size="12"
+        />
         <i
           class="fa fa-caret-down"
           aria-hidden="true"
@@ -86,7 +85,10 @@
         :class="{ disabled: isActionDisabled(action) }"
         :disabled="isActionDisabled(action)"
       >
-        <span v-html="playIconSvg"></span>
+        <icon
+          name="play"
+          :size="12"
+        />
         <span>
           {{ action.name }}
         </span>
......
 <script>
+import Icon from '~/vue_shared/components/icon.vue';
 import tooltip from '../../vue_shared/directives/tooltip';
 import { s__ } from '../../locale';
@@ -6,6 +7,9 @@
  * Renders the external url link in environments table.
  */
 export default {
+  components: {
+    Icon,
+  },
   directives: {
     tooltip,
   },
@@ -15,7 +19,6 @@
       required: true,
     },
   },
   computed: {
     title() {
       return s__('Environments|Open');
@@ -34,10 +37,9 @@
     :aria-label="title"
     :href="externalUrl"
   >
-    <i
-      class="fa fa-external-link"
-      aria-hidden="true"
-    >
-    </i>
+    <icon
+      name="external-link"
+      :size="12"
+    />
   </a>
 </template>
@@ -2,20 +2,22 @@
 /**
  * Renders the Monitoring (Metrics) link in environments table.
  */
+import Icon from '~/vue_shared/components/icon.vue';
 import tooltip from '../../vue_shared/directives/tooltip';
 
 export default {
+  components: {
+    Icon,
+  },
   directives: {
     tooltip,
   },
   props: {
     monitoringUrl: {
       type: String,
       required: true,
     },
   },
   computed: {
     title() {
       return 'Monitoring';
@@ -33,10 +35,9 @@
     :title="title"
     :aria-label="title"
   >
-    <i
-      class="fa fa-area-chart"
-      aria-hidden="true"
-    >
-    </i>
+    <icon
+      name="chart"
+      :size="12"
+    />
   </a>
 </template>
@@ -12,7 +12,6 @@
   components: {
     loadingIcon,
   },
   props: {
     retryUrl: {
       type: String,
@@ -24,13 +23,11 @@
       default: true,
     },
   },
   data() {
     return {
       isLoading: false,
     };
   },
   methods: {
     onClick() {
       this.isLoading = true;
......
@@ -3,14 +3,16 @@
  * Renders a terminal button to open a web terminal.
  * Used in environments table.
  */
-import terminalIconSvg from 'icons/_icon_terminal.svg';
+import Icon from '~/vue_shared/components/icon.vue';
 import tooltip from '../../vue_shared/directives/tooltip';
 
 export default {
+  components: {
+    Icon,
+  },
   directives: {
     tooltip,
   },
   props: {
     terminalPath: {
       type: String,
@@ -18,13 +20,6 @@
       default: '',
     },
   },
-  data() {
-    return {
-      terminalIconSvg,
-    };
-  },
   computed: {
     title() {
       return 'Terminal';
@@ -40,7 +35,10 @@
     :title="title"
     :aria-label="title"
     :href="terminalPath"
-    v-html="terminalIconSvg"
   >
+    <icon
+      name="terminal"
+      :size="12"
+    />
   </a>
 </template>
@@ -43,7 +43,7 @@ export default {
       return `${this.changedIcon}-solid`;
     },
     changedIconClass() {
-      return `multi-${this.changedIcon} prepend-left-5 pull-left`;
+      return `multi-${this.changedIcon} pull-left`;
     },
     tooltipTitle() {
       if (!this.showTooltip) return undefined;
@@ -79,13 +79,7 @@ export default {
     class="ide-file-changed-icon"
   >
     <icon
-      v-if="file.staged && showStagedIcon"
-      :name="stagedIcon"
-      :size="12"
-      :css-classes="changedIconClass"
-    />
-    <icon
-      v-if="file.changed || file.tempFile || (file.staged && !showStagedIcon)"
+      v-if="file.changed || file.tempFile || file.staged"
       :name="changedIcon"
       :size="12"
       :css-classes="changedIconClass"
......
@@ -15,17 +15,10 @@ export default {
       type: String,
       required: true,
     },
-    committedStateSvgPath: {
-      type: String,
-      required: true,
-    },
   },
   computed: {
-    ...mapState(['lastCommitMsg', 'rightPanelCollapsed']),
+    ...mapState(['lastCommitMsg', 'rightPanelCollapsed', 'changedFiles', 'stagedFiles']),
     ...mapGetters(['collapseButtonIcon', 'collapseButtonTooltip']),
-    statusSvg() {
-      return this.lastCommitMsg ? this.committedStateSvgPath : this.noChangesStateSvgPath;
-    },
   },
   methods: {
     ...mapActions(['toggleRightPanelCollapsed']),
@@ -35,6 +28,7 @@ export default {
 <template>
   <div
+    v-if="!lastCommitMsg"
     class="multi-file-commit-panel-section ide-commit-empty-state js-empty-state"
   >
     <header
@@ -64,12 +58,11 @@ export default {
       v-if="!rightPanelCollapsed"
     >
       <div class="svg-content svg-80">
-        <img :src="statusSvg" />
+        <img :src="noChangesStateSvgPath" />
       </div>
       <div class="append-right-default prepend-left-default">
         <div
           class="text-content text-center"
-          v-if="!lastCommitMsg"
         >
           <h4>
             {{ __('No changes') }}
@@ -78,15 +71,6 @@ export default {
             {{ __('Edit files in the editor and commit changes here') }}
           </p>
         </div>
-        <div
-          class="text-content text-center"
-          v-else
-        >
-          <h4>
-            {{ __('All changes are committed') }}
-          </h4>
-          <p v-html="lastCommitMsg"></p>
-        </div>
       </div>
     </div>
   </div>
......
@@ -36,7 +36,7 @@ export default {
       return this.file.tempFile ? `file-addition${prefix}` : `file-modified${prefix}`;
     },
     iconClass() {
-      return `multi-file-${this.file.tempFile ? 'additions' : 'modified'} append-right-8`;
+      return `multi-file-${this.file.tempFile ? 'addition' : 'modified'} append-right-8`;
     },
   },
   methods: {
......
<script>
import { mapState } from 'vuex';
export default {
props: {
committedStateSvgPath: {
type: String,
required: true,
},
},
computed: {
...mapState(['lastCommitMsg']),
},
};
</script>
<template>
<div
class="multi-file-commit-panel-success-message"
aria-live="assertive"
>
<div class="svg-content svg-80">
<img
:src="committedStateSvgPath"
alt=""
/>
</div>
<div class="append-right-default prepend-left-default">
<div
class="text-content text-center"
>
<h4>
{{ __('All changes are committed') }}
</h4>
<p v-html="lastCommitMsg"></p>
</div>
</div>
</div>
</template>
@@ -7,6 +7,7 @@ import LoadingButton from '~/vue_shared/components/loading_button.vue';
 import CommitFilesList from './commit_sidebar/list.vue';
 import EmptyState from './commit_sidebar/empty_state.vue';
 import CommitMessageField from './commit_sidebar/message_field.vue';
+import SuccessMessage from './commit_sidebar/success_message.vue';
 import * as consts from '../stores/modules/commit/constants';
 import Actions from './commit_sidebar/actions.vue';
@@ -16,6 +17,7 @@ export default {
     Icon,
     CommitFilesList,
     EmptyState,
+    SuccessMessage,
     Actions,
     LoadingButton,
     CommitMessageField,
@@ -34,9 +36,15 @@
     },
   },
   computed: {
-    ...mapState(['changedFiles', 'stagedFiles', 'rightPanelCollapsed']),
+    showStageUnstageArea() {
+      return !!(this.someUncommitedChanges || this.lastCommitMsg || !this.unusedSeal);
+    },
+    someUncommitedChanges() {
+      return !!(this.changedFiles.length || this.stagedFiles.length);
+    },
+    ...mapState(['changedFiles', 'stagedFiles', 'rightPanelCollapsed', 'lastCommitMsg', 'unusedSeal']),
     ...mapState('commit', ['commitMessage', 'submitCommitLoading']),
-    ...mapGetters('commit', ['commitButtonDisabled', 'discardDraftButtonDisabled', 'branchName']),
+    ...mapGetters('commit', ['commitButtonDisabled', 'discardDraftButtonDisabled']),
   },
   methods: {
     ...mapActions('commit', [
@@ -69,7 +77,7 @@
       </template>
     </deprecated-modal>
     <template
-      v-if="changedFiles.length || stagedFiles.length"
+      v-if="showStageUnstageArea"
     >
       <commit-files-list
         icon-name="unstaged"
@@ -89,11 +97,23 @@
         :show-toggle="false"
         :staged-list="true"
       />
+    </template>
+    <empty-state
+      v-if="unusedSeal"
+      :no-changes-state-svg-path="noChangesStateSvgPath"
+    />
+    <div
+      class="multi-file-commit-panel-bottom"
+    >
       <form
         class="form-horizontal multi-file-commit-form"
         @submit.prevent.stop="commitChanges"
         v-if="!rightPanelCollapsed"
       >
+        <success-message
+          v-if="lastCommitMsg && !someUncommitedChanges"
+          :committed-state-svg-path="committedStateSvgPath"
+        />
         <commit-message-field
           :text="commitMessage"
           @input="updateCommitMessage"
@@ -117,11 +137,6 @@
         </button>
       </div>
     </form>
-    </template>
-    <empty-state
-      v-else
-      :no-changes-state-svg-path="noChangesStateSvgPath"
-      :committed-state-svg-path="committedStateSvgPath"
-    />
+    </div>
   </div>
 </template>
 <script>
-import { mapActions } from 'vuex';
-import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';
-import fileIcon from '~/vue_shared/components/file_icon.vue';
+import { mapActions, mapGetters } from 'vuex';
+import { n__, __, sprintf } from '~/locale';
+import tooltip from '~/vue_shared/directives/tooltip';
+import SkeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';
+import Icon from '~/vue_shared/components/icon.vue';
+import FileIcon from '~/vue_shared/components/file_icon.vue';
 import router from '../ide_router';
-import newDropdown from './new_dropdown/index.vue';
-import fileStatusIcon from './repo_file_status_icon.vue';
-import changedFileIcon from './changed_file_icon.vue';
-import mrFileIcon from './mr_file_icon.vue';
+import NewDropdown from './new_dropdown/index.vue';
+import FileStatusIcon from './repo_file_status_icon.vue';
+import ChangedFileIcon from './changed_file_icon.vue';
+import MrFileIcon from './mr_file_icon.vue';
 
 export default {
   name: 'RepoFile',
+  directives: {
+    tooltip,
+  },
   components: {
-    skeletonLoadingContainer,
-    newDropdown,
-    fileStatusIcon,
-    fileIcon,
-    changedFileIcon,
-    mrFileIcon,
+    SkeletonLoadingContainer,
+    NewDropdown,
+    FileStatusIcon,
+    FileIcon,
+    ChangedFileIcon,
+    MrFileIcon,
+    Icon,
   },
   props: {
     file: {
@@ -29,6 +36,34 @@
     },
   },
   computed: {
...mapGetters([
'getChangesInFolder',
'getUnstagedFilesCountForPath',
'getStagedFilesCountForPath',
]),
folderUnstagedCount() {
return this.getUnstagedFilesCountForPath(this.file.path);
},
folderStagedCount() {
return this.getStagedFilesCountForPath(this.file.path);
},
changesCount() {
return this.getChangesInFolder(this.file.path);
},
folderChangesTooltip() {
if (this.changesCount === 0) return undefined;
if (this.folderUnstagedCount > 0 && this.folderStagedCount === 0) {
return n__('%d unstaged change', '%d unstaged changes', this.folderUnstagedCount);
} else if (this.folderUnstagedCount === 0 && this.folderStagedCount > 0) {
return n__('%d staged change', '%d staged changes', this.folderStagedCount);
}
return sprintf(__('%{unstaged} unstaged and %{staged} staged changes'), {
unstaged: this.folderUnstagedCount,
staged: this.folderStagedCount,
});
},
     isTree() {
       return this.file.type === 'tree';
     },
@@ -48,10 +83,19 @@
       'is-open': this.file.opened,
     };
   },
showTreeChangesCount() {
return this.isTree && this.changesCount > 0 && !this.file.opened;
},
showChangedFileIcon() {
return this.file.changed || this.file.tempFile || this.file.staged;
},
   },
   updated() {
     if (this.file.type === 'blob' && this.file.active) {
-      this.$el.scrollIntoView();
+      this.$el.scrollIntoView({
+        behavior: 'smooth',
+        block: 'nearest',
+      });
     }
   },
   methods: {
@@ -101,8 +145,23 @@
     <mr-file-icon
       v-if="file.mrChange"
     />
<span
v-if="showTreeChangesCount"
class="ide-tree-changes"
>
{{ changesCount }}
<icon
v-tooltip
:title="folderChangesTooltip"
data-container="body"
data-placement="right"
name="file-modified"
:size="12"
css-classes="prepend-left-5 multi-file-modified"
/>
</span>
     <changed-file-icon
-      v-if="file.changed || file.tempFile || file.staged"
+      v-else-if="showChangedFileIcon"
       :file="file"
       :show-tooltip="true"
       :show-staged-icon="true"
......
@@ -149,6 +149,12 @@ export const updateTempFlagForEntry = ({ commit, dispatch, state }, { file, temp
 export const toggleFileFinder = ({ commit }, fileFindVisible) =>
   commit(types.TOGGLE_FILE_FINDER, fileFindVisible);
export const burstUnusedSeal = ({ state, commit }) => {
if (state.unusedSeal) {
commit(types.BURST_UNUSED_SEAL);
}
};
export * from './actions/tree';
export * from './actions/file';
export * from './actions/project';
......
@@ -117,7 +117,7 @@ export const getRawFileData = ({ state, commit, dispatch }, { path, baseSha }) =
   });
 };
 
-export const changeFileContent = ({ state, commit }, { path, content }) => {
+export const changeFileContent = ({ commit, dispatch, state }, { path, content }) => {
   const file = state.entries[path];
 
   commit(types.UPDATE_FILE_CONTENT, { path, content });
@@ -128,6 +128,8 @@ export const changeFileContent = ({ state, commit }, { path, content }) => {
   } else if (!file.changed && indexOfChangedFile !== -1) {
     commit(types.REMOVE_FILE_FROM_CHANGED, path);
   }
+
+  dispatch('burstUnusedSeal', {}, { root: true });
 };
 
 export const setFileLanguage = ({ getters, commit }, { fileLanguage }) => {
......
 import { __ } from '~/locale';
+import { getChangesCountForFiles, filePathMatches } from './utils';
 
 export const activeFile = state => state.openFiles.find(file => file.active) || null;
@@ -55,7 +56,23 @@ export const allBlobs = state =>
     }, [])
     .sort((a, b) => b.lastOpenedAt - a.lastOpenedAt);
 
+export const getChangedFile = state => path => state.changedFiles.find(f => f.path === path);
 export const getStagedFile = state => path => state.stagedFiles.find(f => f.path === path);
export const getChangesInFolder = state => path => {
const changedFilesCount = state.changedFiles.filter(f => filePathMatches(f, path)).length;
const stagedFilesCount = state.stagedFiles.filter(
f => filePathMatches(f, path) && !getChangedFile(state)(f.path),
).length;
return changedFilesCount + stagedFilesCount;
};
export const getUnstagedFilesCountForPath = state => path =>
getChangesCountForFiles(state.changedFiles, path);
export const getStagedFilesCountForPath = state => path =>
getChangesCountForFiles(state.stagedFiles, path);
// prevent babel-plugin-rewire from generating an invalid default during karma tests
export default () => {};
@@ -182,6 +182,10 @@ export const commitChanges = ({ commit, state, getters, dispatch, rootState }) =
         }
 
         commit(rootTypes.CLEAR_STAGED_CHANGES, null, { root: true });
+
+        setTimeout(() => {
+          commit(rootTypes.SET_LAST_COMMIT_MSG, '', { root: true });
+        }, 5000);
       })
       .then(() => dispatch('updateCommitAction', consts.COMMIT_TO_CURRENT_BRANCH));
     })
......
@@ -61,3 +61,4 @@ export const REMOVE_PENDING_TAB = 'REMOVE_PENDING_TAB';
 export const UPDATE_TEMP_FLAG = 'UPDATE_TEMP_FLAG';
 export const TOGGLE_FILE_FINDER = 'TOGGLE_FILE_FINDER';
export const BURST_UNUSED_SEAL = 'BURST_UNUSED_SEAL';
@@ -128,6 +128,11 @@
       }),
     });
   },
+  [types.BURST_UNUSED_SEAL](state) {
+    Object.assign(state, {
+      unusedSeal: false,
+    });
+  },
   ...projectMutations,
   ...mergeRequestMutation,
   ...fileMutations,
......
@@ -18,5 +18,6 @@ export default () => ({
   entries: {},
   viewer: 'editor',
   delayViewerUpdated: false,
+  unusedSeal: true,
   fileFindVisible: false,
 });
@@ -33,7 +33,6 @@ export const dataStructure = () => ({
   raw: '',
   content: '',
   parentTreeUrl: '',
-  parentPath: '',
   renderError: false,
   base64: false,
   editorRow: 1,
@@ -43,6 +42,7 @@ export const dataStructure = () => ({
   viewMode: 'edit',
   previewMode: null,
   size: 0,
+  parentPath: null,
   lastOpenedAt: 0,
 });
@@ -83,7 +83,6 @@ export const decorateData = entity => {
     opened,
     active,
     parentTreeUrl,
-    parentPath,
     changed,
     renderError,
     content,
@@ -91,6 +90,7 @@
     previewMode,
     file_lock,
     html,
+    parentPath,
   };
 };
@@ -137,3 +137,9 @@ export const sortTree = sortedTree =>
     }),
   )
   .sort(sortTreesByTypeAndName);
export const filePathMatches = (f, path) =>
f.path.replace(new RegExp(`${f.name}$`), '').indexOf(`${path}/`) === 0;
export const getChangesCountForFiles = (files, path) =>
files.filter(f => filePathMatches(f, path)).length;
@@ -74,7 +74,11 @@ export function capitalizeFirstCharacter(text) {
  * @param {*} replace
  * @returns {String}
  */
-export const stripHtml = (string, replace = '') => string.replace(/<[^>]*>/g, replace);
+export const stripHtml = (string, replace = '') => {
+  if (!string) return string;
+
+  return string.replace(/<[^>]*>/g, replace);
+};
 
 /**
  * Converts snake_case string to camelCase
......
@@ -70,6 +70,9 @@
     toggleMoreParticipants() {
       this.isShowingMoreParticipants = !this.isShowingMoreParticipants;
     },
+    onClickCollapsedIcon() {
+      this.$emit('toggleSidebar');
+    },
   },
 };
 </script>
@@ -82,6 +85,7 @@
       data-container="body"
       data-placement="left"
       :title="participantLabel"
+      @click="onClickCollapsedIcon"
     >
       <i
         class="fa fa-users"
......
 <script>
 import Store from '../../stores/sidebar_store';
-import eventHub from '../../event_hub';
 import Flash from '../../../flash';
 import { __ } from '../../../locale';
 import subscriptions from './subscriptions.vue';
@@ -20,12 +19,6 @@
       store: new Store(),
     };
   },
-  created() {
-    eventHub.$on('toggleSubscription', this.onToggleSubscription);
-  },
-  beforeDestroy() {
-    eventHub.$off('toggleSubscription', this.onToggleSubscription);
-  },
   methods: {
     onToggleSubscription() {
       this.mediator.toggleSubscription()
@@ -42,6 +35,7 @@
     <subscriptions
       :loading="store.isFetching.subscriptions"
       :subscribed="store.subscribed"
+      @toggleSubscription="onToggleSubscription"
     />
   </div>
 </template>
@@ -47,8 +47,25 @@
     },
   },
   methods: {
+    /**
+     * We need to emit this event on both the component & eventHub
+     * for 2 dependencies:
+     *
+     * 1. eventHub: This component is used in the Issue Boards sidebar
+     *    where the component template is part of HAML
+     *    and event listeners are tied to the app's eventHub.
+     * 2. Component: This component is also used in Epics in EE
+     *    where listeners are tied to the component event.
+     */
     toggleSubscription() {
+      // App's eventHub event emission.
       eventHub.$emit('toggleSubscription', this.id);
+
+      // Component event emission.
+      this.$emit('toggleSubscription', this.id);
+    },
+    onClickCollapsedIcon() {
+      this.$emit('toggleSidebar');
     },
   },
 };
@@ -56,7 +73,10 @@
 <template>
   <div>
-    <div class="sidebar-collapsed-icon">
+    <div
+      class="sidebar-collapsed-icon"
+      @click="onClickCollapsedIcon"
+    >
       <span
         v-tooltip
         :title="notificationTooltip"
......
+<script>
 export default {
-  name: 'time-tracking-spent-only-pane',
+  name: 'TimeTrackingSpentOnlyPane',
   props: {
     timeSpentHumanReadable: {
       type: String,
       required: true,
     },
   },
-  template: `
+};
+</script>
+
+<template>
   <div class="time-tracking-spend-only-pane">
     <span class="bold">Spent:</span>
     {{ timeSpentHumanReadable }}
   </div>
-  `,
-};
+</template>
 <script>
 import TimeTrackingHelpState from './help_state.vue';
 import TimeTrackingCollapsedState from './collapsed_state.vue';
-import timeTrackingSpentOnlyPane from './spent_only_pane';
+import TimeTrackingSpentOnlyPane from './spent_only_pane.vue';
 import TimeTrackingNoTrackingPane from './no_tracking_pane.vue';
 import TimeTrackingEstimateOnlyPane from './estimate_only_pane.vue';
 import TimeTrackingComparisonPane from './comparison_pane.vue';
@@ -13,7 +13,7 @@ export default {
   components: {
     TimeTrackingCollapsedState,
     TimeTrackingEstimateOnlyPane,
-    'time-tracking-spent-only-pane': timeTrackingSpentOnlyPane,
+    TimeTrackingSpentOnlyPane,
     TimeTrackingNoTrackingPane,
     TimeTrackingComparisonPane,
     TimeTrackingHelpState,
......
@@ -317,6 +317,7 @@
     a {
       color: $gl-text-color;
       word-wrap: break-word;
+      word-break: break-word;
       margin-right: 2px;
     }
   }
@@ -462,6 +463,7 @@
 .issuable-header-text {
   padding-right: 35px;
+  word-break: break-word;
 
   > strong {
     font-weight: $gl-font-weight-bold;
......
@@ -44,6 +44,12 @@
     }
   }
 
+  .note-text {
+    table {
+      font-family: $font-family-sans-serif;
+    }
+  }
+
   table {
     width: 100%;
     font-family: $monospace_font;
......
@@ -549,6 +549,7 @@
   margin-bottom: 0;
   border-bottom: 1px solid $white-dark;
   padding: $gl-btn-padding 0;
+  min-height: 56px;
 }
 
 .multi-file-commit-panel-header-title {
@@ -602,14 +603,14 @@
   }
 }
 
-.multi-file-additions,
-.multi-file-additions-solid {
-  fill: $green-500;
+.multi-file-addition,
+.multi-file-addition-solid {
+  color: $green-500;
 }
 
 .multi-file-modified,
 .multi-file-modified-solid {
-  fill: $orange-500;
+  color: $orange-500;
 }
 
 .multi-file-commit-list-collapsed {
@@ -673,6 +674,24 @@
   }
 }
.multi-file-commit-panel-bottom {
position: relative;
.multi-file-commit-panel-success-message {
position: absolute;
top: 1px;
left: 3px;
bottom: 0;
right: 0;
z-index: 10;
background: $gray-light;
overflow: auto;
display: flex;
flex-direction: column;
justify-content: center;
}
}
 .dirty-diff {
   // !important need to override monaco inline style
   width: 4px !important;
@@ -972,6 +991,12 @@
   resize: none;
 }
.ide-tree-changes {
display: flex;
align-items: center;
font-size: 12px;
}
.ide-new-modal-label {
  line-height: 34px;
}
@@ -59,11 +59,14 @@ module Ci
     end
 
     def truncate(offset = 0)
-      self.append("", offset) if offset < size
+      raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
+      return if offset == size # Skip the following process as it doesn't affect anything
+
+      self.append("", offset)
     end
 
     def append(new_data, offset)
-      raise ArgumentError, 'Offset is out of range' if offset > data.bytesize || offset < 0
+      raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
       raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)
 
       set_data(data.byteslice(0, offset) + new_data)
@@ -130,7 +133,7 @@ module Ci
     def schedule_to_db
       return if db?
 
-      BuildTraceChunkFlushToDbWorker.perform_async(id)
+      Ci::BuildTraceChunkFlushWorker.perform_async(id)
     end
 
     def fullfilled?
......
 class Identity < ActiveRecord::Base
+  def self.uniqueness_scope
+    :provider
+  end
+
   include Sortable
   include CaseSensitivity
 
   belongs_to :user
 
   validates :provider, presence: true
-  validates :extern_uid, allow_blank: true, uniqueness: { scope: :provider, case_sensitive: false }
-  validates :user_id, uniqueness: { scope: :provider }
+  validates :extern_uid, allow_blank: true, uniqueness: { scope: uniqueness_scope, case_sensitive: false }
+  validates :user_id, uniqueness: { scope: uniqueness_scope }
 
   before_save :ensure_normalized_extern_uid, if: :extern_uid_changed?
   after_destroy :clear_user_synced_attributes, if: :user_synced_attributes_metadata_from_provider?
......
@@ -323,7 +323,7 @@ class MergeRequest < ActiveRecord::Base
   # updates `merge_jid` with the MergeWorker#jid.
   # This helps tracking enqueued and ongoing merge jobs.
   def merge_async(user_id, params)
-    jid = MergeWorker.perform_async(id, user_id, params)
+    jid = MergeWorker.perform_async(id, user_id, params.to_h)
 
     update_column(:merge_jid, jid)
   end
......
@@ -35,7 +35,7 @@
       = link_to avatar_icon_for_user(@user, 400), target: '_blank', rel: 'noopener noreferrer' do
         = image_tag avatar_icon_for_user(@user, 90), class: "avatar s90", alt: ''
-    .user-info
+    .user-info.prepend-left-default.append-right-default
       .cover-title
         = @user.name
......
@@ -66,7 +66,7 @@
   - pipeline_processing:pipeline_update
   - pipeline_processing:stage_update
   - pipeline_processing:update_head_pipeline_for_merge_request
-  - pipeline_processing:build_trace_chunk_flush_to_db
+  - pipeline_processing:ci_build_trace_chunk_flush
 
   - repository_check:repository_check_clear
   - repository_check:repository_check_single_repository
......
class BuildTraceChunkFlushToDbWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
def perform(build_trace_chunk_id)
Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk|
build_trace_chunk.use_database!
end
end
end
module Ci
class BuildTraceChunkFlushWorker
include ApplicationWorker
queue_namespace :pipeline_processing
def perform(build_trace_chunk_id)
::Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk|
build_trace_chunk.use_database!
end
end
end
end
---
title: Add loading icon padding for pipeline environments
merge_request: 18631
author: George Tsiolis
type: fixed
---
title: Add padding to profile description
merge_request: 18663
author: George Tsiolis
type: changed
---
title: Break issue title for board card title and issuable header text
merge_request: 18674
author: George Tsiolis
type: changed
---
title: Output some useful information when running the rails console
merge_request: 18697
author:
type: added
---
title: Change font for tables inside diff discussions
merge_request: 18660
author: George Tsiolis
type: changed
---
title: Add documentation about how to use variables to define deploy policies for
staging/production environments
merge_request: 18675
author:
type: other
---
title: Improve interaction on WebIDE commit panel
merge_request:
author:
type: changed
---
title: Move TimeTrackingSpentOnlyPane vue component
merge_request: 18710
author: George Tsiolis
type: performance
---
title: Update environment item action buttons icons
merge_request: 18632
author: George Tsiolis
type: changed
---
title: Gitaly handles repository forks by default
merge_request:
author:
type: other
# rubocop:disable Rails/Output
if defined?(Rails::Console)
# note that this will not print out when using `spring`
justify = 15
puts "-------------------------------------------------------------------------------------"
puts " Gitlab:".ljust(justify) + "#{Gitlab::VERSION} (#{Gitlab::REVISION})"
puts " Gitlab Shell:".ljust(justify) + Gitlab::Shell.new.version
puts " #{Gitlab::Database.adapter_name}:".ljust(justify) + Gitlab::Database.version
puts "-------------------------------------------------------------------------------------"
end
@@ -22,3 +22,16 @@ end.compact
 
 Rails.application.config.action_dispatch.trusted_proxies = (
   ['127.0.0.1', '::1'] + gitlab_trusted_proxies)
# A monkey patch to make trusted proxies work with Rails 5.0.
# Inspired by https://github.com/rails/rails/issues/5223#issuecomment-263778719
# Remove this monkey patch when upstream is fixed.
if Gitlab.rails5?
module TrustedProxyMonkeyPatch
def ip
@ip ||= (get_header("action_dispatch.remote_ip") || super).to_s
end
end
ActionDispatch::Request.send(:include, TrustedProxyMonkeyPatch)
end
@@ -495,6 +495,7 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac
 | `POSTGRES_PASSWORD` | The PostgreSQL password; defaults to `testing-password`. Set it to use a custom password. |
 | `POSTGRES_DB` | The PostgreSQL database name; defaults to the value of [`$CI_ENVIRONMENT_SLUG`](../../ci/variables/README.md#predefined-variables-environment-variables). Set it to use a custom database name. |
 | `BUILDPACK_URL` | The buildpack's full URL. It can point to either Git repositories or a tarball URL. For Git repositories, it is possible to point to a specific `ref`, for example `https://github.com/heroku/heroku-buildpack-ruby.git#v142` |
+| `STAGING_ENABLED` | From GitLab 10.8, this variable can be used to define a [deploy policy for staging and production environments](#deploy-policy-for-staging-and-production-environments). |
 
 TIP: **Tip:**
 Set up the replica variables using a
@@ -561,6 +562,22 @@ service:
   internalPort: 5000
 ```
#### Deploy policy for staging and production environments
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ci-yml/merge_requests/160)
in GitLab 10.8.
The normal behavior of Auto DevOps is to use Continuous Deployment, pushing
automatically to the `production` environment every time a new pipeline is run
on the default branch. However, there are cases where you might want to use a
staging environment and deploy to production manually. For this scenario, the
`STAGING_ENABLED` environment variable was introduced.
If `STAGING_ENABLED` is defined in your project (e.g., set `STAGING_ENABLED` to
`1` as a secret variable), then the application will be automatically deployed
to a `staging` environment, and a `production_manual` job will be created for
you when you're ready to manually deploy to production.
## Currently supported languages ## Currently supported languages
NOTE: **Note:** NOTE: **Note:**
......
@@ -5,10 +5,6 @@ module SharedGroup
     is_member_of(current_user.name, "Owned", Gitlab::Access::DEVELOPER)
   end
 
-  step '"John Doe" is owner of group "Owned"' do
-    is_member_of("John Doe", "Owned", Gitlab::Access::OWNER)
-  end
-
   step '"John Doe" is guest of group "Guest"' do
     is_member_of("John Doe", "Guest", Gitlab::Access::GUEST)
   end
......
@@ -48,10 +48,6 @@ module SharedPaths
     visit group_group_members_path(Group.find_by(name: "Owned"))
   end
 
-  step 'I visit group "Owned" settings page' do
-    visit edit_group_path(Group.find_by(name: "Owned"))
-  end
-
   step 'I visit group "Owned" projects page' do
     visit projects_group_path(Group.find_by(name: "Owned"))
   end
......
@@ -149,11 +149,11 @@ module API
         expose_url(api_v4_projects_path(id: project.id))
       end
 
-      expose :issues, if: -> (*args) { issues_available?(*args) } do |project|
+      expose :issues, if: -> (project, options) { issues_available?(project, options) } do |project|
         expose_url(api_v4_projects_issues_path(id: project.id))
       end
 
-      expose :merge_requests, if: -> (*args) { mrs_available?(*args) } do |project|
+      expose :merge_requests, if: -> (project, options) { mrs_available?(project, options) } do |project|
         expose_url(api_v4_projects_merge_requests_path(id: project.id))
       end
......
@@ -63,7 +63,8 @@ module Gitlab
       end
 
       def fork_repository(new_shard_name, new_repository_relative_path)
-        Gitlab::GitalyClient.migrate(:fork_repository) do |is_enabled|
+        Gitlab::GitalyClient.migrate(:fork_repository,
+                                     status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
           if is_enabled
             gitaly_fork_repository(new_shard_name, new_repository_relative_path)
           else
......
@@ -161,6 +161,7 @@ feature 'Issues > User uses quick actions', :js do
       before do
         target_project.add_master(user)
+        gitlab_sign_out
         sign_in(user)
         visit project_issue_path(project, issue)
       end
@@ -220,6 +221,7 @@ feature 'Issues > User uses quick actions', :js do
       before do
         target_project.add_master(user)
+        gitlab_sign_out
         sign_in(user)
         visit project_issue_path(project, issue)
       end
......
@@ -24,42 +24,10 @@ describe('IDE commit panel empty state', () => {
     resetStore(vm.$store);
   });
 
-  describe('statusSvg', () => {
-    it('uses noChangesStateSvgPath when commit message is empty', () => {
-      expect(vm.statusSvg).toBe('no-changes');
-      expect(vm.$el.querySelector('img').getAttribute('src')).toBe(
-        'no-changes',
-      );
-    });
-
-    it('uses committedStateSvgPath when commit message exists', done => {
-      vm.$store.state.lastCommitMsg = 'testing';
-
-      Vue.nextTick(() => {
-        expect(vm.statusSvg).toBe('committed-state');
-        expect(vm.$el.querySelector('img').getAttribute('src')).toBe(
-          'committed-state',
-        );
-
-        done();
-      });
-    });
-  });
-
   it('renders no changes text when last commit message is empty', () => {
     expect(vm.$el.textContent).toContain('No changes');
   });
 
-  it('renders last commit message when it exists', done => {
-    vm.$store.state.lastCommitMsg = 'testing commit message';
-
-    Vue.nextTick(() => {
-      expect(vm.$el.textContent).toContain('testing commit message');
-
-      done();
-    });
-  });
-
   describe('toggle button', () => {
     it('calls store action', () => {
       spyOn(vm, 'toggleRightPanelCollapsed');
......
import Vue from 'vue';
import store from '~/ide/stores';
import successMessage from '~/ide/components/commit_sidebar/success_message.vue';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { resetStore } from '../../helpers';
describe('IDE commit panel successful commit state', () => {
let vm;
beforeEach(() => {
const Component = Vue.extend(successMessage);
vm = createComponentWithStore(Component, store, {
committedStateSvgPath: 'committed-state',
});
vm.$mount();
});
afterEach(() => {
vm.$destroy();
resetStore(vm.$store);
});
it('renders last commit message when it exists', done => {
vm.$store.state.lastCommitMsg = 'testing commit message';
Vue.nextTick(() => {
expect(vm.$el.textContent).toContain('testing commit message');
done();
});
});
});
@@ -48,6 +48,33 @@ describe('RepoFile', () => {
     });
   });
describe('folder', () => {
it('renders changes count inside folder', () => {
const f = {
...file('folder'),
path: 'testing',
type: 'tree',
branchId: 'master',
projectId: 'project',
};
store.state.changedFiles.push({
...file('fileName'),
path: 'testing/fileName',
});
createComponent({
file: f,
level: 0,
});
const treeChangesEl = vm.$el.querySelector('.ide-tree-changes');
expect(treeChangesEl).not.toBeNull();
expect(treeChangesEl.textContent).toContain('1');
});
});
   describe('locked file', () => {
     let f;
@@ -72,8 +99,7 @@ describe('RepoFile', () => {
 
     it('renders a tooltip', () => {
       expect(
-        vm.$el.querySelector('.ide-file-name span:nth-child(2)').dataset
-          .originalTitle,
+        vm.$el.querySelector('.ide-file-name span:nth-child(2)').dataset.originalTitle,
       ).toContain('Locked by testuser');
     });
   });
......
@@ -398,6 +398,20 @@ describe('IDE store file actions', () => {
         })
         .catch(done.fail);
     });
it('bursts unused seal', done => {
store
.dispatch('changeFileContent', {
path: tmpFile.path,
content: 'content',
})
.then(() => {
expect(store.state.unusedSeal).toBe(false);
done();
})
.catch(done.fail);
});
}); });
describe('discardFileChanges', () => { describe('discardFileChanges', () => {
......
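(Editorial sketch, not part of this diff.) The 'bursts unused seal' spec above dispatches changeFileContent and expects state.unusedSeal to flip to false. A minimal, hypothetical sketch of the relevant part of such an action — the 'UPDATE_FILE_CONTENT' mutation name is assumed for illustration, not taken from this diff:

// Hypothetical sketch: burst the seal the first time file content changes.
export const changeFileContent = ({ commit, state }, { path, content }) => {
  // Assumed mutation that records the new content for the file at `path`.
  commit('UPDATE_FILE_CONTENT', { path, content });

  // Matches the expectation above: after the first change, unusedSeal is false.
  if (state.unusedSeal) {
    commit('BURST_UNUSED_SEAL');
  }
};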
...@@ -84,4 +84,67 @@ describe('IDE store getters', () => { ...@@ -84,4 +84,67 @@ describe('IDE store getters', () => {
expect(getters.allBlobs(localState)[0].name).toBe('blob'); expect(getters.allBlobs(localState)[0].name).toBe('blob');
}); });
}); });
describe('getChangesInFolder', () => {
it('returns length of changed files for a path', () => {
localState.changedFiles.push(
{
path: 'test/index',
name: 'index',
},
{
path: 'app/123',
name: '123',
},
);
expect(getters.getChangesInFolder(localState)('test')).toBe(1);
});
it('returns length of changed & staged files for a path', () => {
localState.changedFiles.push(
{
path: 'test/index',
name: 'index',
},
{
path: 'testing/123',
name: '123',
},
);
localState.stagedFiles.push(
{
path: 'test/123',
name: '123',
},
{
path: 'test/index',
name: 'index',
},
{
path: 'testing/12345',
name: '12345',
},
);
expect(getters.getChangesInFolder(localState)('test')).toBe(2);
});
it('returns length of changed & tempFiles files for a path', () => {
localState.changedFiles.push(
{
path: 'test/index',
name: 'index',
},
{
path: 'test/newfile',
name: 'newfile',
tempFile: true,
},
);
expect(getters.getChangesInFolder(localState)('test')).toBe(2);
});
});
}); });
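(Editorial sketch, not part of this diff.) The getChangesInFolder specs above pin down the counting rules: changed files under the folder count once, staged files add to the count unless the same path is already changed, and temp files count like any other changed file. A minimal sketch consistent with those cases — hypothetical; the filesInPath helper is introduced here purely for illustration:

// True when `file` lives somewhere inside the folder at `path` (illustrative helper).
const filesInPath = (path, file) => file.path.startsWith(`${path}/`);

// Hypothetical getter matching the specs above: changed files plus staged
// files under the folder, without double-counting paths present in both.
export const getChangesInFolder = state => path => {
  const changedCount = state.changedFiles.filter(f => filesInPath(path, f)).length;
  const stagedCount = state.stagedFiles.filter(
    f => filesInPath(path, f) && !state.changedFiles.some(changed => changed.path === f.path),
  ).length;

  return changedCount + stagedCount;
};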
...@@ -116,4 +116,14 @@ describe('Multi-file store mutations', () => { ...@@ -116,4 +116,14 @@ describe('Multi-file store mutations', () => {
expect(localState.fileFindVisible).toBe(true); expect(localState.fileFindVisible).toBe(true);
}); });
}); });
describe('BURST_UNUSED_SEAL', () => {
it('updates unusedSeal', () => {
expect(localState.unusedSeal).toBe(true);
mutations.BURST_UNUSED_SEAL(localState);
expect(localState.unusedSeal).toBe(false);
});
});
}); });
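(Editorial sketch, not part of this diff.) The BURST_UNUSED_SEAL spec above assumes the store starts with unusedSeal set to true and that the mutation flips it. A minimal sketch under that assumption — the createState name is hypothetical, not the literal store module from this diff:

// Hypothetical state/mutation pair consistent with the spec above.
export const createState = () => ({
  unusedSeal: true, // the "seal" stays intact until the user changes something
});

export const mutations = {
  BURST_UNUSED_SEAL(state) {
    state.unusedSeal = false;
  },
};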
...@@ -75,6 +75,14 @@ describe('text_utility', () => { ...@@ -75,6 +75,14 @@ describe('text_utility', () => {
'This is a text with html .', 'This is a text with html .',
); );
}); });
it('passes through with null string input', () => {
expect(textUtils.stripHtml(null, ' ')).toEqual(null);
});
it('passes through with undefined string input', () => {
expect(textUtils.stripHtml(undefined, ' ')).toEqual(undefined);
});
}); });
describe('convertToCamelCase', () => { describe('convertToCamelCase', () => {
......
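(Editorial sketch, not part of this diff.) The two new stripHtml cases above only require that nullish input is passed straight through. A minimal sketch of such a guard — hypothetical; the regex below is an illustrative stand-in for the existing tag-stripping logic, not the actual implementation:

// Illustrative only: return null/undefined untouched, otherwise strip tags.
export const stripHtml = (string, replace = '') => {
  if (string == null) return string; // covers both null and undefined
  return string.replace(/<[^>]*>/g, replace);
};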
...@@ -170,5 +170,19 @@ describe('Participants', function () { ...@@ -170,5 +170,19 @@ describe('Participants', function () {
expect(vm.isShowingMoreParticipants).toBe(true); expect(vm.isShowingMoreParticipants).toBe(true);
}); });
it('clicking on participants icon emits `toggleSidebar` event', () => {
vm = mountComponent(Participants, {
loading: false,
participants: PARTICIPANT_LIST,
numberOfLessParticipants: 2,
});
spyOn(vm, '$emit');
const participantsIconEl = vm.$el.querySelector('.sidebar-collapsed-icon');
participantsIconEl.click();
expect(vm.$emit).toHaveBeenCalledWith('toggleSidebar');
});
}); });
}); });
...@@ -3,7 +3,6 @@ import sidebarSubscriptions from '~/sidebar/components/subscriptions/sidebar_sub ...@@ -3,7 +3,6 @@ import sidebarSubscriptions from '~/sidebar/components/subscriptions/sidebar_sub
import SidebarMediator from '~/sidebar/sidebar_mediator'; import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarService from '~/sidebar/services/sidebar_service'; import SidebarService from '~/sidebar/services/sidebar_service';
import SidebarStore from '~/sidebar/stores/sidebar_store'; import SidebarStore from '~/sidebar/stores/sidebar_store';
import eventHub from '~/sidebar/event_hub';
import mountComponent from 'spec/helpers/vue_mount_component_helper'; import mountComponent from 'spec/helpers/vue_mount_component_helper';
import Mock from './mock_data'; import Mock from './mock_data';
...@@ -32,7 +31,7 @@ describe('Sidebar Subscriptions', function () { ...@@ -32,7 +31,7 @@ describe('Sidebar Subscriptions', function () {
mediator, mediator,
}); });
eventHub.$emit('toggleSubscription'); vm.onToggleSubscription();
expect(mediator.toggleSubscription).toHaveBeenCalled(); expect(mediator.toggleSubscription).toHaveBeenCalled();
}); });
......
import Vue from 'vue'; import Vue from 'vue';
import subscriptions from '~/sidebar/components/subscriptions/subscriptions.vue'; import subscriptions from '~/sidebar/components/subscriptions/subscriptions.vue';
import eventHub from '~/sidebar/event_hub';
import mountComponent from 'spec/helpers/vue_mount_component_helper'; import mountComponent from 'spec/helpers/vue_mount_component_helper';
describe('Subscriptions', function () { describe('Subscriptions', function () {
...@@ -39,4 +40,22 @@ describe('Subscriptions', function () { ...@@ -39,4 +40,22 @@ describe('Subscriptions', function () {
expect(vm.$refs.toggleButton.$el.querySelector('.project-feature-toggle')).toHaveClass('is-checked'); expect(vm.$refs.toggleButton.$el.querySelector('.project-feature-toggle')).toHaveClass('is-checked');
}); });
it('toggleSubscription method emits `toggleSubscription` event on eventHub and Component', () => {
vm = mountComponent(Subscriptions, { subscribed: true });
spyOn(eventHub, '$emit');
spyOn(vm, '$emit');
vm.toggleSubscription();
expect(eventHub.$emit).toHaveBeenCalledWith('toggleSubscription', jasmine.any(Object));
expect(vm.$emit).toHaveBeenCalledWith('toggleSubscription', jasmine.any(Object));
});
it('onClickCollapsedIcon method emits `toggleSidebar` event on component', () => {
vm = mountComponent(Subscriptions, { subscribed: true });
spyOn(vm, '$emit');
vm.onClickCollapsedIcon();
expect(vm.$emit).toHaveBeenCalledWith('toggleSidebar');
});
}); });
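(Editorial sketch, not part of this diff.) The new Subscriptions specs above call toggleSubscription and onClickCollapsedIcon directly and only check what gets emitted. A minimal sketch of methods consistent with them — hypothetical; the payload shape is assumed (the spec only requires some object), and eventHub is the '~/sidebar/event_hub' module the spec itself imports:

import eventHub from '~/sidebar/event_hub';

export default {
  props: {
    subscribed: { type: Boolean, required: false, default: null },
  },
  methods: {
    toggleSubscription() {
      // Emit on both the shared sidebar event hub and the component itself.
      const payload = { subscribed: this.subscribed }; // assumed payload shape
      eventHub.$emit('toggleSubscription', payload);
      this.$emit('toggleSubscription', payload);
    },
    onClickCollapsedIcon() {
      this.$emit('toggleSidebar');
    },
  },
};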
...@@ -4,678 +4,24 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do ...@@ -4,678 +4,24 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do
let(:build) { create(:ci_build) } let(:build) { create(:ci_build) }
let(:trace) { described_class.new(build) } let(:trace) { described_class.new(build) }
before do
stub_feature_flags(ci_enable_live_trace: true)
end
describe "associations" do describe "associations" do
it { expect(trace).to respond_to(:job) } it { expect(trace).to respond_to(:job) }
it { expect(trace).to delegate_method(:old_trace).to(:job) } it { expect(trace).to delegate_method(:old_trace).to(:job) }
end end
describe '#html' do context 'when live trace feature is disabled' do
before do
trace.set("12\n34")
end
it "returns formatted html" do
expect(trace.html).to eq("12<br>34")
end
it "returns last line of formatted html" do
expect(trace.html(last_lines: 1)).to eq("34")
end
end
describe '#raw' do
before do
trace.set("12\n34")
end
it "returns raw output" do
expect(trace.raw).to eq("12\n34")
end
it "returns last line of raw output" do
expect(trace.raw(last_lines: 1)).to eq("34")
end
end
describe '#extract_coverage' do
let(:regex) { '\(\d+.\d+\%\) covered' }
context 'matching coverage' do
before do
trace.set('Coverage 1033 / 1051 LOC (98.29%) covered')
end
it "returns valid coverage" do
expect(trace.extract_coverage(regex)).to eq("98.29")
end
end
context 'no coverage' do
before do
trace.set('No coverage')
end
it 'returns nil' do
expect(trace.extract_coverage(regex)).to be_nil
end
end
end
describe '#extract_sections' do
let(:log) { 'No sections' }
let(:sections) { trace.extract_sections }
before do
trace.set(log)
end
context 'no sections' do
it 'returns []' do
expect(trace.extract_sections).to eq([])
end
end
context 'multiple sections available' do
let(:log) { File.read(expand_fixture_path('trace/trace_with_sections')) }
let(:sections_data) do
[
{ name: 'prepare_script', lines: 2, duration: 3.seconds },
{ name: 'get_sources', lines: 4, duration: 1.second },
{ name: 'restore_cache', lines: 0, duration: 0.seconds },
{ name: 'download_artifacts', lines: 0, duration: 0.seconds },
{ name: 'build_script', lines: 2, duration: 1.second },
{ name: 'after_script', lines: 0, duration: 0.seconds },
{ name: 'archive_cache', lines: 0, duration: 0.seconds },
{ name: 'upload_artifacts', lines: 0, duration: 0.seconds }
]
end
it "returns valid sections" do
expect(sections).not_to be_empty
expect(sections.size).to eq(sections_data.size),
"expected #{sections_data.size} sections, got #{sections.size}"
buff = StringIO.new(log)
sections.each_with_index do |s, i|
expected = sections_data[i]
expect(s[:name]).to eq(expected[:name])
expect(s[:date_end] - s[:date_start]).to eq(expected[:duration])
buff.seek(s[:byte_start], IO::SEEK_SET)
length = s[:byte_end] - s[:byte_start]
lines = buff.read(length).count("\n")
expect(lines).to eq(expected[:lines])
end
end
end
context 'logs contains "section_start"' do
let(:log) { "section_start:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_end:1506417477:a_section\r\033[0K"}
it "returns only one section" do
expect(sections).not_to be_empty
expect(sections.size).to eq(1)
section = sections[0]
expect(section[:name]).to eq('a_section')
expect(section[:byte_start]).not_to eq(section[:byte_end]), "got an empty section"
end
end
context 'missing section_end' do
let(:log) { "section_start:1506417476:a_section\r\033[0KSome logs\nNo section_end\n"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'missing section_start' do
let(:log) { "Some logs\nNo section_start\nsection_end:1506417476:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'inverted section_start section_end' do
let(:log) { "section_end:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_start:1506417477:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
end
describe '#set' do
before do
trace.set("12")
end
it "returns trace" do
expect(trace.raw).to eq("12")
end
context 'overwrite trace' do
before do
trace.set("34")
end
it "returns new trace" do
expect(trace.raw).to eq("34")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'hides build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
describe '#append' do
before do
trace.set("1234")
end
it "returns correct trace" do
expect(trace.append("56", 4)).to eq(6)
expect(trace.raw).to eq("123456")
end
context 'tries to append trace at different offset' do
it "fails with append" do
expect(trace.append("56", 2)).to eq(-4)
expect(trace.raw).to eq("1234")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
describe '#read' do
shared_examples 'read successfully with IO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(IO)
end
end
end
shared_examples 'read successfully with StringIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(StringIO)
end
end
end
shared_examples 'read successfully with ChunkedIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(Gitlab::Ci::Trace::ChunkedIO)
end
end
end
shared_examples 'failed to read' do
it 'yields without source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_nil
end
end
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_id) exists' do
before do
expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_ci_id) exists' do
before do
expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when db trace exists' do
before do
build.send(:write_attribute, :trace, "data")
end
it_behaves_like 'read successfully with StringIO'
end
context 'when live trace exists' do
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it_behaves_like 'read successfully with ChunkedIO'
end
context 'when no sources exist' do
it_behaves_like 'failed to read'
end
end
describe 'trace handling' do
subject { trace.exist? }
context 'trace does not exist' do
it { expect(trace.exist?).to be(false) }
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
context 'when the trace artifact has been erased' do
before do
trace.erase!
end
it { is_expected.to be_falsy }
it 'removes associations' do
expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
end
end
end
context 'new trace path is used' do
before do
trace.send(:ensure_directory)
File.open(trace.send(:default_path), "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'deprecated path' do
let(:path) { trace.send(:deprecated_path) }
context 'with valid ci_id' do
before do
build.project.update(ci_id: 1000)
FileUtils.mkdir_p(File.dirname(path))
File.open(path, "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'without valid ci_id' do
it "does not return deprecated path" do
expect(path).to be_nil
end
end
end
context 'stored in database' do
before do
build.send(:write_attribute, :trace, "data")
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
it "returns database data" do
expect(trace.raw).to eq("data")
end
end
context 'stored in database' do
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
end
it "returns live trace data" do
expect(trace.raw).to eq("abc")
end
end
end
describe '#archive!' do
subject { trace.archive! }
before do
stub_feature_flags(ci_enable_live_trace: false)
end
shared_examples 'archive trace file' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(File.exist?(src_path)).to be_falsy
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace file stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(File.exist?(src_path)).to be_truthy
end
end
shared_examples 'archive trace in database' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(build.old_trace).to be_nil
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in database stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(build.old_trace).to eq(trace_content)
end
end
shared_examples 'archive trace file in ChunkedIO' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in ChunkedIO stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
expect(stream.read).to eq(trace_raw)
end
end
end
context 'when job does not have trace artifact' do
context 'when trace file stored in default path' do
let(:build) { create(:ci_build, :success, :trace_live) }
let(:src_path) { trace.read { |s| s.path } }
let(:src_checksum) { Digest::SHA256.file(src_path).hexdigest }
before do
stub_feature_flags(ci_enable_live_trace: false)
build # Initialize after setting the feature flag
src_path
src_checksum
end
it_behaves_like 'archive trace file'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace file stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace file stays intact', error: ActiveRecord::RecordInvalid
end
end
context 'when trace is stored in database' do
let(:build) { create(:ci_build, :success) }
let(:trace_content) { 'Sample trace' }
let(:src_checksum) { Digest::SHA256.hexdigest(trace_content) }
before do before do
stub_feature_flags(ci_enable_live_trace: false) stub_feature_flags(ci_enable_live_trace: false)
build # Initialize after setting the feature flag
trace_content
src_checksum
build.update_column(:trace, trace_content)
end end
it_behaves_like 'archive trace in database' it_behaves_like 'trace with disabled live trace feature'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end end
it_behaves_like 'source trace in database stays intact', error: Gitlab::Ci::Trace::ArchiveError context 'when live trace feature is enabled' do
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in database stays intact', error: ActiveRecord::RecordInvalid
end
context 'when there is a validation error on Ci::Build' do
before do
allow_any_instance_of(Ci::Build).to receive(:save).and_return(false)
allow_any_instance_of(Ci::Build).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
context "when erase old trace with 'save'" do
before do
build.send(:write_attribute, :trace, nil)
build.save
end
it 'old trace is not deleted' do
build.reload
expect(build.trace.raw).to eq(trace_content)
end
end
it_behaves_like 'archive trace in database'
end
end
context 'when trace is stored in ChunkedIO' do
let(:build) { create(:ci_build, :success, :trace_live) }
let(:trace_raw) { build.trace.raw }
let(:src_checksum) { Digest::SHA256.hexdigest(trace_raw) }
before do before do
stub_feature_flags(ci_enable_live_trace: true) stub_feature_flags(ci_enable_live_trace: true)
build # Initialize after setting the feature flag
trace_raw
src_checksum
end end
it_behaves_like 'archive trace file in ChunkedIO' it_behaves_like 'trace with enabled live trace feature'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid
end
end
end
context 'when job has trace artifact' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Already archived')
expect(build.job_artifacts_trace.file.exists?).to be_truthy
end
end
context 'when job is not finished yet' do
let!(:build) { create(:ci_build, :running, :trace_live) }
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Job is not finished yet')
expect(build.trace.exist?).to be_truthy
end
end
end end
end end
...@@ -75,7 +75,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -75,7 +75,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:value) { 'a' * described_class::CHUNK_SIZE } let(:value) { 'a' * described_class::CHUNK_SIZE }
it 'schedules stashing data' do it 'schedules stashing data' do
expect(BuildTraceChunkFlushToDbWorker).to receive(:perform_async).once expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
subject subject
end end
...@@ -112,7 +112,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -112,7 +112,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when fulfilled chunk size' do context 'when fulfilled chunk size' do
it 'does not schedule stashing data' do it 'does not schedule stashing data' do
expect(BuildTraceChunkFlushToDbWorker).not_to receive(:perform_async) expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
subject subject
end end
...@@ -141,11 +141,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -141,11 +141,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when offset is bigger than data size' do context 'when offset is bigger than data size' do
let(:offset) { data.bytesize + 1 } let(:offset) { data.bytesize + 1 }
it do it { expect { subject }.to raise_error('Offset is out of range') }
expect_any_instance_of(described_class).not_to receive(:append) { }
subject
end
end end
context 'when offset is 10' do context 'when offset is 10' do
......
...@@ -1213,7 +1213,7 @@ describe MergeRequest do ...@@ -1213,7 +1213,7 @@ describe MergeRequest do
it 'enqueues MergeWorker job and updates merge_jid' do it 'enqueues MergeWorker job and updates merge_jid' do
merge_request = create(:merge_request) merge_request = create(:merge_request)
user_id = double(:user_id) user_id = double(:user_id)
params = double(:params) params = {}
merge_jid = 'hash-123' merge_jid = 'hash-123'
expect(MergeWorker).to receive(:perform_async).with(merge_request.id, user_id, params) do expect(MergeWorker).to receive(:perform_async).with(merge_request.id, user_id, params) do
......
...@@ -2,7 +2,7 @@ require 'spec_helper' ...@@ -2,7 +2,7 @@ require 'spec_helper'
describe API::Runner, :clean_gitlab_redis_shared_state do describe API::Runner, :clean_gitlab_redis_shared_state do
include StubGitlabCalls include StubGitlabCalls
include ChunkedIOHelpers include RedisHelpers
let(:registration_token) { 'abcdefg123456' } let(:registration_token) { 'abcdefg123456' }
...@@ -867,29 +867,51 @@ describe API::Runner, :clean_gitlab_redis_shared_state do ...@@ -867,29 +867,51 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end end
end end
context 'when redis had an outage' do context 'when trace is patched' do
it "recovers" do before do
# GitLab-Runner patchs
patch_the_trace patch_the_trace
end
it 'has valid trace' do
expect(response.status).to eq(202)
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended' expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when redis data are flushed' do
before do
redis_shared_state_cleanup!
end
# GitLab-Rails encounters an outage on Redis it 'has empty trace' do
redis_shared_state_outage!
expect(job.reload.trace.raw).to eq '' expect(job.reload.trace.raw).to eq ''
end
# GitLab-Runner patches context 'when we perform partial patch' do
before do
patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32" })) patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32" }))
expect(response.status).to eq 202 end
it 'returns an error' do
expect(response.status).to eq(202)
expect(response.header).to have_key 'Range' expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-0' expect(response.header['Range']).to eq '0-0'
expect(job.reload.trace.raw).to eq '' expect(job.reload.trace.raw).to eq ''
end
end
# GitLab-Runner re-patches context 'when we resend full trace' do
before do
patch_the_trace('BUILD TRACE appended appended hello') patch_the_trace('BUILD TRACE appended appended hello')
end
it 'succeeds with updating trace' do
expect(response.status).to eq(202)
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello' expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
end end
end end
end end
end
end
context 'when Runner makes a force-patch' do context 'when Runner makes a force-patch' do
before do before do
......
...@@ -86,6 +86,7 @@ RSpec.configure do |config| ...@@ -86,6 +86,7 @@ RSpec.configure do |config|
config.include WaitForRequests, :js config.include WaitForRequests, :js
config.include LiveDebugger, :js config.include LiveDebugger, :js
config.include MigrationsHelpers, :migration config.include MigrationsHelpers, :migration
config.include RedisHelpers
if ENV['CI'] if ENV['CI']
# This includes the first try, i.e. tests will be run 4 times before failing. # This includes the first try, i.e. tests will be run 4 times before failing.
...@@ -146,21 +147,27 @@ RSpec.configure do |config| ...@@ -146,21 +147,27 @@ RSpec.configure do |config|
end end
config.around(:each, :clean_gitlab_redis_cache) do |example| config.around(:each, :clean_gitlab_redis_cache) do |example|
Gitlab::Redis::Cache.with(&:flushall) redis_cache_cleanup!
example.run example.run
Gitlab::Redis::Cache.with(&:flushall) redis_cache_cleanup!
end end
config.around(:each, :clean_gitlab_redis_shared_state) do |example| config.around(:each, :clean_gitlab_redis_shared_state) do |example|
Gitlab::Redis::SharedState.with(&:flushall) redis_shared_state_cleanup!
Sidekiq.redis(&:flushall)
example.run example.run
Gitlab::Redis::SharedState.with(&:flushall) redis_shared_state_cleanup!
Sidekiq.redis(&:flushall) end
config.around(:each, :clean_gitlab_redis_queues) do |example|
redis_queues_cleanup!
example.run
redis_queues_cleanup!
end end
# The :each scope runs "inside" the example, so this hook ensures the DB is in the # The :each scope runs "inside" the example, so this hook ensures the DB is in the
......
...@@ -8,9 +8,4 @@ module ChunkedIOHelpers ...@@ -8,9 +8,4 @@ module ChunkedIOHelpers
stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size) stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size)
stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size)
end end
def redis_shared_state_outage!
Gitlab::Redis::SharedState.with(&:flushall)
Sidekiq.redis(&:flushall)
end
end end
module RedisHelpers
# config/README.md
# Usage: performance enhancement
def redis_cache_cleanup!
Gitlab::Redis::Cache.with(&:flushall)
end
# Usage: SideKiq, Mailroom, CI Runner, Workhorse, push services
def redis_queues_cleanup!
Gitlab::Redis::Queues.with(&:flushall)
end
# Usage: session state, rate limiting
def redis_shared_state_cleanup!
Gitlab::Redis::SharedState.with(&:flushall)
end
end
shared_examples_for 'common trace features' do
describe '#html' do
before do
trace.set("12\n34")
end
it "returns formatted html" do
expect(trace.html).to eq("12<br>34")
end
it "returns last line of formatted html" do
expect(trace.html(last_lines: 1)).to eq("34")
end
end
describe '#raw' do
before do
trace.set("12\n34")
end
it "returns raw output" do
expect(trace.raw).to eq("12\n34")
end
it "returns last line of raw output" do
expect(trace.raw(last_lines: 1)).to eq("34")
end
end
describe '#extract_coverage' do
let(:regex) { '\(\d+.\d+\%\) covered' }
context 'matching coverage' do
before do
trace.set('Coverage 1033 / 1051 LOC (98.29%) covered')
end
it "returns valid coverage" do
expect(trace.extract_coverage(regex)).to eq("98.29")
end
end
context 'no coverage' do
before do
trace.set('No coverage')
end
it 'returns nil' do
expect(trace.extract_coverage(regex)).to be_nil
end
end
end
describe '#extract_sections' do
let(:log) { 'No sections' }
let(:sections) { trace.extract_sections }
before do
trace.set(log)
end
context 'no sections' do
it 'returns []' do
expect(trace.extract_sections).to eq([])
end
end
context 'multiple sections available' do
let(:log) { File.read(expand_fixture_path('trace/trace_with_sections')) }
let(:sections_data) do
[
{ name: 'prepare_script', lines: 2, duration: 3.seconds },
{ name: 'get_sources', lines: 4, duration: 1.second },
{ name: 'restore_cache', lines: 0, duration: 0.seconds },
{ name: 'download_artifacts', lines: 0, duration: 0.seconds },
{ name: 'build_script', lines: 2, duration: 1.second },
{ name: 'after_script', lines: 0, duration: 0.seconds },
{ name: 'archive_cache', lines: 0, duration: 0.seconds },
{ name: 'upload_artifacts', lines: 0, duration: 0.seconds }
]
end
it "returns valid sections" do
expect(sections).not_to be_empty
expect(sections.size).to eq(sections_data.size),
"expected #{sections_data.size} sections, got #{sections.size}"
buff = StringIO.new(log)
sections.each_with_index do |s, i|
expected = sections_data[i]
expect(s[:name]).to eq(expected[:name])
expect(s[:date_end] - s[:date_start]).to eq(expected[:duration])
buff.seek(s[:byte_start], IO::SEEK_SET)
length = s[:byte_end] - s[:byte_start]
lines = buff.read(length).count("\n")
expect(lines).to eq(expected[:lines])
end
end
end
context 'logs contains "section_start"' do
let(:log) { "section_start:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_end:1506417477:a_section\r\033[0K"}
it "returns only one section" do
expect(sections).not_to be_empty
expect(sections.size).to eq(1)
section = sections[0]
expect(section[:name]).to eq('a_section')
expect(section[:byte_start]).not_to eq(section[:byte_end]), "got an empty section"
end
end
context 'missing section_end' do
let(:log) { "section_start:1506417476:a_section\r\033[0KSome logs\nNo section_end\n"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'missing section_start' do
let(:log) { "Some logs\nNo section_start\nsection_end:1506417476:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'inverted section_start section_end' do
let(:log) { "section_end:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_start:1506417477:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
end
describe '#set' do
before do
trace.set("12")
end
it "returns trace" do
expect(trace.raw).to eq("12")
end
context 'overwrite trace' do
before do
trace.set("34")
end
it "returns new trace" do
expect(trace.raw).to eq("34")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'hides build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
describe '#append' do
before do
trace.set("1234")
end
it "returns correct trace" do
expect(trace.append("56", 4)).to eq(6)
expect(trace.raw).to eq("123456")
end
context 'tries to append trace at different offset' do
it "fails with append" do
expect(trace.append("56", 2)).to eq(-4)
expect(trace.raw).to eq("1234")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
end
shared_examples_for 'trace with disabled live trace feature' do
it_behaves_like 'common trace features'
describe '#read' do
shared_examples 'read successfully with IO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(IO)
end
end
end
shared_examples 'read successfully with StringIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(StringIO)
end
end
end
shared_examples 'failed to read' do
it 'yields without source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_nil
end
end
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_id) exists' do
before do
expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_ci_id) exists' do
before do
expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when db trace exists' do
before do
build.send(:write_attribute, :trace, "data")
end
it_behaves_like 'read successfully with StringIO'
end
context 'when no sources exist' do
it_behaves_like 'failed to read'
end
end
describe 'trace handling' do
subject { trace.exist? }
context 'trace does not exist' do
it { expect(trace.exist?).to be(false) }
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
context 'when the trace artifact has been erased' do
before do
trace.erase!
end
it { is_expected.to be_falsy }
it 'removes associations' do
expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
end
end
end
context 'new trace path is used' do
before do
trace.send(:ensure_directory)
File.open(trace.send(:default_path), "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'deprecated path' do
let(:path) { trace.send(:deprecated_path) }
context 'with valid ci_id' do
before do
build.project.update(ci_id: 1000)
FileUtils.mkdir_p(File.dirname(path))
File.open(path, "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'without valid ci_id' do
it "does not return deprecated path" do
expect(path).to be_nil
end
end
end
context 'stored in database' do
before do
build.send(:write_attribute, :trace, "data")
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
it "returns database data" do
expect(trace.raw).to eq("data")
end
end
end
describe '#archive!' do
subject { trace.archive! }
shared_examples 'archive trace file' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(File.exist?(src_path)).to be_falsy
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace file stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(File.exist?(src_path)).to be_truthy
end
end
shared_examples 'archive trace in database' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(build.old_trace).to be_nil
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in database stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(build.old_trace).to eq(trace_content)
end
end
context 'when job does not have trace artifact' do
context 'when trace file stored in default path' do
let!(:build) { create(:ci_build, :success, :trace_live) }
let!(:src_path) { trace.read { |s| s.path } }
let!(:src_checksum) { Digest::SHA256.file(src_path).hexdigest }
it_behaves_like 'archive trace file'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace file stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace file stays intact', error: ActiveRecord::RecordInvalid
end
end
context 'when trace is stored in database' do
let(:build) { create(:ci_build, :success) }
let(:trace_content) { 'Sample trace' }
let(:src_checksum) { Digest::SHA256.hexdigest(trace_content) }
before do
build.update_column(:trace, trace_content)
end
it_behaves_like 'archive trace in database'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace in database stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in database stays intact', error: ActiveRecord::RecordInvalid
end
context 'when there is a validation error on Ci::Build' do
before do
allow_any_instance_of(Ci::Build).to receive(:save).and_return(false)
allow_any_instance_of(Ci::Build).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
context "when erase old trace with 'save'" do
before do
build.send(:write_attribute, :trace, nil)
build.save
end
it 'old trace is not deleted' do
build.reload
expect(build.trace.raw).to eq(trace_content)
end
end
it_behaves_like 'archive trace in database'
end
end
end
context 'when job has trace artifact' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Already archived')
expect(build.job_artifacts_trace.file.exists?).to be_truthy
end
end
context 'when job is not finished yet' do
let!(:build) { create(:ci_build, :running, :trace_live) }
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Job is not finished yet')
expect(build.trace.exist?).to be_truthy
end
end
end
end
shared_examples_for 'trace with enabled live trace feature' do
it_behaves_like 'common trace features'
describe '#read' do
shared_examples 'read successfully with IO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(IO)
end
end
end
shared_examples 'read successfully with ChunkedIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(Gitlab::Ci::Trace::ChunkedIO)
end
end
end
shared_examples 'failed to read' do
it 'yields without source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_nil
end
end
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it_behaves_like 'read successfully with IO'
end
context 'when live trace exists' do
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it_behaves_like 'read successfully with ChunkedIO'
end
context 'when no sources exist' do
it_behaves_like 'failed to read'
end
end
describe 'trace handling' do
subject { trace.exist? }
context 'trace does not exist' do
it { expect(trace.exist?).to be(false) }
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
context 'when the trace artifact has been erased' do
before do
trace.erase!
end
it { is_expected.to be_falsy }
it 'removes associations' do
expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
end
end
end
context 'stored in live trace' do
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
end
it "returns live trace data" do
expect(trace.raw).to eq("abc")
end
end
end
describe '#archive!' do
subject { trace.archive! }
shared_examples 'archive trace file in ChunkedIO' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in ChunkedIO stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
expect(stream.read).to eq(trace_raw)
end
end
end
context 'when job does not have trace artifact' do
context 'when trace is stored in ChunkedIO' do
let!(:build) { create(:ci_build, :success, :trace_live) }
let!(:trace_raw) { build.trace.raw }
let!(:src_checksum) { Digest::SHA256.hexdigest(trace_raw) }
it_behaves_like 'archive trace file in ChunkedIO'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid
end
end
end
context 'when job has trace artifact' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Already archived')
expect(build.job_artifacts_trace.file.exists?).to be_truthy
end
end
context 'when job is not finished yet' do
let!(:build) { create(:ci_build, :running, :trace_live) }
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Job is not finished yet')
expect(build.trace.exist?).to be_truthy
end
end
end
end