Commit 8fd76a75 authored by Shinya Maeda

Merge branch 'live-trace-v2' into live-trace-v2-efficient-destroy-all

parents 07375df1 8b47980e
......@@ -9,6 +9,10 @@ terms.
[DCO + License](https://gitlab.com/gitlab-org/dco/blob/master/README.md)
All Documentation content that resides under the [doc/ directory](/doc) of this
repository is licensed under Creative Commons:
[CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/).
_This notice should stay as the first item in the CONTRIBUTING.md file._
---
......
......@@ -33,7 +33,7 @@ gem 'grape-route-helpers', '~> 2.1.0'
gem 'faraday', '~> 0.12'
# Authentication libraries
gem 'devise', '~> 4.2'
gem 'devise', '~> 4.4'
gem 'doorkeeper', '~> 4.3'
gem 'doorkeeper-openid_connect', '~> 1.3'
gem 'omniauth', '~> 1.8'
......@@ -41,7 +41,7 @@ gem 'omniauth-auth0', '~> 2.0.0'
gem 'omniauth-azure-oauth2', '~> 0.0.9'
gem 'omniauth-cas3', '~> 1.1.4'
gem 'omniauth-facebook', '~> 4.0.0'
gem 'omniauth-github', '~> 1.1.1'
gem 'omniauth-github', '~> 1.3'
gem 'omniauth-gitlab', '~> 1.0.2'
gem 'omniauth-google-oauth2', '~> 0.5.3'
gem 'omniauth-kerberos', '~> 0.3.0', group: :kerberos
......@@ -90,7 +90,7 @@ gem 'github-linguist', '~> 5.3.3', require: 'linguist'
# API
gem 'grape', '~> 1.0'
gem 'grape-entity', '~> 0.6.0'
gem 'grape-entity', '~> 0.7.1'
gem 'rack-cors', '~> 1.0.0', require: 'rack/cors'
# Disable strong_params so that Mash does not respond to :permitted?
......
......@@ -143,7 +143,7 @@ GEM
connection_pool (2.2.1)
crack (0.4.3)
safe_yaml (~> 1.0.0)
crass (1.0.3)
crass (1.0.4)
creole (0.5.0)
css_parser (1.5.0)
addressable
......@@ -162,10 +162,10 @@ GEM
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
device_detector (1.0.0)
devise (4.2.0)
devise (4.4.3)
bcrypt (~> 3.0)
orm_adapter (~> 0.1)
railties (>= 4.1.0, < 5.1)
railties (>= 4.1.0, < 6.0)
responders
warden (~> 1.2.3)
devise-two-factor (3.0.0)
......@@ -366,8 +366,8 @@ GEM
rack (>= 1.3.0)
rack-accept
virtus (>= 1.0.0)
grape-entity (0.6.0)
activesupport
grape-entity (0.7.1)
activesupport (>= 4.0)
multi_json (>= 1.3.2)
grape-route-helpers (2.1.0)
activesupport
......@@ -546,9 +546,9 @@ GEM
omniauth (~> 1.2)
omniauth-facebook (4.0.0)
omniauth-oauth2 (~> 1.2)
omniauth-github (1.1.2)
omniauth (~> 1.0)
omniauth-oauth2 (~> 1.1)
omniauth-github (1.3.0)
omniauth (~> 1.5)
omniauth-oauth2 (>= 1.4.0, < 2.0)
omniauth-gitlab (1.0.2)
omniauth (~> 1.0)
omniauth-oauth2 (~> 1.0)
......@@ -646,7 +646,7 @@ GEM
pry (>= 0.9.10)
public_suffix (3.0.2)
pyu-ruby-sasl (0.0.3.3)
rack (1.6.9)
rack (1.6.10)
rack-accept (0.4.5)
rack (>= 0.4)
rack-attack (4.4.1)
......@@ -694,7 +694,7 @@ GEM
rainbow (2.2.2)
rake
raindrops (0.18.0)
rake (12.3.0)
rake (12.3.1)
rb-fsevent (0.10.2)
rb-inotify (0.9.10)
ffi (>= 0.5.0, < 2)
......@@ -735,8 +735,9 @@ GEM
declarative-option (< 0.2.0)
uber (< 0.2.0)
request_store (1.3.1)
responders (2.3.0)
railties (>= 4.2.0, < 5.1)
responders (2.4.0)
actionpack (>= 4.2.0, < 5.3)
railties (>= 4.2.0, < 5.3)
rest-client (2.0.2)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
......@@ -966,7 +967,7 @@ GEM
descendants_tracker (~> 0.0, >= 0.0.3)
equalizer (~> 0.0, >= 0.0.9)
vmstat (2.3.0)
warden (1.2.6)
warden (1.2.7)
rack (>= 1.0)
webmock (2.3.2)
addressable (>= 2.3.6)
......@@ -1028,7 +1029,7 @@ DEPENDENCIES
deckar01-task_list (= 2.0.0)
default_value_for (~> 3.0.0)
device_detector
devise (~> 4.2)
devise (~> 4.4)
devise-two-factor (~> 3.0.0)
diffy (~> 3.1.0)
doorkeeper (~> 4.3)
......@@ -1072,7 +1073,7 @@ DEPENDENCIES
google-protobuf (= 3.5.1)
gpgme
grape (~> 1.0)
grape-entity (~> 0.6.0)
grape-entity (~> 0.7.1)
grape-route-helpers (~> 2.1.0)
grape_logging (~> 1.7)
grpc (~> 1.11.0)
......@@ -1113,7 +1114,7 @@ DEPENDENCIES
omniauth-azure-oauth2 (~> 0.0.9)
omniauth-cas3 (~> 1.1.4)
omniauth-facebook (~> 4.0.0)
omniauth-github (~> 1.1.1)
omniauth-github (~> 1.3)
omniauth-gitlab (~> 1.0.2)
omniauth-google-oauth2 (~> 0.5.3)
omniauth-kerberos (~> 0.3.0)
......
......@@ -162,6 +162,7 @@ GEM
activerecord (>= 3.2.0, < 5.2)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
device_detector (1.0.1)
devise (4.4.1)
bcrypt (~> 3.0)
orm_adapter (~> 0.1)
......@@ -375,7 +376,7 @@ GEM
rake
grape_logging (1.7.0)
grape
grpc (1.10.0)
grpc (1.11.0)
google-protobuf (~> 3.1)
googleapis-common-protos-types (~> 1.0.0)
googleauth (>= 0.5.1, < 0.7)
......@@ -554,9 +555,6 @@ GEM
jwt (>= 1.5)
omniauth (>= 1.1.1)
omniauth-oauth2 (>= 1.5)
omniauth-jwt (0.0.2)
jwt
omniauth (~> 1.1)
omniauth-kerberos (0.3.0)
omniauth-multipassword
timfel-krb5-auth (~> 0.8)
......@@ -1033,6 +1031,7 @@ DEPENDENCIES
database_cleaner (~> 1.5.0)
deckar01-task_list (= 2.0.0)
default_value_for (~> 3.0.5)
device_detector
devise (~> 4.2)
devise-two-factor (~> 3.0.0)
diffy (~> 3.1.0)
......@@ -1080,7 +1079,7 @@ DEPENDENCIES
grape-entity (~> 0.6.0)
grape-route-helpers (~> 2.1.0)
grape_logging (~> 1.7)
grpc (~> 1.10.0)
grpc (~> 1.11.0)
haml_lint (~> 0.26.0)
hamlit (~> 2.6.1)
hashie-forbidden_attributes
......@@ -1121,7 +1120,6 @@ DEPENDENCIES
omniauth-github (~> 1.1.1)
omniauth-gitlab (~> 1.0.2)
omniauth-google-oauth2 (~> 0.5.3)
omniauth-jwt (~> 0.0.2)
omniauth-kerberos (~> 0.3.0)
omniauth-oauth2-generic (~> 0.2.2)
omniauth-saml (~> 1.10)
......
......@@ -5,3 +5,8 @@ Permission is hereby granted, free of charge, to any person obtaining a copy of
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
---
All Documentation content that resides under the doc/ directory of this
repository is licensed under Creative Commons: CC BY-SA 4.0.
......@@ -43,6 +43,7 @@
<div class="environments-container">
<loading-icon
class="prepend-top-default"
label="Loading environments"
v-if="isLoading"
size="3"
......
<script>
import playIconSvg from 'icons/_icon_play.svg';
import Icon from '~/vue_shared/components/icon.vue';
import eventHub from '../event_hub';
import loadingIcon from '../../vue_shared/components/loading_icon.vue';
import tooltip from '../../vue_shared/directives/tooltip';
......@@ -8,9 +8,9 @@
directives: {
tooltip,
},
components: {
loadingIcon,
Icon,
},
props: {
actions: {
......@@ -19,20 +19,16 @@
default: () => [],
},
},
data() {
return {
playIconSvg,
isLoading: false,
};
},
computed: {
title() {
return 'Deploy to...';
},
},
methods: {
onClickAction(endpoint) {
this.isLoading = true;
......@@ -65,7 +61,10 @@
:disabled="isLoading"
>
<span>
<span v-html="playIconSvg"></span>
<icon
name="play"
:size="12"
/>
<i
class="fa fa-caret-down"
aria-hidden="true"
......@@ -86,7 +85,10 @@
:class="{ disabled: isActionDisabled(action) }"
:disabled="isActionDisabled(action)"
>
<span v-html="playIconSvg"></span>
<icon
name="play"
:size="12"
/>
<span>
{{ action.name }}
</span>
......
<script>
import Icon from '~/vue_shared/components/icon.vue';
import tooltip from '../../vue_shared/directives/tooltip';
import { s__ } from '../../locale';
......@@ -6,6 +7,9 @@
* Renders the external url link in environments table.
*/
export default {
components: {
Icon,
},
directives: {
tooltip,
},
......@@ -15,7 +19,6 @@
required: true,
},
},
computed: {
title() {
return s__('Environments|Open');
......@@ -34,10 +37,9 @@
:aria-label="title"
:href="externalUrl"
>
<i
class="fa fa-external-link"
aria-hidden="true"
>
</i>
<icon
name="external-link"
:size="12"
/>
</a>
</template>
......@@ -2,20 +2,22 @@
/**
* Renders the Monitoring (Metrics) link in environments table.
*/
import Icon from '~/vue_shared/components/icon.vue';
import tooltip from '../../vue_shared/directives/tooltip';
export default {
components: {
Icon,
},
directives: {
tooltip,
},
props: {
monitoringUrl: {
type: String,
required: true,
},
},
computed: {
title() {
return 'Monitoring';
......@@ -33,10 +35,9 @@
:title="title"
:aria-label="title"
>
<i
class="fa fa-area-chart"
aria-hidden="true"
>
</i>
<icon
name="chart"
:size="12"
/>
</a>
</template>
......@@ -12,7 +12,6 @@
components: {
loadingIcon,
},
props: {
retryUrl: {
type: String,
......@@ -24,13 +23,11 @@
default: true,
},
},
data() {
return {
isLoading: false,
};
},
methods: {
onClick() {
this.isLoading = true;
......
......@@ -3,14 +3,16 @@
* Renders a terminal button to open a web terminal.
* Used in environments table.
*/
import terminalIconSvg from 'icons/_icon_terminal.svg';
import Icon from '~/vue_shared/components/icon.vue';
import tooltip from '../../vue_shared/directives/tooltip';
export default {
components: {
Icon,
},
directives: {
tooltip,
},
props: {
terminalPath: {
type: String,
......@@ -18,13 +20,6 @@
default: '',
},
},
data() {
return {
terminalIconSvg,
};
},
computed: {
title() {
return 'Terminal';
......@@ -40,7 +35,10 @@
:title="title"
:aria-label="title"
:href="terminalPath"
v-html="terminalIconSvg"
>
<icon
name="terminal"
:size="12"
/>
</a>
</template>
......@@ -43,7 +43,7 @@ export default {
return `${this.changedIcon}-solid`;
},
changedIconClass() {
return `multi-${this.changedIcon} prepend-left-5 pull-left`;
return `multi-${this.changedIcon} pull-left`;
},
tooltipTitle() {
if (!this.showTooltip) return undefined;
......@@ -79,13 +79,7 @@ export default {
class="ide-file-changed-icon"
>
<icon
v-if="file.staged && showStagedIcon"
:name="stagedIcon"
:size="12"
:css-classes="changedIconClass"
/>
<icon
v-if="file.changed || file.tempFile || (file.staged && !showStagedIcon)"
v-if="file.changed || file.tempFile || file.staged"
:name="changedIcon"
:size="12"
:css-classes="changedIconClass"
......
......@@ -15,17 +15,10 @@ export default {
type: String,
required: true,
},
committedStateSvgPath: {
type: String,
required: true,
},
},
computed: {
...mapState(['lastCommitMsg', 'rightPanelCollapsed']),
...mapState(['lastCommitMsg', 'rightPanelCollapsed', 'changedFiles', 'stagedFiles']),
...mapGetters(['collapseButtonIcon', 'collapseButtonTooltip']),
statusSvg() {
return this.lastCommitMsg ? this.committedStateSvgPath : this.noChangesStateSvgPath;
},
},
methods: {
...mapActions(['toggleRightPanelCollapsed']),
......@@ -35,6 +28,7 @@ export default {
<template>
<div
v-if="!lastCommitMsg"
class="multi-file-commit-panel-section ide-commit-empty-state js-empty-state"
>
<header
......@@ -64,12 +58,11 @@ export default {
v-if="!rightPanelCollapsed"
>
<div class="svg-content svg-80">
<img :src="statusSvg" />
<img :src="noChangesStateSvgPath" />
</div>
<div class="append-right-default prepend-left-default">
<div
class="text-content text-center"
v-if="!lastCommitMsg"
>
<h4>
{{ __('No changes') }}
......@@ -78,15 +71,6 @@ export default {
{{ __('Edit files in the editor and commit changes here') }}
</p>
</div>
<div
class="text-content text-center"
v-else
>
<h4>
{{ __('All changes are committed') }}
</h4>
<p v-html="lastCommitMsg"></p>
</div>
</div>
</div>
</div>
......
......@@ -36,7 +36,7 @@ export default {
return this.file.tempFile ? `file-addition${prefix}` : `file-modified${prefix}`;
},
iconClass() {
return `multi-file-${this.file.tempFile ? 'additions' : 'modified'} append-right-8`;
return `multi-file-${this.file.tempFile ? 'addition' : 'modified'} append-right-8`;
},
},
methods: {
......
<script>
import { mapState } from 'vuex';
export default {
props: {
committedStateSvgPath: {
type: String,
required: true,
},
},
computed: {
...mapState(['lastCommitMsg']),
},
};
</script>
<template>
<div
class="multi-file-commit-panel-success-message"
aria-live="assertive"
>
<div class="svg-content svg-80">
<img
:src="committedStateSvgPath"
alt=""
/>
</div>
<div class="append-right-default prepend-left-default">
<div
class="text-content text-center"
>
<h4>
{{ __('All changes are committed') }}
</h4>
<p v-html="lastCommitMsg"></p>
</div>
</div>
</div>
</template>
......@@ -7,6 +7,7 @@ import LoadingButton from '~/vue_shared/components/loading_button.vue';
import CommitFilesList from './commit_sidebar/list.vue';
import EmptyState from './commit_sidebar/empty_state.vue';
import CommitMessageField from './commit_sidebar/message_field.vue';
import SuccessMessage from './commit_sidebar/success_message.vue';
import * as consts from '../stores/modules/commit/constants';
import Actions from './commit_sidebar/actions.vue';
......@@ -16,6 +17,7 @@ export default {
Icon,
CommitFilesList,
EmptyState,
SuccessMessage,
Actions,
LoadingButton,
CommitMessageField,
......@@ -34,9 +36,15 @@ export default {
},
},
computed: {
...mapState(['changedFiles', 'stagedFiles', 'rightPanelCollapsed']),
showStageUnstageArea() {
return !!(this.someUncommitedChanges || this.lastCommitMsg || !this.unusedSeal);
},
someUncommitedChanges() {
return !!(this.changedFiles.length || this.stagedFiles.length);
},
...mapState(['changedFiles', 'stagedFiles', 'rightPanelCollapsed', 'lastCommitMsg', 'unusedSeal']),
...mapState('commit', ['commitMessage', 'submitCommitLoading']),
...mapGetters('commit', ['commitButtonDisabled', 'discardDraftButtonDisabled', 'branchName']),
...mapGetters('commit', ['commitButtonDisabled', 'discardDraftButtonDisabled']),
},
methods: {
...mapActions('commit', [
......@@ -69,7 +77,7 @@ export default {
</template>
</deprecated-modal>
<template
v-if="changedFiles.length || stagedFiles.length"
v-if="showStageUnstageArea"
>
<commit-files-list
icon-name="unstaged"
......@@ -89,11 +97,23 @@ export default {
:show-toggle="false"
:staged-list="true"
/>
</template>
<empty-state
v-if="unusedSeal"
:no-changes-state-svg-path="noChangesStateSvgPath"
/>
<div
class="multi-file-commit-panel-bottom"
>
<form
class="form-horizontal multi-file-commit-form"
@submit.prevent.stop="commitChanges"
v-if="!rightPanelCollapsed"
>
<success-message
v-if="lastCommitMsg && !someUncommitedChanges"
:committed-state-svg-path="committedStateSvgPath"
/>
<commit-message-field
:text="commitMessage"
@input="updateCommitMessage"
......@@ -117,11 +137,6 @@ export default {
</button>
</div>
</form>
</template>
<empty-state
v-else
:no-changes-state-svg-path="noChangesStateSvgPath"
:committed-state-svg-path="committedStateSvgPath"
/>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex';
import skeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';
import fileIcon from '~/vue_shared/components/file_icon.vue';
import { mapActions, mapGetters } from 'vuex';
import { n__, __, sprintf } from '~/locale';
import tooltip from '~/vue_shared/directives/tooltip';
import SkeletonLoadingContainer from '~/vue_shared/components/skeleton_loading_container.vue';
import Icon from '~/vue_shared/components/icon.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import router from '../ide_router';
import newDropdown from './new_dropdown/index.vue';
import fileStatusIcon from './repo_file_status_icon.vue';
import changedFileIcon from './changed_file_icon.vue';
import mrFileIcon from './mr_file_icon.vue';
import NewDropdown from './new_dropdown/index.vue';
import FileStatusIcon from './repo_file_status_icon.vue';
import ChangedFileIcon from './changed_file_icon.vue';
import MrFileIcon from './mr_file_icon.vue';
export default {
name: 'RepoFile',
directives: {
tooltip,
},
components: {
skeletonLoadingContainer,
newDropdown,
fileStatusIcon,
fileIcon,
changedFileIcon,
mrFileIcon,
SkeletonLoadingContainer,
NewDropdown,
FileStatusIcon,
FileIcon,
ChangedFileIcon,
MrFileIcon,
Icon,
},
props: {
file: {
......@@ -29,6 +36,34 @@ export default {
},
},
computed: {
...mapGetters([
'getChangesInFolder',
'getUnstagedFilesCountForPath',
'getStagedFilesCountForPath',
]),
folderUnstagedCount() {
return this.getUnstagedFilesCountForPath(this.file.path);
},
folderStagedCount() {
return this.getStagedFilesCountForPath(this.file.path);
},
changesCount() {
return this.getChangesInFolder(this.file.path);
},
folderChangesTooltip() {
if (this.changesCount === 0) return undefined;
if (this.folderUnstagedCount > 0 && this.folderStagedCount === 0) {
return n__('%d unstaged change', '%d unstaged changes', this.folderUnstagedCount);
} else if (this.folderUnstagedCount === 0 && this.folderStagedCount > 0) {
return n__('%d staged change', '%d staged changes', this.folderStagedCount);
}
return sprintf(__('%{unstaged} unstaged and %{staged} staged changes'), {
unstaged: this.folderUnstagedCount,
staged: this.folderStagedCount,
});
},
isTree() {
return this.file.type === 'tree';
},
......@@ -48,10 +83,19 @@ export default {
'is-open': this.file.opened,
};
},
showTreeChangesCount() {
return this.isTree && this.changesCount > 0 && !this.file.opened;
},
showChangedFileIcon() {
return this.file.changed || this.file.tempFile || this.file.staged;
},
},
updated() {
if (this.file.type === 'blob' && this.file.active) {
this.$el.scrollIntoView();
this.$el.scrollIntoView({
behavior: 'smooth',
block: 'nearest',
});
}
},
methods: {
......@@ -101,8 +145,23 @@ export default {
<mr-file-icon
v-if="file.mrChange"
/>
<span
v-if="showTreeChangesCount"
class="ide-tree-changes"
>
{{ changesCount }}
<icon
v-tooltip
:title="folderChangesTooltip"
data-container="body"
data-placement="right"
name="file-modified"
:size="12"
css-classes="prepend-left-5 multi-file-modified"
/>
</span>
<changed-file-icon
v-if="file.changed || file.tempFile || file.staged"
v-else-if="showChangedFileIcon"
:file="file"
:show-tooltip="true"
:show-staged-icon="true"
......
......@@ -149,6 +149,12 @@ export const updateTempFlagForEntry = ({ commit, dispatch, state }, { file, temp
export const toggleFileFinder = ({ commit }, fileFindVisible) =>
commit(types.TOGGLE_FILE_FINDER, fileFindVisible);
export const burstUnusedSeal = ({ state, commit }) => {
if (state.unusedSeal) {
commit(types.BURST_UNUSED_SEAL);
}
};
export * from './actions/tree';
export * from './actions/file';
export * from './actions/project';
......
......@@ -117,7 +117,7 @@ export const getRawFileData = ({ state, commit, dispatch }, { path, baseSha }) =
});
};
export const changeFileContent = ({ state, commit }, { path, content }) => {
export const changeFileContent = ({ commit, dispatch, state }, { path, content }) => {
const file = state.entries[path];
commit(types.UPDATE_FILE_CONTENT, { path, content });
......@@ -128,6 +128,8 @@ export const changeFileContent = ({ state, commit }, { path, content }) => {
} else if (!file.changed && indexOfChangedFile !== -1) {
commit(types.REMOVE_FILE_FROM_CHANGED, path);
}
dispatch('burstUnusedSeal', {}, { root: true });
};
export const setFileLanguage = ({ getters, commit }, { fileLanguage }) => {
......
import { __ } from '~/locale';
import { getChangesCountForFiles, filePathMatches } from './utils';
export const activeFile = state => state.openFiles.find(file => file.active) || null;
......@@ -55,7 +56,23 @@ export const allBlobs = state =>
}, [])
.sort((a, b) => b.lastOpenedAt - a.lastOpenedAt);
export const getChangedFile = state => path => state.changedFiles.find(f => f.path === path);
export const getStagedFile = state => path => state.stagedFiles.find(f => f.path === path);
export const getChangesInFolder = state => path => {
const changedFilesCount = state.changedFiles.filter(f => filePathMatches(f, path)).length;
const stagedFilesCount = state.stagedFiles.filter(
f => filePathMatches(f, path) && !getChangedFile(state)(f.path),
).length;
return changedFilesCount + stagedFilesCount;
};
export const getUnstagedFilesCountForPath = state => path =>
getChangesCountForFiles(state.changedFiles, path);
export const getStagedFilesCountForPath = state => path =>
getChangesCountForFiles(state.stagedFiles, path);
// prevent babel-plugin-rewire from generating an invalid default during karma tests
export default () => {};
......@@ -182,6 +182,10 @@ export const commitChanges = ({ commit, state, getters, dispatch, rootState }) =
}
commit(rootTypes.CLEAR_STAGED_CHANGES, null, { root: true });
setTimeout(() => {
commit(rootTypes.SET_LAST_COMMIT_MSG, '', { root: true });
}, 5000);
})
.then(() => dispatch('updateCommitAction', consts.COMMIT_TO_CURRENT_BRANCH));
})
......
......@@ -61,3 +61,4 @@ export const REMOVE_PENDING_TAB = 'REMOVE_PENDING_TAB';
export const UPDATE_TEMP_FLAG = 'UPDATE_TEMP_FLAG';
export const TOGGLE_FILE_FINDER = 'TOGGLE_FILE_FINDER';
export const BURST_UNUSED_SEAL = 'BURST_UNUSED_SEAL';
......@@ -128,6 +128,11 @@ export default {
}),
});
},
[types.BURST_UNUSED_SEAL](state) {
Object.assign(state, {
unusedSeal: false,
});
},
...projectMutations,
...mergeRequestMutation,
...fileMutations,
......
......@@ -18,5 +18,6 @@ export default () => ({
entries: {},
viewer: 'editor',
delayViewerUpdated: false,
unusedSeal: true,
fileFindVisible: false,
});
......@@ -33,7 +33,6 @@ export const dataStructure = () => ({
raw: '',
content: '',
parentTreeUrl: '',
parentPath: '',
renderError: false,
base64: false,
editorRow: 1,
......@@ -43,6 +42,7 @@ export const dataStructure = () => ({
viewMode: 'edit',
previewMode: null,
size: 0,
parentPath: null,
lastOpenedAt: 0,
});
......@@ -83,7 +83,6 @@ export const decorateData = entity => {
opened,
active,
parentTreeUrl,
parentPath,
changed,
renderError,
content,
......@@ -91,6 +90,7 @@ export const decorateData = entity => {
previewMode,
file_lock,
html,
parentPath,
};
};
......@@ -137,3 +137,9 @@ export const sortTree = sortedTree =>
}),
)
.sort(sortTreesByTypeAndName);
export const filePathMatches = (f, path) =>
f.path.replace(new RegExp(`${f.name}$`), '').indexOf(`${path}/`) === 0;
export const getChangesCountForFiles = (files, path) =>
files.filter(f => filePathMatches(f, path)).length;
......@@ -74,7 +74,11 @@ export function capitalizeFirstCharacter(text) {
* @param {*} replace
* @returns {String}
*/
export const stripHtml = (string, replace = '') => string.replace(/<[^>]*>/g, replace);
export const stripHtml = (string, replace = '') => {
if (!string) return string;
return string.replace(/<[^>]*>/g, replace);
};
/**
* Converts snake_case string to camelCase
......
......@@ -70,6 +70,9 @@
toggleMoreParticipants() {
this.isShowingMoreParticipants = !this.isShowingMoreParticipants;
},
onClickCollapsedIcon() {
this.$emit('toggleSidebar');
},
},
};
</script>
......@@ -82,6 +85,7 @@
data-container="body"
data-placement="left"
:title="participantLabel"
@click="onClickCollapsedIcon"
>
<i
class="fa fa-users"
......
<script>
import Store from '../../stores/sidebar_store';
import eventHub from '../../event_hub';
import Flash from '../../../flash';
import { __ } from '../../../locale';
import subscriptions from './subscriptions.vue';
......@@ -20,12 +19,6 @@ export default {
store: new Store(),
};
},
created() {
eventHub.$on('toggleSubscription', this.onToggleSubscription);
},
beforeDestroy() {
eventHub.$off('toggleSubscription', this.onToggleSubscription);
},
methods: {
onToggleSubscription() {
this.mediator.toggleSubscription()
......@@ -42,6 +35,7 @@ export default {
<subscriptions
:loading="store.isFetching.subscriptions"
:subscribed="store.subscribed"
@toggleSubscription="onToggleSubscription"
/>
</div>
</template>
......@@ -47,8 +47,25 @@
},
},
methods: {
/**
* We need to emit this event on both component & eventHub
* because two consumers depend on it:
*
* 1. eventHub: This component is used in Issue Boards sidebar
* where component template is part of HAML
* and event listeners are tied to app's eventHub.
* 2. Component: This component is also used in Epics in EE
* where listeners are tied to component event.
*/
toggleSubscription() {
// App's eventHub event emission.
eventHub.$emit('toggleSubscription', this.id);
// Component event emission.
this.$emit('toggleSubscription', this.id);
},
onClickCollapsedIcon() {
this.$emit('toggleSidebar');
},
},
};
......@@ -56,7 +73,10 @@
<template>
<div>
<div class="sidebar-collapsed-icon">
<div
class="sidebar-collapsed-icon"
@click="onClickCollapsedIcon"
>
<span
v-tooltip
:title="notificationTooltip"
......
<script>
export default {
name: 'time-tracking-spent-only-pane',
name: 'TimeTrackingSpentOnlyPane',
props: {
timeSpentHumanReadable: {
type: String,
required: true,
},
},
template: `
};
</script>
<template>
<div class="time-tracking-spend-only-pane">
<span class="bold">Spent:</span>
{{ timeSpentHumanReadable }}
</div>
`,
};
</template>
<script>
import TimeTrackingHelpState from './help_state.vue';
import TimeTrackingCollapsedState from './collapsed_state.vue';
import timeTrackingSpentOnlyPane from './spent_only_pane';
import TimeTrackingSpentOnlyPane from './spent_only_pane.vue';
import TimeTrackingNoTrackingPane from './no_tracking_pane.vue';
import TimeTrackingEstimateOnlyPane from './estimate_only_pane.vue';
import TimeTrackingComparisonPane from './comparison_pane.vue';
......@@ -13,7 +13,7 @@ export default {
components: {
TimeTrackingCollapsedState,
TimeTrackingEstimateOnlyPane,
'time-tracking-spent-only-pane': timeTrackingSpentOnlyPane,
TimeTrackingSpentOnlyPane,
TimeTrackingNoTrackingPane,
TimeTrackingComparisonPane,
TimeTrackingHelpState,
......
......@@ -317,6 +317,7 @@
a {
color: $gl-text-color;
word-wrap: break-word;
word-break: break-word;
margin-right: 2px;
}
}
......@@ -462,6 +463,7 @@
.issuable-header-text {
padding-right: 35px;
word-break: break-word;
> strong {
font-weight: $gl-font-weight-bold;
......
......@@ -44,6 +44,12 @@
}
}
.note-text {
table {
font-family: $font-family-sans-serif;
}
}
table {
width: 100%;
font-family: $monospace_font;
......
......@@ -549,6 +549,7 @@
margin-bottom: 0;
border-bottom: 1px solid $white-dark;
padding: $gl-btn-padding 0;
min-height: 56px;
}
.multi-file-commit-panel-header-title {
......@@ -602,14 +603,14 @@
}
}
.multi-file-additions,
.multi-file-additions-solid {
fill: $green-500;
.multi-file-addition,
.multi-file-addition-solid {
color: $green-500;
}
.multi-file-modified,
.multi-file-modified-solid {
fill: $orange-500;
color: $orange-500;
}
.multi-file-commit-list-collapsed {
......@@ -673,6 +674,24 @@
}
}
.multi-file-commit-panel-bottom {
position: relative;
.multi-file-commit-panel-success-message {
position: absolute;
top: 1px;
left: 3px;
bottom: 0;
right: 0;
z-index: 10;
background: $gray-light;
overflow: auto;
display: flex;
flex-direction: column;
justify-content: center;
}
}
.dirty-diff {
// !important need to override monaco inline style
width: 4px !important;
......@@ -972,6 +991,12 @@
resize: none;
}
.ide-tree-changes {
display: flex;
align-items: center;
font-size: 12px;
}
.ide-new-modal-label {
line-height: 34px;
}
......@@ -59,11 +59,14 @@ module Ci
end
def truncate(offset = 0)
self.append("", offset) if offset < size
raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
return if offset == size # Skip the following process as it doesn't affect anything
self.append("", offset)
end
def append(new_data, offset)
raise ArgumentError, 'Offset is out of range' if offset > data.bytesize || offset < 0
raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)
set_data(data.byteslice(0, offset) + new_data)
......@@ -130,7 +133,7 @@ module Ci
def schedule_to_db
return if db?
BuildTraceChunkFlushToDbWorker.perform_async(id)
Ci::BuildTraceChunkFlushWorker.perform_async(id)
end
def fullfilled?
......
class Identity < ActiveRecord::Base
def self.uniqueness_scope
:provider
end
include Sortable
include CaseSensitivity
belongs_to :user
validates :provider, presence: true
validates :extern_uid, allow_blank: true, uniqueness: { scope: :provider, case_sensitive: false }
validates :user_id, uniqueness: { scope: :provider }
validates :extern_uid, allow_blank: true, uniqueness: { scope: uniqueness_scope, case_sensitive: false }
validates :user_id, uniqueness: { scope: uniqueness_scope }
before_save :ensure_normalized_extern_uid, if: :extern_uid_changed?
after_destroy :clear_user_synced_attributes, if: :user_synced_attributes_metadata_from_provider?
......
......@@ -323,7 +323,7 @@ class MergeRequest < ActiveRecord::Base
# updates `merge_jid` with the MergeWorker#jid.
# This helps tracking enqueued and ongoing merge jobs.
def merge_async(user_id, params)
jid = MergeWorker.perform_async(id, user_id, params)
jid = MergeWorker.perform_async(id, user_id, params.to_h)
update_column(:merge_jid, jid)
end
......
......@@ -35,7 +35,7 @@
= link_to avatar_icon_for_user(@user, 400), target: '_blank', rel: 'noopener noreferrer' do
= image_tag avatar_icon_for_user(@user, 90), class: "avatar s90", alt: ''
.user-info
.user-info.prepend-left-default.append-right-default
.cover-title
= @user.name
......
......@@ -66,7 +66,7 @@
- pipeline_processing:pipeline_update
- pipeline_processing:stage_update
- pipeline_processing:update_head_pipeline_for_merge_request
- pipeline_processing:build_trace_chunk_flush_to_db
- pipeline_processing:ci_build_trace_chunk_flush
- repository_check:repository_check_clear
- repository_check:repository_check_single_repository
......
class BuildTraceChunkFlushToDbWorker
include ApplicationWorker
include PipelineQueue
queue_namespace :pipeline_processing
def perform(build_trace_chunk_id)
Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk|
build_trace_chunk.use_database!
end
end
end
module Ci
class BuildTraceChunkFlushWorker
include ApplicationWorker
queue_namespace :pipeline_processing
def perform(build_trace_chunk_id)
::Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk|
build_trace_chunk.use_database!
end
end
end
end
---
title: Add loading icon padding for pipeline environments
merge_request: 18631
author: George Tsiolis
type: fixed
---
title: Add padding to profile description
merge_request: 18663
author: George Tsiolis
type: changed
---
title: Break issue title for board card title and issuable header text
merge_request: 18674
author: George Tsiolis
type: changed
---
title: Output some useful information when running the rails console
merge_request: 18697
author:
type: added
---
title: Change font for tables inside diff discussions
merge_request: 18660
author: George Tsiolis
type: changed
---
title: Add documentation about how to use variables to define deploy policies for
staging/production environments
merge_request: 18675
author:
type: other
---
title: Improve interaction on WebIDE commit panel
merge_request:
author:
type: changed
---
title: Move TimeTrackingSpentOnlyPane vue component
merge_request: 18710
author: George Tsiolis
type: performance
---
title: Update environment item action buttons icons
merge_request: 18632
author: George Tsiolis
type: changed
---
title: Gitaly handles repository forks by default
merge_request:
author:
type: other
# rubocop:disable Rails/Output
if defined?(Rails::Console)
# note that this will not print out when using `spring`
justify = 15
puts "-------------------------------------------------------------------------------------"
puts " GitLab:".ljust(justify) + "#{Gitlab::VERSION} (#{Gitlab::REVISION})"
puts " GitLab Shell:".ljust(justify) + Gitlab::Shell.new.version
puts " #{Gitlab::Database.adapter_name}:".ljust(justify) + Gitlab::Database.version
puts "-------------------------------------------------------------------------------------"
end
......@@ -22,3 +22,16 @@ end.compact
Rails.application.config.action_dispatch.trusted_proxies = (
['127.0.0.1', '::1'] + gitlab_trusted_proxies)
# A monkey patch to make trusted proxies work with Rails 5.0.
# Inspired by https://github.com/rails/rails/issues/5223#issuecomment-263778719
# Remove this monkey patch when upstream is fixed.
if Gitlab.rails5?
module TrustedProxyMonkeyPatch
def ip
@ip ||= (get_header("action_dispatch.remote_ip") || super).to_s
end
end
ActionDispatch::Request.send(:include, TrustedProxyMonkeyPatch)
end
......@@ -495,6 +495,7 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac
| `POSTGRES_PASSWORD` | The PostgreSQL password; defaults to `testing-password`. Set it to use a custom password. |
| `POSTGRES_DB` | The PostgreSQL database name; defaults to the value of [`$CI_ENVIRONMENT_SLUG`](../../ci/variables/README.md#predefined-variables-environment-variables). Set it to use a custom database name. |
| `BUILDPACK_URL` | The buildpack's full URL. It can point to either Git repositories or a tarball URL. For Git repositories, it is possible to point to a specific `ref`, for example `https://github.com/heroku/heroku-buildpack-ruby.git#v142` |
| `STAGING_ENABLED` | From GitLab 10.8, this variable can be used to define a [deploy policy for staging and production environments](#deploy-policy-for-staging-and-production-environments). |
TIP: **Tip:**
Set up the replica variables using a
......@@ -561,6 +562,22 @@ service:
internalPort: 5000
```
#### Deploy policy for staging and production environments
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ci-yml/merge_requests/160)
in GitLab 10.8.
The normal behavior of Auto DevOps is to use Continuous Deployment, pushing
automatically to the `production` environment every time a new pipeline is run
on the default branch. However, there are cases where you might want to use a
staging environment and deploy to production manually. For this scenario, the
`STAGING_ENABLED` environment variable was introduced.
If `STAGING_ENABLED` is defined in your project (e.g., set `STAGING_ENABLED` to
`1` as a secret variable), then the application will be automatically deployed
to a `staging` environment, and a `production_manual` job will be created for
you when you're ready to manually deploy to production.
## Currently supported languages
NOTE: **Note:**
......
......@@ -5,10 +5,6 @@ module SharedGroup
is_member_of(current_user.name, "Owned", Gitlab::Access::DEVELOPER)
end
step '"John Doe" is owner of group "Owned"' do
is_member_of("John Doe", "Owned", Gitlab::Access::OWNER)
end
step '"John Doe" is guest of group "Guest"' do
is_member_of("John Doe", "Guest", Gitlab::Access::GUEST)
end
......
......@@ -48,10 +48,6 @@ module SharedPaths
visit group_group_members_path(Group.find_by(name: "Owned"))
end
step 'I visit group "Owned" settings page' do
visit edit_group_path(Group.find_by(name: "Owned"))
end
step 'I visit group "Owned" projects page' do
visit projects_group_path(Group.find_by(name: "Owned"))
end
......
......@@ -149,11 +149,11 @@ module API
expose_url(api_v4_projects_path(id: project.id))
end
expose :issues, if: -> (*args) { issues_available?(*args) } do |project|
expose :issues, if: -> (project, options) { issues_available?(project, options) } do |project|
expose_url(api_v4_projects_issues_path(id: project.id))
end
expose :merge_requests, if: -> (*args) { mrs_available?(*args) } do |project|
expose :merge_requests, if: -> (project, options) { mrs_available?(project, options) } do |project|
expose_url(api_v4_projects_merge_requests_path(id: project.id))
end
......
......@@ -63,7 +63,8 @@ module Gitlab
end
def fork_repository(new_shard_name, new_repository_relative_path)
Gitlab::GitalyClient.migrate(:fork_repository) do |is_enabled|
Gitlab::GitalyClient.migrate(:fork_repository,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
if is_enabled
gitaly_fork_repository(new_shard_name, new_repository_relative_path)
else
......
......@@ -161,6 +161,7 @@ feature 'Issues > User uses quick actions', :js do
before do
target_project.add_master(user)
gitlab_sign_out
sign_in(user)
visit project_issue_path(project, issue)
end
......@@ -220,6 +221,7 @@ feature 'Issues > User uses quick actions', :js do
before do
target_project.add_master(user)
gitlab_sign_out
sign_in(user)
visit project_issue_path(project, issue)
end
......
......@@ -24,42 +24,10 @@ describe('IDE commit panel empty state', () => {
resetStore(vm.$store);
});
describe('statusSvg', () => {
it('uses noChangesStateSvgPath when commit message is empty', () => {
expect(vm.statusSvg).toBe('no-changes');
expect(vm.$el.querySelector('img').getAttribute('src')).toBe(
'no-changes',
);
});
it('uses committedStateSvgPath when commit message exists', done => {
vm.$store.state.lastCommitMsg = 'testing';
Vue.nextTick(() => {
expect(vm.statusSvg).toBe('committed-state');
expect(vm.$el.querySelector('img').getAttribute('src')).toBe(
'committed-state',
);
done();
});
});
});
it('renders no changes text when last commit message is empty', () => {
expect(vm.$el.textContent).toContain('No changes');
});
it('renders last commit message when it exists', done => {
vm.$store.state.lastCommitMsg = 'testing commit message';
Vue.nextTick(() => {
expect(vm.$el.textContent).toContain('testing commit message');
done();
});
});
describe('toggle button', () => {
it('calls store action', () => {
spyOn(vm, 'toggleRightPanelCollapsed');
......
import Vue from 'vue';
import store from '~/ide/stores';
import successMessage from '~/ide/components/commit_sidebar/success_message.vue';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { resetStore } from '../../helpers';
describe('IDE commit panel successful commit state', () => {
let vm;
beforeEach(() => {
const Component = Vue.extend(successMessage);
vm = createComponentWithStore(Component, store, {
committedStateSvgPath: 'committed-state',
});
vm.$mount();
});
afterEach(() => {
vm.$destroy();
resetStore(vm.$store);
});
it('renders last commit message when it exists', done => {
vm.$store.state.lastCommitMsg = 'testing commit message';
Vue.nextTick(() => {
expect(vm.$el.textContent).toContain('testing commit message');
done();
});
});
});
......@@ -48,6 +48,33 @@ describe('RepoFile', () => {
});
});
describe('folder', () => {
it('renders changes count inside folder', () => {
const f = {
...file('folder'),
path: 'testing',
type: 'tree',
branchId: 'master',
projectId: 'project',
};
store.state.changedFiles.push({
...file('fileName'),
path: 'testing/fileName',
});
createComponent({
file: f,
level: 0,
});
const treeChangesEl = vm.$el.querySelector('.ide-tree-changes');
expect(treeChangesEl).not.toBeNull();
expect(treeChangesEl.textContent).toContain('1');
});
});
describe('locked file', () => {
let f;
......@@ -72,8 +99,7 @@ describe('RepoFile', () => {
it('renders a tooltip', () => {
expect(
vm.$el.querySelector('.ide-file-name span:nth-child(2)').dataset
.originalTitle,
vm.$el.querySelector('.ide-file-name span:nth-child(2)').dataset.originalTitle,
).toContain('Locked by testuser');
});
});
......
......@@ -398,6 +398,20 @@ describe('IDE store file actions', () => {
})
.catch(done.fail);
});
it('bursts unused seal', done => {
store
.dispatch('changeFileContent', {
path: tmpFile.path,
content: 'content',
})
.then(() => {
expect(store.state.unusedSeal).toBe(false);
done();
})
.catch(done.fail);
});
});
describe('discardFileChanges', () => {
......
......@@ -84,4 +84,67 @@ describe('IDE store getters', () => {
expect(getters.allBlobs(localState)[0].name).toBe('blob');
});
});
describe('getChangesInFolder', () => {
it('returns length of changed files for a path', () => {
localState.changedFiles.push(
{
path: 'test/index',
name: 'index',
},
{
path: 'app/123',
name: '123',
},
);
expect(getters.getChangesInFolder(localState)('test')).toBe(1);
});
it('returns length of changed & staged files for a path', () => {
localState.changedFiles.push(
{
path: 'test/index',
name: 'index',
},
{
path: 'testing/123',
name: '123',
},
);
localState.stagedFiles.push(
{
path: 'test/123',
name: '123',
},
{
path: 'test/index',
name: 'index',
},
{
path: 'testing/12345',
name: '12345',
},
);
expect(getters.getChangesInFolder(localState)('test')).toBe(2);
});
it('returns length of changed & tempFiles files for a path', () => {
localState.changedFiles.push(
{
path: 'test/index',
name: 'index',
},
{
path: 'test/newfile',
name: 'newfile',
tempFile: true,
},
);
expect(getters.getChangesInFolder(localState)('test')).toBe(2);
});
});
});
......@@ -116,4 +116,14 @@ describe('Multi-file store mutations', () => {
expect(localState.fileFindVisible).toBe(true);
});
});
describe('BURST_UNUSED_SEAL', () => {
it('updates unusedSeal', () => {
expect(localState.unusedSeal).toBe(true);
mutations.BURST_UNUSED_SEAL(localState);
expect(localState.unusedSeal).toBe(false);
});
});
});
......@@ -75,6 +75,14 @@ describe('text_utility', () => {
'This is a text with html .',
);
});
it('passes through with null string input', () => {
expect(textUtils.stripHtml(null, ' ')).toEqual(null);
});
it('passes through with undefined string input', () => {
expect(textUtils.stripHtml(undefined, ' ')).toEqual(undefined);
});
});
describe('convertToCamelCase', () => {
......
......@@ -170,5 +170,19 @@ describe('Participants', function () {
expect(vm.isShowingMoreParticipants).toBe(true);
});
it('clicking on participants icon emits `toggleSidebar` event', () => {
vm = mountComponent(Participants, {
loading: false,
participants: PARTICIPANT_LIST,
numberOfLessParticipants: 2,
});
spyOn(vm, '$emit');
const participantsIconEl = vm.$el.querySelector('.sidebar-collapsed-icon');
participantsIconEl.click();
expect(vm.$emit).toHaveBeenCalledWith('toggleSidebar');
});
});
});
......@@ -3,7 +3,6 @@ import sidebarSubscriptions from '~/sidebar/components/subscriptions/sidebar_sub
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarService from '~/sidebar/services/sidebar_service';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import eventHub from '~/sidebar/event_hub';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import Mock from './mock_data';
......@@ -32,7 +31,7 @@ describe('Sidebar Subscriptions', function () {
mediator,
});
eventHub.$emit('toggleSubscription');
vm.onToggleSubscription();
expect(mediator.toggleSubscription).toHaveBeenCalled();
});
......
import Vue from 'vue';
import subscriptions from '~/sidebar/components/subscriptions/subscriptions.vue';
import eventHub from '~/sidebar/event_hub';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
describe('Subscriptions', function () {
......@@ -39,4 +40,22 @@ describe('Subscriptions', function () {
expect(vm.$refs.toggleButton.$el.querySelector('.project-feature-toggle')).toHaveClass('is-checked');
});
it('toggleSubscription method emits `toggleSubscription` event on eventHub and Component', () => {
vm = mountComponent(Subscriptions, { subscribed: true });
spyOn(eventHub, '$emit');
spyOn(vm, '$emit');
vm.toggleSubscription();
expect(eventHub.$emit).toHaveBeenCalledWith('toggleSubscription', jasmine.any(Object));
expect(vm.$emit).toHaveBeenCalledWith('toggleSubscription', jasmine.any(Object));
});
it('onClickCollapsedIcon method emits `toggleSidebar` event on component', () => {
vm = mountComponent(Subscriptions, { subscribed: true });
spyOn(vm, '$emit');
vm.onClickCollapsedIcon();
expect(vm.$emit).toHaveBeenCalledWith('toggleSidebar');
});
});
......@@ -4,678 +4,24 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do
let(:build) { create(:ci_build) }
let(:trace) { described_class.new(build) }
before do
stub_feature_flags(ci_enable_live_trace: true)
end
describe "associations" do
it { expect(trace).to respond_to(:job) }
it { expect(trace).to delegate_method(:old_trace).to(:job) }
end
describe '#html' do
before do
trace.set("12\n34")
end
it "returns formatted html" do
expect(trace.html).to eq("12<br>34")
end
it "returns last line of formatted html" do
expect(trace.html(last_lines: 1)).to eq("34")
end
end
describe '#raw' do
before do
trace.set("12\n34")
end
it "returns raw output" do
expect(trace.raw).to eq("12\n34")
end
it "returns last line of raw output" do
expect(trace.raw(last_lines: 1)).to eq("34")
end
end
describe '#extract_coverage' do
let(:regex) { '\(\d+.\d+\%\) covered' }
context 'matching coverage' do
before do
trace.set('Coverage 1033 / 1051 LOC (98.29%) covered')
end
it "returns valid coverage" do
expect(trace.extract_coverage(regex)).to eq("98.29")
end
end
context 'no coverage' do
before do
trace.set('No coverage')
end
it 'returns nil' do
expect(trace.extract_coverage(regex)).to be_nil
end
end
end
describe '#extract_sections' do
let(:log) { 'No sections' }
let(:sections) { trace.extract_sections }
before do
trace.set(log)
end
context 'no sections' do
it 'returns []' do
expect(trace.extract_sections).to eq([])
end
end
context 'multiple sections available' do
let(:log) { File.read(expand_fixture_path('trace/trace_with_sections')) }
let(:sections_data) do
[
{ name: 'prepare_script', lines: 2, duration: 3.seconds },
{ name: 'get_sources', lines: 4, duration: 1.second },
{ name: 'restore_cache', lines: 0, duration: 0.seconds },
{ name: 'download_artifacts', lines: 0, duration: 0.seconds },
{ name: 'build_script', lines: 2, duration: 1.second },
{ name: 'after_script', lines: 0, duration: 0.seconds },
{ name: 'archive_cache', lines: 0, duration: 0.seconds },
{ name: 'upload_artifacts', lines: 0, duration: 0.seconds }
]
end
it "returns valid sections" do
expect(sections).not_to be_empty
expect(sections.size).to eq(sections_data.size),
"expected #{sections_data.size} sections, got #{sections.size}"
buff = StringIO.new(log)
sections.each_with_index do |s, i|
expected = sections_data[i]
expect(s[:name]).to eq(expected[:name])
expect(s[:date_end] - s[:date_start]).to eq(expected[:duration])
buff.seek(s[:byte_start], IO::SEEK_SET)
length = s[:byte_end] - s[:byte_start]
lines = buff.read(length).count("\n")
expect(lines).to eq(expected[:lines])
end
end
end
context 'logs contains "section_start"' do
let(:log) { "section_start:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_end:1506417477:a_section\r\033[0K"}
it "returns only one section" do
expect(sections).not_to be_empty
expect(sections.size).to eq(1)
section = sections[0]
expect(section[:name]).to eq('a_section')
expect(section[:byte_start]).not_to eq(section[:byte_end]), "got an empty section"
end
end
context 'missing section_end' do
let(:log) { "section_start:1506417476:a_section\r\033[0KSome logs\nNo section_end\n"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'missing section_start' do
let(:log) { "Some logs\nNo section_start\nsection_end:1506417476:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'inverted section_start section_end' do
let(:log) { "section_end:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_start:1506417477:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
end
describe '#set' do
before do
trace.set("12")
end
it "returns trace" do
expect(trace.raw).to eq("12")
end
context 'overwrite trace' do
before do
trace.set("34")
end
it "returns new trace" do
expect(trace.raw).to eq("34")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'hides build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
describe '#append' do
before do
trace.set("1234")
end
it "returns correct trace" do
expect(trace.append("56", 4)).to eq(6)
expect(trace.raw).to eq("123456")
end
context 'tries to append trace at different offset' do
it "fails with append" do
expect(trace.append("56", 2)).to eq(-4)
expect(trace.raw).to eq("1234")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
describe '#read' do
shared_examples 'read successfully with IO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(IO)
end
end
end
shared_examples 'read successfully with StringIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(StringIO)
end
end
end
shared_examples 'read successfully with ChunkedIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(Gitlab::Ci::Trace::ChunkedIO)
end
end
end
shared_examples 'failed to read' do
it 'yields without source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_nil
end
end
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_id) exists' do
before do
expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_ci_id) exists' do
before do
expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when db trace exists' do
before do
build.send(:write_attribute, :trace, "data")
end
it_behaves_like 'read successfully with StringIO'
end
context 'when live trace exists' do
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it_behaves_like 'read successfully with ChunkedIO'
end
context 'when no sources exist' do
it_behaves_like 'failed to read'
end
end
describe 'trace handling' do
subject { trace.exist? }
context 'trace does not exist' do
it { expect(trace.exist?).to be(false) }
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
context 'when the trace artifact has been erased' do
before do
trace.erase!
end
it { is_expected.to be_falsy }
it 'removes associations' do
expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
end
end
end
context 'new trace path is used' do
before do
trace.send(:ensure_directory)
File.open(trace.send(:default_path), "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'deprecated path' do
let(:path) { trace.send(:deprecated_path) }
context 'with valid ci_id' do
before do
build.project.update(ci_id: 1000)
FileUtils.mkdir_p(File.dirname(path))
File.open(path, "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'without valid ci_id' do
it "does not return deprecated path" do
expect(path).to be_nil
end
end
end
context 'stored in database' do
before do
build.send(:write_attribute, :trace, "data")
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
it "returns database data" do
expect(trace.raw).to eq("data")
end
end
context 'stored in database' do
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
end
it "returns live trace data" do
expect(trace.raw).to eq("abc")
end
end
end
describe '#archive!' do
subject { trace.archive! }
before do
stub_feature_flags(ci_enable_live_trace: false)
end
shared_examples 'archive trace file' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(File.exist?(src_path)).to be_falsy
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace file stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(File.exist?(src_path)).to be_truthy
end
end
shared_examples 'archive trace in database' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(build.old_trace).to be_nil
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in database stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(build.old_trace).to eq(trace_content)
end
end
shared_examples 'archive trace file in ChunkedIO' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in ChunkedIO stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
expect(stream.read).to eq(trace_raw)
end
end
end
context 'when job does not have trace artifact' do
context 'when trace file stored in default path' do
let(:build) { create(:ci_build, :success, :trace_live) }
let(:src_path) { trace.read { |s| s.path } }
let(:src_checksum) { Digest::SHA256.file(src_path).hexdigest }
before do
stub_feature_flags(ci_enable_live_trace: false)
build # Initialize after set feature flag
src_path
src_checksum
end
it_behaves_like 'archive trace file'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace file stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace file stays intact', error: ActiveRecord::RecordInvalid
end
end
context 'when trace is stored in database' do
let(:build) { create(:ci_build, :success) }
let(:trace_content) { 'Sample trace' }
let(:src_checksum) { Digest::SHA256.hexdigest(trace_content) }
context 'when live trace feature is disabled' do
before do
stub_feature_flags(ci_enable_live_trace: false)
build # Initialize after set feature flag
trace_content
src_checksum
build.update_column(:trace, trace_content)
end
it_behaves_like 'archive trace in database'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
it_behaves_like 'trace with disabled live trace feature'
end
it_behaves_like 'source trace in database stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in database stays intact', error: ActiveRecord::RecordInvalid
end
context 'when there is a validation error on Ci::Build' do
before do
allow_any_instance_of(Ci::Build).to receive(:save).and_return(false)
allow_any_instance_of(Ci::Build).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
context "when erase old trace with 'save'" do
before do
build.send(:write_attribute, :trace, nil)
build.save
end
it 'old trace is not deleted' do
build.reload
expect(build.trace.raw).to eq(trace_content)
end
end
it_behaves_like 'archive trace in database'
end
end
context 'when trace is stored in ChunkedIO' do
let(:build) { create(:ci_build, :success, :trace_live) }
let(:trace_raw) { build.trace.raw }
let(:src_checksum) { Digest::SHA256.hexdigest(trace_raw) }
context 'when live trace feature is enabled' do
before do
stub_feature_flags(ci_enable_live_trace: true)
build # Initialize after set feature flag
trace_raw
src_checksum
end
it_behaves_like 'archive trace file in ChunkedIO'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid
end
end
end
context 'when job has trace artifact' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Already archived')
expect(build.job_artifacts_trace.file.exists?).to be_truthy
end
end
context 'when job is not finished yet' do
let!(:build) { create(:ci_build, :running, :trace_live) }
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Job is not finished yet')
expect(build.trace.exist?).to be_truthy
end
end
it_behaves_like 'trace with enabled live trace feature'
end
end
......@@ -75,7 +75,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
let(:value) { 'a' * described_class::CHUNK_SIZE }
it 'schedules stashing data' do
expect(BuildTraceChunkFlushToDbWorker).to receive(:perform_async).once
expect(Ci::BuildTraceChunkFlushWorker).to receive(:perform_async).once
subject
end
......@@ -112,7 +112,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when chunk size is fulfilled' do
it 'does not schedule stashing data' do
expect(BuildTraceChunkFlushToDbWorker).not_to receive(:perform_async)
expect(Ci::BuildTraceChunkFlushWorker).not_to receive(:perform_async)
subject
end
......@@ -141,11 +141,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when offset is bigger than data size' do
let(:offset) { data.bytesize + 1 }
it do
expect_any_instance_of(described_class).not_to receive(:append) { }
subject
end
it { expect { subject }.to raise_error('Offset is out of range') }
end
context 'when offset is 10' do
......
......@@ -1213,7 +1213,7 @@ describe MergeRequest do
it 'enqueues MergeWorker job and updates merge_jid' do
merge_request = create(:merge_request)
user_id = double(:user_id)
params = double(:params)
params = {}
merge_jid = 'hash-123'
expect(MergeWorker).to receive(:perform_async).with(merge_request.id, user_id, params) do
......
......@@ -2,7 +2,7 @@ require 'spec_helper'
describe API::Runner, :clean_gitlab_redis_shared_state do
include StubGitlabCalls
include ChunkedIOHelpers
include RedisHelpers
let(:registration_token) { 'abcdefg123456' }
......@@ -867,29 +867,51 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
end
context 'when redis had an outage' do
it "recovers" do
# GitLab-Runner patches
context 'when trace is patched' do
before do
patch_the_trace
end
it 'has valid trace' do
expect(response.status).to eq(202)
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when redis data are flushed' do
before do
redis_shared_state_cleanup!
end
# GitLab-Rails encounters an outage on Redis
redis_shared_state_outage!
it 'has empty trace' do
expect(job.reload.trace.raw).to eq ''
end
# GitLab-Runner patches
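# The contexts below appear to exercise the recovery protocol after the flush:
# a partial patch at bytes 28-32 no longer matches what the server holds, so the
# response carries a 'Range' header ('0-0') describing the bytes it has, and the
# runner then resends the full trace.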
context 'when we perform partial patch' do
before do
patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32" }))
expect(response.status).to eq 202
end
it 'returns an error' do
expect(response.status).to eq(202)
expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-0'
expect(job.reload.trace.raw).to eq ''
end
end
# GitLab-Runner re-patches
context 'when we resend full trace' do
before do
patch_the_trace('BUILD TRACE appended appended hello')
end
it 'succeeds in updating the trace' do
expect(response.status).to eq(202)
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
end
end
end
end
end
context 'when Runner makes a force-patch' do
before do
......
......@@ -86,6 +86,7 @@ RSpec.configure do |config|
config.include WaitForRequests, :js
config.include LiveDebugger, :js
config.include MigrationsHelpers, :migration
config.include RedisHelpers
if ENV['CI']
# This includes the first try, i.e. tests will be run 4 times before failing.
......@@ -146,21 +147,27 @@ RSpec.configure do |config|
end
config.around(:each, :clean_gitlab_redis_cache) do |example|
Gitlab::Redis::Cache.with(&:flushall)
redis_cache_cleanup!
example.run
Gitlab::Redis::Cache.with(&:flushall)
redis_cache_cleanup!
end
config.around(:each, :clean_gitlab_redis_shared_state) do |example|
Gitlab::Redis::SharedState.with(&:flushall)
Sidekiq.redis(&:flushall)
redis_shared_state_cleanup!
example.run
Gitlab::Redis::SharedState.with(&:flushall)
Sidekiq.redis(&:flushall)
redis_shared_state_cleanup!
end
config.around(:each, :clean_gitlab_redis_queues) do |example|
redis_queues_cleanup!
example.run
redis_queues_cleanup!
end
# The :each scope runs "inside" the example, so this hook ensures the DB is in the
......
......@@ -8,9 +8,4 @@ module ChunkedIOHelpers
stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size)
stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size)
end
def redis_shared_state_outage!
Gitlab::Redis::SharedState.with(&:flushall)
Sidekiq.redis(&:flushall)
end
end
module RedisHelpers
# config/README.md
# Usage: performance enhancement
def redis_cache_cleanup!
Gitlab::Redis::Cache.with(&:flushall)
end
# Usage: Sidekiq, Mailroom, CI Runner, Workhorse, push services
def redis_queues_cleanup!
Gitlab::Redis::Queues.with(&:flushall)
end
# Usage: session state, rate limiting
def redis_shared_state_cleanup!
Gitlab::Redis::SharedState.with(&:flushall)
end
end
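# A minimal usage sketch (illustrative only): tagging a spec with the matching
# metadata makes the spec_helper hooks above flush the corresponding Redis
# instance around each example, e.g.
#
#   describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
#     # redis_shared_state_cleanup! runs before and after each example here
#   end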
shared_examples_for 'common trace features' do
describe '#html' do
before do
trace.set("12\n34")
end
it "returns formatted html" do
expect(trace.html).to eq("12<br>34")
end
it "returns last line of formatted html" do
expect(trace.html(last_lines: 1)).to eq("34")
end
end
describe '#raw' do
before do
trace.set("12\n34")
end
it "returns raw output" do
expect(trace.raw).to eq("12\n34")
end
it "returns last line of raw output" do
expect(trace.raw(last_lines: 1)).to eq("34")
end
end
describe '#extract_coverage' do
let(:regex) { '\(\d+.\d+\%\) covered' }
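# A worked example of this regex: against 'Coverage 1033 / 1051 LOC (98.29%) covered'
# it matches '(98.29%) covered', and extract_coverage is expected to return the
# numeric part, '98.29'.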
context 'matching coverage' do
before do
trace.set('Coverage 1033 / 1051 LOC (98.29%) covered')
end
it "returns valid coverage" do
expect(trace.extract_coverage(regex)).to eq("98.29")
end
end
context 'no coverage' do
before do
trace.set('No coverage')
end
it 'returns nil' do
expect(trace.extract_coverage(regex)).to be_nil
end
end
end
describe '#extract_sections' do
let(:log) { 'No sections' }
let(:sections) { trace.extract_sections }
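# The fixtures below use the CI section marker format,
# "section_start:<unix timestamp>:<name>\r\e[0K" ... "section_end:<unix timestamp>:<name>\r\e[0K";
# extract_sections appears to pair matching markers into entries with
# :name, :byte_start, :byte_end, :date_start and :date_end.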
before do
trace.set(log)
end
context 'no sections' do
it 'returns []' do
expect(trace.extract_sections).to eq([])
end
end
context 'multiple sections available' do
let(:log) { File.read(expand_fixture_path('trace/trace_with_sections')) }
let(:sections_data) do
[
{ name: 'prepare_script', lines: 2, duration: 3.seconds },
{ name: 'get_sources', lines: 4, duration: 1.second },
{ name: 'restore_cache', lines: 0, duration: 0.seconds },
{ name: 'download_artifacts', lines: 0, duration: 0.seconds },
{ name: 'build_script', lines: 2, duration: 1.second },
{ name: 'after_script', lines: 0, duration: 0.seconds },
{ name: 'archive_cache', lines: 0, duration: 0.seconds },
{ name: 'upload_artifacts', lines: 0, duration: 0.seconds }
]
end
it "returns valid sections" do
expect(sections).not_to be_empty
expect(sections.size).to eq(sections_data.size),
"expected #{sections_data.size} sections, got #{sections.size}"
buff = StringIO.new(log)
sections.each_with_index do |s, i|
expected = sections_data[i]
expect(s[:name]).to eq(expected[:name])
expect(s[:date_end] - s[:date_start]).to eq(expected[:duration])
buff.seek(s[:byte_start], IO::SEEK_SET)
length = s[:byte_end] - s[:byte_start]
lines = buff.read(length).count("\n")
expect(lines).to eq(expected[:lines])
end
end
end
context 'log contains "section_start"' do
let(:log) { "section_start:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_end:1506417477:a_section\r\033[0K"}
it "returns only one section" do
expect(sections).not_to be_empty
expect(sections.size).to eq(1)
section = sections[0]
expect(section[:name]).to eq('a_section')
expect(section[:byte_start]).not_to eq(section[:byte_end]), "got an empty section"
end
end
context 'missing section_end' do
let(:log) { "section_start:1506417476:a_section\r\033[0KSome logs\nNo section_end\n"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'missing section_start' do
let(:log) { "Some logs\nNo section_start\nsection_end:1506417476:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
context 'inverted section_start section_end' do
let(:log) { "section_end:1506417476:a_section\r\033[0Klooks like a section_start:invalid\nsection_start:1506417477:a_section\r\033[0K"}
it "returns no sections" do
expect(sections).to be_empty
end
end
end
describe '#set' do
before do
trace.set("12")
end
it "returns trace" do
expect(trace.raw).to eq("12")
end
context 'overwrite trace' do
before do
trace.set("34")
end
it "returns new trace" do
expect(trace.raw).to eq("34")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'hides build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.set(token)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
describe '#append' do
before do
trace.set("1234")
end
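# As the examples below suggest, append returns the new trace size when the
# offset matches the current size (append("56", 4) => 6) and a negative value
# when it does not (append("56", 2) => -4), leaving the trace unchanged.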
it "returns correct trace" do
expect(trace.append("56", 4)).to eq(6)
expect(trace.raw).to eq("123456")
end
context 'when appending the trace at a different offset' do
it "fails to append" do
expect(trace.append("56", 2)).to eq(-4)
expect(trace.raw).to eq("1234")
end
end
context 'runners token' do
let(:token) { 'my_secret_token' }
before do
build.project.update(runners_token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
context 'build token' do
let(:token) { 'my_secret_token' }
before do
build.update(token: token)
trace.append(token, 0)
end
it "hides token" do
expect(trace.raw).not_to include(token)
end
end
end
end
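# The shared examples above are mixed into both suites below via
# it_behaves_like 'common trace features', so #html, #raw, #extract_coverage,
# #extract_sections, #set and #append are covered with the live trace feature
# both disabled and enabled.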
shared_examples_for 'trace with disabled live trace feature' do
it_behaves_like 'common trace features'
describe '#read' do
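# The contexts below exercise the sources #read falls back to with the live
# trace feature disabled: a trace artifact, the current (default) path, the
# deprecated project_ci_id path, the database column, and finally no source at
# all, in which case the stream yields nil.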
shared_examples 'read successfully with IO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(IO)
end
end
end
shared_examples 'read successfully with StringIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(StringIO)
end
end
end
shared_examples 'failed to read' do
it 'yields without source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_nil
end
end
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_id) exists' do
before do
expect(trace).to receive(:default_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when current_path (with project_ci_id) exists' do
before do
expect(trace).to receive(:deprecated_path) { expand_fixture_path('trace/sample_trace') }
end
it_behaves_like 'read successfully with IO'
end
context 'when db trace exists' do
before do
build.send(:write_attribute, :trace, "data")
end
it_behaves_like 'read successfully with StringIO'
end
context 'when no sources exist' do
it_behaves_like 'failed to read'
end
end
describe 'trace handling' do
subject { trace.exist? }
context 'trace does not exist' do
it { expect(trace.exist?).to be(false) }
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
context 'when the trace artifact has been erased' do
before do
trace.erase!
end
it { is_expected.to be_falsy }
it 'removes associations' do
expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
end
end
end
context 'new trace path is used' do
before do
trace.send(:ensure_directory)
File.open(trace.send(:default_path), "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'deprecated path' do
let(:path) { trace.send(:deprecated_path) }
context 'with valid ci_id' do
before do
build.project.update(ci_id: 1000)
FileUtils.mkdir_p(File.dirname(path))
File.open(path, "w") do |file|
file.write("data")
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
end
context 'without valid ci_id' do
it "does not return deprecated path" do
expect(path).to be_nil
end
end
end
context 'stored in database' do
before do
build.send(:write_attribute, :trace, "data")
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
end
it "returns database data" do
expect(trace.raw).to eq("data")
end
end
end
describe '#archive!' do
subject { trace.archive! }
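# The shared examples below assert the effects of archive!: the trace ends up as
# a 'job.log' job artifact with its SHA256 recorded, the original source (file or
# database column) is removed, and on failure the source stays intact with no
# artifact record created.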
shared_examples 'archive trace file' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(File.exist?(src_path)).to be_falsy
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace file stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(File.exist?(src_path)).to be_truthy
end
end
shared_examples 'archive trace in database' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(build.old_trace).to be_nil
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in database stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
expect(build.old_trace).to eq(trace_content)
end
end
context 'when job does not have trace artifact' do
context 'when trace file stored in default path' do
let!(:build) { create(:ci_build, :success, :trace_live) }
let!(:src_path) { trace.read { |s| s.path } }
let!(:src_checksum) { Digest::SHA256.file(src_path).hexdigest }
it_behaves_like 'archive trace file'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace file stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace file stays intact', error: ActiveRecord::RecordInvalid
end
end
context 'when trace is stored in database' do
let(:build) { create(:ci_build, :success) }
let(:trace_content) { 'Sample trace' }
let(:src_checksum) { Digest::SHA256.hexdigest(trace_content) }
before do
build.update_column(:trace, trace_content)
end
it_behaves_like 'archive trace in database'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace in database stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in database stays intact', error: ActiveRecord::RecordInvalid
end
context 'when there is a validation error on Ci::Build' do
before do
allow_any_instance_of(Ci::Build).to receive(:save).and_return(false)
allow_any_instance_of(Ci::Build).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
context "when erase old trace with 'save'" do
before do
build.send(:write_attribute, :trace, nil)
build.save
end
it 'does not delete the old trace' do
build.reload
expect(build.trace.raw).to eq(trace_content)
end
end
it_behaves_like 'archive trace in database'
end
end
end
context 'when job has trace artifact' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Already archived')
expect(build.job_artifacts_trace.file.exists?).to be_truthy
end
end
context 'when job is not finished yet' do
let!(:build) { create(:ci_build, :running, :trace_live) }
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Job is not finished yet')
expect(build.trace.exist?).to be_truthy
end
end
end
end
shared_examples_for 'trace with enabled live trace feature' do
it_behaves_like 'common trace features'
describe '#read' do
shared_examples 'read successfully with IO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(IO)
end
end
end
shared_examples 'read successfully with ChunkedIO' do
it 'yields with source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_a(Gitlab::Ci::Trace::ChunkedIO)
end
end
end
shared_examples 'failed to read' do
it 'yields without source' do
trace.read do |stream|
expect(stream).to be_a(Gitlab::Ci::Trace::Stream)
expect(stream.stream).to be_nil
end
end
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it_behaves_like 'read successfully with IO'
end
context 'when live trace exists' do
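# Writing through Gitlab::Ci::Trace::ChunkedIO appears to store the data as
# Ci::BuildTraceChunk rows, which is why #read yields a ChunkedIO stream here.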
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it_behaves_like 'read successfully with ChunkedIO'
end
context 'when no sources exist' do
it_behaves_like 'failed to read'
end
end
describe 'trace handling' do
subject { trace.exist? }
context 'trace does not exist' do
it { expect(trace.exist?).to be(false) }
end
context 'when trace artifact exists' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it { is_expected.to be_truthy }
context 'when the trace artifact has been erased' do
before do
trace.erase!
end
it { is_expected.to be_falsy }
it 'removes associations' do
expect(Ci::JobArtifact.exists?(job_id: build.id, file_type: :trace)).to be_falsy
end
end
end
context 'stored in live trace' do
before do
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
stream.write('abc')
end
end
it "trace exist" do
expect(trace.exist?).to be(true)
end
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
end
it "returns live trace data" do
expect(trace.raw).to eq("abc")
end
end
end
describe '#archive!' do
subject { trace.archive! }
shared_examples 'archive trace file in ChunkedIO' do
it do
expect { subject }.to change { Ci::JobArtifact.count }.by(1)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
end
end
shared_examples 'source trace in ChunkedIO stays intact' do |error:|
it do
expect { subject }.to raise_error(error)
build.reload
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace).to be_nil
Gitlab::Ci::Trace::ChunkedIO.new(build) do |stream|
expect(stream.read).to eq(trace_raw)
end
end
end
context 'when job does not have trace artifact' do
context 'when trace is stored in ChunkedIO' do
let!(:build) { create(:ci_build, :success, :trace_live) }
let!(:trace_raw) { build.trace.raw }
let!(:src_checksum) { Digest::SHA256.hexdigest(trace_raw) }
it_behaves_like 'archive trace file in ChunkedIO'
context 'when failed to create clone file' do
before do
allow(IO).to receive(:copy_stream).and_return(0)
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: Gitlab::Ci::Trace::ArchiveError
end
context 'when failed to create job artifact record' do
before do
allow_any_instance_of(Ci::JobArtifact).to receive(:save).and_return(false)
allow_any_instance_of(Ci::JobArtifact).to receive_message_chain(:errors, :full_messages)
.and_return(%w[Error Error])
end
it_behaves_like 'source trace in ChunkedIO stays intact', error: ActiveRecord::RecordInvalid
end
end
end
context 'when job has trace artifact' do
before do
create(:ci_job_artifact, :trace, job: build)
end
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Already archived')
expect(build.job_artifacts_trace.file.exists?).to be_truthy
end
end
context 'when job is not finished yet' do
let!(:build) { create(:ci_build, :running, :trace_live) }
it 'does not archive' do
expect_any_instance_of(described_class).not_to receive(:archive_stream!)
expect { subject }.to raise_error('Job is not finished yet')
expect(build.trace.exist?).to be_truthy
end
end
end
end