Commit 8e38899d authored by Valery Sizov

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce into ce_upstream [ci skip]

parents 71c8b3e7 6306e797
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.3-golang-1.8-git-2.13-phantomjs-2.1-node-8.x-yarn-1.0-postgresql-9.6"
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.5-golang-1.8-git-2.13-phantomjs-2.1-node-8.x-yarn-1.0-postgresql-9.6"
.default-cache: &default-cache
key: "ruby-233-with-yarn"
key: "ruby-235-with-yarn"
paths:
- vendor/ruby
- .yarn-cache/
......@@ -480,7 +480,11 @@ db:migrate:reset-mysql:
variables:
SETUP_DB: "false"
script:
<<<<<<< HEAD
- git fetch origin v8.14.10-ee
=======
- git fetch origin v9.3.0
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
- git checkout -f FETCH_HEAD
- bundle install $BUNDLE_INSTALL_FLAGS
- cp config/gitlab.yml.example config/gitlab.yml
......@@ -576,7 +580,7 @@ karma:
<<: *dedicated-runner
<<: *except-docs
<<: *pull-cache
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.3-golang-1.8-git-2.13-chrome-61.0-node-8.x-yarn-1.0-postgresql-9.6"
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.5-golang-1.8-git-2.13-chrome-61.0-node-8.x-yarn-1.0-postgresql-9.6"
stage: test
variables:
BABEL_ENV: "coverage"
......
......@@ -93,7 +93,7 @@ gem 'kaminari', '~> 1.0'
gem 'hamlit', '~> 2.6.1'
# Files attachments
gem 'carrierwave', '~> 1.1'
gem 'carrierwave', '~> 1.2'
# Drag and Drop UI
gem 'dropzonejs-rails', '~> 0.7.1'
......@@ -414,7 +414,7 @@ group :ed25519 do
end
# Gitaly GRPC client
gem 'gitaly-proto', '~> 0.48.0', require: 'gitaly'
gem 'gitaly-proto', '~> 0.51.0', require: 'gitaly'
gem 'toml-rb', '~> 0.3.15', require: false
......
......@@ -115,7 +115,7 @@ GEM
capybara-screenshot (1.0.14)
capybara (>= 1.0, < 3)
launchy
carrierwave (1.1.0)
carrierwave (1.2.1)
activemodel (>= 4.0.0)
activesupport (>= 4.0.0)
mime-types (>= 1.16)
......@@ -297,7 +297,7 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gherkin-ruby (0.3.2)
gitaly-proto (0.48.0)
gitaly-proto (0.51.0)
google-protobuf (~> 3.1)
grpc (~> 1.0)
github-linguist (4.7.6)
......@@ -1021,7 +1021,7 @@ DEPENDENCIES
bundler-audit (~> 0.5.0)
capybara (~> 2.15.0)
capybara-screenshot (~> 1.0.0)
carrierwave (~> 1.1)
carrierwave (~> 1.2)
charlock_holmes (~> 0.7.5)
chronic (~> 0.10.2)
chronic_duration (~> 0.10.6)
......@@ -1065,7 +1065,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.2.0)
gitaly-proto (~> 0.48.0)
gitaly-proto (~> 0.51.0)
github-linguist (~> 4.7.0)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-license (~> 1.0)
......
/* eslint-disable func-names, no-underscore-dangle, space-before-function-paren, no-var, one-var, one-var-declaration-per-line, prefer-rest-params, max-len, vars-on-top, wrap-iife, no-unused-vars, quotes, no-shadow, no-cond-assign, prefer-arrow-callback, no-return-assign, no-else-return, camelcase, comma-dangle, no-lonely-if, guard-for-in, no-restricted-syntax, consistent-return, prefer-template, no-param-reassign, no-loop-func, no-mixed-operators */
/* global fuzzaldrinPlus */
import _ from 'underscore';
import fuzzaldrinPlus from 'fuzzaldrin-plus';
import { isObject } from './lib/utils/type_utility';
var GitLabDropdown, GitLabDropdownFilter, GitLabDropdownRemote, GitLabDropdownInput;
......
......@@ -12,7 +12,6 @@ import svg4everybody from 'svg4everybody';
// libraries with import side-effects
import 'mousetrap';
import 'mousetrap/plugins/pause/mousetrap-pause';
import 'vendor/fuzzaldrin-plus';
// expose common libraries as globals (TODO: remove these)
window.jQuery = jQuery;
......
/* eslint-disable func-names, space-before-function-paren, no-var, prefer-rest-params, wrap-iife, quotes, consistent-return, one-var, one-var-declaration-per-line, no-cond-assign, max-len, object-shorthand, no-param-reassign, comma-dangle, prefer-template, no-unused-vars, no-return-assign */
/* global fuzzaldrinPlus */
import fuzzaldrinPlus from 'fuzzaldrin-plus';
(function() {
this.ProjectFindFile = (function() {
......
......@@ -27,6 +27,7 @@
toggleModalOpen() {
this.openModal = !this.openModal;
},
<<<<<<< HEAD
<<<<<<< HEAD
createNewEntryInStore(options, openEditMode = true) {
RepoHelper.createNewEntry(options, openEditMode);
......@@ -43,6 +44,8 @@
eventHub.$off('createNewEntry', this.createNewEntryInStore);
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
},
};
</script>
......@@ -75,7 +78,7 @@
</li>
<li>
<upload
:current-path="currentPath"
:path="path"
/>
</li>
<li>
......
......@@ -5,11 +5,15 @@
export default {
props: {
<<<<<<< HEAD
path: {
=======
type: {
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
type: String,
required: true,
},
type: {
path: {
type: String,
required: true,
},
......@@ -27,12 +31,17 @@
'createTempEntry',
]),
createEntryInStore() {
<<<<<<< HEAD
<<<<<<< HEAD
eventHub.$emit('createNewEntry', {
name: this.entryName,
=======
this.createTempEntry({
name: this.entryName.replace(new RegExp(`^${this.path}/`), ''),
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
type: this.type,
toggleModal: true,
});
<<<<<<< HEAD
=======
this.createTempEntry({
name: this.entryName.replace(new RegExp(`^${this.path}/`), ''),
......@@ -41,6 +50,10 @@
this.toggleModalOpen();
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
this.toggleModalOpen();
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
},
toggleModalOpen() {
this.$emit('toggle');
......
<script>
import eventHub from '../../event_hub';
import { mapActions } from 'vuex';
export default {
props: {
currentPath: {
path: {
type: String,
required: true,
},
},
methods: {
...mapActions([
'createTempEntry',
]),
createFile(target, file, isText) {
const { name } = file;
const nameWithPath = `${this.currentPath !== '' ? `${this.currentPath}/` : ''}${name}`;
let { result } = target;
if (!isText) {
result = result.split('base64,')[1];
}
eventHub.$emit('createNewEntry', {
name: nameWithPath,
this.createTempEntry({
name,
type: 'blob',
content: result,
toggleModal: false,
base64: !isText,
}, isText);
});
},
readFile(file) {
const reader = new FileReader();
......
......@@ -38,6 +38,7 @@ export default {
makeCommit(newBranch = false) {
const createNewBranch = newBranch || this.startNewMR;
<<<<<<< HEAD
<<<<<<< HEAD
makeCommit(newBranch) {
// see https://docs.gitlab.com/ce/api/commits.html#create-a-commit-with-multiple-files-and-actions
......@@ -51,6 +52,8 @@ export default {
const branch = newBranch ? `${this.currentBranch}-${this.currentShortHash}` : this.currentBranch;
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
const payload = {
branch: createNewBranch ? `${this.currentBranch}-${new Date().getTime().toString()}` : this.currentBranch,
commit_message: this.commitMessage,
......
......@@ -45,11 +45,15 @@ export default {
</p>
</div>
<div
<<<<<<< HEAD
<<<<<<< HEAD
v-else-if="activeFile.tooLarge"
=======
v-else-if="renderErrorTooLarge"
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
v-else-if="renderErrorTooLarge"
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
class="vertical-center render-error">
<p class="text-center">
The source could not be displayed because it is too large. You can <a :href="activeFile.rawPath" download>download</a> it instead.
......
import Service from '../services/repo_service';
import Store from '../stores/repo_store';
import Flash from '../../flash';
const RepoHelper = {
monacoInstance: null,
getDefaultActiveFile() {
return {
id: '',
active: true,
binary: false,
extension: '',
html: '',
mime_type: '',
name: '',
plain: '',
size: 0,
url: '',
raw: false,
newContent: '',
changed: false,
loading: false,
};
},
key: '',
Time: window.performance
&& window.performance.now
? window.performance
: Date,
getFileExtension(fileName) {
return fileName.split('.').pop();
},
getLanguageIDForFile(file, langs) {
const ext = RepoHelper.getFileExtension(file.name);
const foundLang = RepoHelper.findLanguage(ext, langs);
return foundLang ? foundLang.id : 'plaintext';
},
setMonacoModelFromLanguage() {
RepoHelper.monacoInstance.setModel(null);
const languages = RepoHelper.monaco.languages.getLanguages();
const languageID = RepoHelper.getLanguageIDForFile(Store.activeFile, languages);
const newModel = RepoHelper.monaco.editor.createModel(Store.blobRaw, languageID);
RepoHelper.monacoInstance.setModel(newModel);
},
findLanguage(ext, langs) {
return langs.find(lang => lang.extensions && lang.extensions.indexOf(`.${ext}`) > -1);
},
setDirectoryOpen(tree, title) {
if (!tree) return;
Object.assign(tree, {
opened: true,
});
RepoHelper.updateHistoryEntry(tree.url, title);
Store.path = tree.path;
},
setDirectoryToClosed(entry) {
Object.assign(entry, {
opened: false,
files: [],
});
},
isRenderable() {
const okExts = ['md', 'svg'];
return okExts.indexOf(Store.activeFile.extension) > -1;
},
setBinaryDataAsBase64(file) {
Service.getBase64Content(file.raw_path)
.then((response) => {
Store.blobRaw = response;
file.base64 = response; // eslint-disable-line no-param-reassign
})
.catch(RepoHelper.loadingError);
},
getContent(treeOrFile, emptyFiles = false) {
let file = treeOrFile;
if (!Store.files.length) {
Store.loading.tree = true;
}
return Service.getContent()
.then((response) => {
const data = response.data;
if (response.headers && response.headers['page-title']) data.pageTitle = decodeURI(response.headers['page-title']);
if (data.path && !Store.isInitialRoot) {
Store.isRoot = data.path === '/';
Store.isInitialRoot = Store.isRoot;
}
if (file && file.type === 'blob') {
if (!file) file = data;
Store.binary = data.binary;
if (data.binary) {
// file might be undefined
RepoHelper.setBinaryDataAsBase64(data);
Store.setViewToPreview();
} else if (!Store.isPreviewView() && !data.render_error) {
Service.getRaw(data)
.then((rawResponse) => {
Store.blobRaw = rawResponse.data;
data.plain = rawResponse.data;
RepoHelper.setFile(data, file);
}).catch(RepoHelper.loadingError);
}
if (Store.isPreviewView()) {
RepoHelper.setFile(data, file);
}
} else {
Store.loading.tree = false;
RepoHelper.setDirectoryOpen(file, data.pageTitle || data.name);
if (emptyFiles) {
Store.files = [];
}
this.addToDirectory(file, data);
Store.prevURL = Service.blobURLtoParentTree(Service.url);
}
}).catch(RepoHelper.loadingError);
},
addToDirectory(file, data) {
const tree = file || Store;
// TODO: Figure out why `popstate` is being triggered in the specs
if (!tree.files) return;
const files = tree.files.concat(this.dataToListOfFiles(data, file ? file.level + 1 : 0));
tree.files = files;
},
setFile(data, file) {
const newFile = data;
newFile.url = file.url || Service.url; // Grab the URL from service, happens on page refresh.
if (newFile.render_error === 'too_large' || newFile.render_error === 'collapsed') {
newFile.tooLarge = true;
}
newFile.newContent = file.newContent ? file.newContent : '';
Store.addToOpenedFiles(newFile);
Store.setActiveFiles(newFile);
},
serializeRepoEntity(type, entity, level = 0) {
const {
id,
url,
name,
icon,
last_commit,
tree_url,
path,
tempFile,
active,
opened,
} = entity;
return {
id,
type,
name,
url,
tree_url,
path,
level,
tempFile,
icon: `fa-${icon}`,
files: [],
loading: false,
opened,
active,
// eslint-disable-next-line camelcase
lastCommit: last_commit ? {
url: `${Store.projectUrl}/commit/${last_commit.id}`,
message: last_commit.message,
updatedAt: last_commit.committed_date,
} : {},
};
},
scrollTabsRight() {
const tabs = document.getElementById('tabs');
if (!tabs) return;
tabs.scrollLeft = tabs.scrollWidth;
},
dataToListOfFiles(data, level) {
const { blobs, trees, submodules } = data;
return [
...trees.map(tree => RepoHelper.serializeRepoEntity('tree', tree, level)),
...submodules.map(submodule => RepoHelper.serializeRepoEntity('submodule', submodule, level)),
...blobs.map(blob => RepoHelper.serializeRepoEntity('blob', blob, level)),
];
},
genKey() {
return RepoHelper.Time.now().toFixed(3);
},
updateHistoryEntry(url, title) {
const history = window.history;
RepoHelper.key = RepoHelper.genKey();
if (document.location.pathname !== url) {
history.pushState({ key: RepoHelper.key }, '', url);
}
if (title) {
document.title = title;
}
},
findOpenedFileFromActive() {
return Store.openedFiles.find(openedFile => Store.activeFile.id === openedFile.id);
},
getFileFromPath(path) {
return Store.openedFiles.find(file => file.url === path);
},
loadingError() {
Flash('Unable to load this content at this time.');
},
openEditMode() {
Store.editMode = true;
Store.currentBlobView = 'repo-editor';
},
updateStorePath(path) {
Store.path = path;
},
findOrCreateEntry(type, tree, name) {
let exists = true;
let foundEntry = tree.files.find(dir => dir.type === type && dir.name === name);
if (!foundEntry) {
foundEntry = RepoHelper.serializeRepoEntity(type, {
id: name,
name,
path: tree.path ? `${tree.path}/${name}` : name,
icon: type === 'tree' ? 'folder' : 'file-text-o',
tempFile: true,
opened: true,
active: true,
}, tree.level !== undefined ? tree.level + 1 : 0);
exists = false;
tree.files.push(foundEntry);
}
return {
entry: foundEntry,
exists,
};
},
removeAllTmpFiles(storeFilesKey) {
Store[storeFilesKey] = Store[storeFilesKey].filter(f => !f.tempFile);
},
createNewEntry(options, openEditMode = true) {
const {
name,
type,
content = '',
base64 = false,
} = options;
const originalPath = Store.path;
let entryName = name;
if (entryName.indexOf(`${originalPath}/`) !== 0) {
this.updateStorePath('');
} else {
entryName = entryName.replace(`${originalPath}/`, '');
}
if (entryName === '') return;
const fileName = type === 'tree' ? '.gitkeep' : entryName;
let tree = Store;
if (type === 'tree') {
const dirNames = entryName.split('/');
dirNames.forEach((dirName) => {
if (dirName === '') return;
tree = this.findOrCreateEntry('tree', tree, dirName).entry;
});
}
if ((type === 'tree' && tree.tempFile) || type === 'blob') {
const file = this.findOrCreateEntry('blob', tree, fileName);
if (file.exists) {
Flash(`The name "${file.entry.name}" is already taken in this directory.`);
} else {
const { entry } = file;
entry.newContent = content;
entry.base64 = base64;
if (entry.base64) {
entry.render_error = true;
}
this.setFile(entry, entry);
if (openEditMode) {
this.openEditMode();
} else {
file.entry.render_error = 'asdsad';
}
}
}
this.updateStorePath(originalPath);
},
};
export default RepoHelper;
import axios from 'axios';
import csrf from '../../lib/utils/csrf';
import Store from '../stores/repo_store';
import Api from '../../api';
import Helper from '../helpers/repo_helper';
axios.defaults.headers.common[csrf.headerKey] = csrf.token;
const RepoService = {
url: '',
options: {
params: {
format: 'json',
},
},
createBranchPath: '/api/:version/projects/:id/repository/branches',
richExtensionRegExp: /md/,
getRaw(file) {
if (file.tempFile) {
return Promise.resolve({
data: file.newContent ? file.newContent : '',
});
}
return axios.get(file.raw_path, {
// Stop Axios from parsing a JSON file into a JS object
transformResponse: [res => res],
});
},
buildParams(url = this.url) {
// shallow clone object without reference
const params = Object.assign({}, this.options.params);
if (this.urlIsRichBlob(url)) params.viewer = 'rich';
return params;
},
urlIsRichBlob(url = this.url) {
const extension = Helper.getFileExtension(url);
return this.richExtensionRegExp.test(extension);
},
getContent(url = this.url) {
const params = this.buildParams(url);
return axios.get(url, {
params,
});
},
getBase64Content(url = this.url) {
const request = axios.get(url, {
responseType: 'arraybuffer',
});
return request.then(response => this.bufferToBase64(response.data));
},
bufferToBase64(data) {
return new Buffer(data, 'binary').toString('base64');
},
blobURLtoParentTree(url) {
const urlArray = url.split('/');
urlArray.pop();
const blobIndex = urlArray.lastIndexOf('blob');
if (blobIndex > -1) urlArray[blobIndex] = 'tree';
return urlArray.join('/');
},
getBranch() {
return Api.branchSingle(Store.projectId, Store.currentBranch);
},
commitFiles(payload) {
return Api.commitMultiple(Store.projectId, payload)
.then(this.commitFlash);
},
createBranch(payload) {
const url = Api.buildUrl(this.createBranchPath)
.replace(':id', Store.projectId);
return axios.post(url, payload);
},
commitFlash(data) {
if (data.short_id && data.stats) {
window.Flash(`Your changes have been committed. Commit ${data.short_id} with ${data.stats.additions} additions, ${data.stats.deletions} deletions.`, 'notice');
} else {
window.Flash(data.message);
}
},
};
export default RepoService;
......@@ -19,11 +19,7 @@ module NavHelper
end
elsif current_path?('jobs#show')
%w[page-gutter build-sidebar right-sidebar-expanded]
elsif current_path?('wikis#show') ||
current_path?('wikis#edit') ||
current_path?('wikis#update') ||
current_path?('wikis#history') ||
current_path?('wikis#git_access')
elsif current_controller?('wikis') && current_action?('show', 'create', 'edit', 'update', 'history', 'git_access')
%w[page-gutter wiki-sidebar right-sidebar-expanded]
else
[]
......
......@@ -7,8 +7,15 @@ class Identity < ActiveRecord::Base
validates :extern_uid, allow_blank: true, uniqueness: { scope: :provider }
validates :user_id, uniqueness: { scope: :provider }
<<<<<<< HEAD
scope :with_provider, ->(provider) { where(provider: provider) }
scope :with_extern_uid, ->(provider, extern_uid) { where(extern_uid: extern_uid, provider: provider) }
=======
scope :with_extern_uid, ->(provider, extern_uid) do
extern_uid = Gitlab::LDAP::Person.normalize_dn(extern_uid) if provider.starts_with?('ldap')
where(extern_uid: extern_uid, provider: provider)
end
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
def ldap?
provider.starts_with?('ldap')
......
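The new `with_extern_uid` scope normalizes LDAP distinguished names before querying, so a DN that arrives with different casing or spacing still resolves to the stored identity. A minimal sketch of that behavior, reusing the example values from the spec added further down in this commit (the `user` record is assumed to exist):

```ruby
identity = Identity.create!(
  provider:   'ldapmain',
  extern_uid: 'uid=john smith,ou=people,dc=example,dc=com',
  user:       user # assumed pre-existing User record
)

# The incoming DN is normalized for LDAP providers, so formatting
# differences do not prevent the lookup from matching.
Identity.with_extern_uid('ldapmain', 'uid=John Smith, ou=People, dc=example, dc=com').first
# => identity
```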
......@@ -1092,6 +1092,7 @@ class Project < ActiveRecord::Base
def hook_attrs(backward: true)
attrs = {
id: id,
name: name,
description: description,
web_url: web_url,
......
......@@ -4,5 +4,8 @@ class MergeRequestBasicEntity < IssuableSidebarEntity
expose :merge_error
expose :state
expose :source_branch_exists?, as: :source_branch_exists
<<<<<<< HEAD
expose :rebase_in_progress?, as: :rebase_in_progress
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
end
module Milestones
class PromoteService < Milestones::BaseService
<<<<<<< HEAD
prepend EE::Milestones::PromoteService
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
PromoteMilestoneError = Class.new(StandardError)
def execute(milestone)
......
---
title: Adds project_id to pipeline hook data
merge_request: 15044
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Fix overlap of right-sidebar and main content when creating a Wiki page
merge_request:
author:
type: fixed
---
title: Bump carrierwave to 1.2.1
merge_request: 15072
author: Takuya Noguchi
type: other
---
title: Normalize LDAP DN when looking up identity
merge_request:
author:
type: fixed
---
title: Fix missing Import/Export issue assignees
merge_request:
author:
type: fixed
---
title: Returns a ssh url for go-get=1
merge_request: 14990
author: gvieira37
type: fixed
---
title: Upgrade Ruby to 2.3.5 to include security patches
merge_request: 15099
author:
type: security
......@@ -19,24 +19,30 @@ For example, for GitLab version 10.5.7:
* `5` represents minor version
* `7` represents patch number
## Security releases
## Patch releases
The current stable release will receive security patches and bug fixes
(eg. `8.9.0` -> `8.9.1`).
Patch releases usually only include bug fixes and are only done for the current
stable release. That said, in some cases, we may backport a fix to a previous
stable release, depending on the severity of the bug.
Feature releases will mark the next supported stable
release where the minor version is increased numerically by increments of one
(eg. `8.9 -> 8.10`).
For instance, if we release `10.1.1` with a fix for a severe bug introduced in
`10.0.0`, we could backport the fix to a new `10.0.x` patch release.
Our current policy is to support one stable release at any given time.
For medium-level security issues, we may consider backporting to the previous two
### Security releases
Security releases are a special kind of patch release that only include security
fixes and patches (see below).
Our current policy is to support one stable release at any given time, but for
medium-level security issues, we may backport security fixes to the previous two
monthly releases.
For very serious security issues, there is [precedent](https://about.gitlab.com/2016/05/02/cve-2016-4340-patches/)
to backport security fixes to even more monthly releases of GitLab. This decision
is made on a case-by-case basis.
For very serious security issues, there is
[precedent](https://about.gitlab.com/2016/05/02/cve-2016-4340-patches/)
to backport security fixes to even more monthly releases of GitLab.
This decision is made on a case-by-case basis.
## Version support
## Upgrade recommendations
We encourage everyone to run the latest stable release to ensure that you can
easily upgrade to the most secure and feature-rich GitLab experience. In order
......@@ -70,7 +76,6 @@ Please see the table below for some examples:
| -------------- | ------------ | ------------------------ | ---------------- |
| 9.4.5 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.4.5` | `8.17.7` is the last version in version `8` |
| 10.1.4 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.5.8` -> `10.1.4` | `8.17.7` is the last version in version `8`, `9.5.8` is the last version in version `9` |
|
More information about the release procedures can be found in our
[release-tools documentation][rel]. You may also want to read our
......
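The version components described above can be read straight off the version string; a throwaway sketch (not part of the documentation change) using plain string splitting:

```ruby
# Split a GitLab version string into the components described above.
major, minor, patch = '10.5.7'.split('.').map(&:to_i)
# major => 10 (major version), minor => 5 (minor version), patch => 7 (patch number)
```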
......@@ -101,6 +101,7 @@ X-Gitlab-Event: Push Hook
"user_avatar": "https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80",
"project_id": 15,
"project":{
"id": 15,
"name":"Diaspora",
"description":"",
"web_url":"http://example.com/mike/diaspora",
......@@ -181,6 +182,7 @@ X-Gitlab-Event: Tag Push Hook
"user_avatar": "https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80",
"project_id": 1,
"project":{
"id": 1,
"name":"Example",
"description":"",
"web_url":"http://example.com/jsmith/example",
......@@ -231,6 +233,7 @@ X-Gitlab-Event: Issue Hook
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
},
"project": {
"id": 1,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlabhq/gitlab-test",
......@@ -360,6 +363,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlabhq/gitlab-test",
......@@ -439,6 +443,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlab-org/gitlab-test",
......@@ -565,6 +570,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlab-org/gitlab-test",
......@@ -643,6 +649,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlab-org/gitlab-test",
......@@ -717,6 +724,7 @@ X-Gitlab-Event: Merge Request Hook
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
},
"project": {
"id": 1,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlabhq/gitlab-test",
......@@ -873,6 +881,7 @@ X-Gitlab-Event: Wiki Page Hook
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon"
},
"project": {
"id": 1,
"name": "awesome-project",
"description": "This is awesome",
"web_url": "http://example.com/root/awesome-project",
......@@ -944,6 +953,7 @@ X-Gitlab-Event: Pipeline Hook
"avatar_url": "http://www.gravatar.com/avatar/e32bd13e2add097461cb96824b7a829c?s=80\u0026d=identicon"
},
"project":{
"id": 1,
"name": "Gitlab Test",
"description": "Atque in sunt eos similique dolores voluptatem.",
"web_url": "http://192.168.64.1:3005/gitlab-org/gitlab-test",
......
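Every payload above now carries the project id inside the `project` object (in addition to the existing top-level `project_id` where one was already present). A rough sketch of a receiver picking it up; `request_body` and `request_headers` are assumed to hold the raw POST body and headers:

```ruby
require 'json'

payload    = JSON.parse(request_body)            # raw webhook POST body (assumed)
event_name = request_headers['X-Gitlab-Event']   # e.g. "Push Hook" (assumed)
project_id = payload.dig('project', 'id')        # now present in every hook payload
```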
......@@ -35,10 +35,14 @@ module Gitlab
end
def delete_page(page_path, commit_details)
assert_type!(commit_details, CommitDetails)
gollum_wiki.delete_page(gollum_page_by_path(page_path), commit_details.to_h)
nil
@repository.gitaly_migrate(:wiki_delete_page) do |is_enabled|
if is_enabled
gitaly_delete_page(page_path, commit_details)
gollum_wiki.clear_cache
else
gollum_delete_page(page_path, commit_details)
end
end
end
def update_page(page_path, title, format, content, commit_details)
......@@ -54,14 +58,13 @@ module Gitlab
end
def page(title:, version: nil, dir: nil)
if version
version = Gitlab::Git::Commit.find(@repository, version).id
@repository.gitaly_migrate(:wiki_find_page) do |is_enabled|
if is_enabled
gitaly_find_page(title: title, version: version, dir: dir)
else
gollum_find_page(title: title, version: version, dir: dir)
end
end
gollum_page = gollum_wiki.page(title, version, dir)
return unless gollum_page
new_page(gollum_page)
end
def file(name, version)
......@@ -135,9 +138,38 @@ module Gitlab
raise Gitlab::Git::Wiki::DuplicatePageError, e.message
end
def gollum_delete_page(page_path, commit_details)
assert_type!(commit_details, CommitDetails)
gollum_wiki.delete_page(gollum_page_by_path(page_path), commit_details.to_h)
nil
end
def gollum_find_page(title:, version: nil, dir: nil)
if version
version = Gitlab::Git::Commit.find(@repository, version).id
end
gollum_page = gollum_wiki.page(title, version, dir)
return unless gollum_page
new_page(gollum_page)
end
def gitaly_write_page(name, format, content, commit_details)
gitaly_wiki_client.write_page(name, format, content, commit_details)
end
def gitaly_delete_page(page_path, commit_details)
gitaly_wiki_client.delete_page(page_path, commit_details)
end
def gitaly_find_page(title:, version: nil, dir: nil)
wiki_page, version = gitaly_wiki_client.find_page(title: title, version: version, dir: dir)
return unless wiki_page
Gitlab::Git::WikiPage.new(wiki_page, version)
end
end
end
end
module Gitlab
module GitalyClient
class WikiPage
FIELDS = %i(title format url_path path name historical raw_data).freeze
attr_accessor(*FIELDS)
def initialize(params)
params = params.with_indifferent_access
FIELDS.each do |field|
instance_variable_set("@#{field}", params[field])
end
end
def historical?
@historical
end
def format
@format.to_sym
end
end
end
end
......@@ -15,11 +15,7 @@ module Gitlab
repository: @gitaly_repo,
name: GitalyClient.encode(name),
format: format.to_s,
commit_details: Gitaly::WikiCommitDetails.new(
name: GitalyClient.encode(commit_details.name),
email: GitalyClient.encode(commit_details.email),
message: GitalyClient.encode(commit_details.message)
)
commit_details: gitaly_commit_details(commit_details)
)
strio = StringIO.new(content)
......@@ -40,6 +36,59 @@ module Gitlab
raise Gitlab::Git::Wiki::DuplicatePageError, error
end
end
def delete_page(page_path, commit_details)
request = Gitaly::WikiDeletePageRequest.new(
repository: @gitaly_repo,
page_path: GitalyClient.encode(page_path),
commit_details: gitaly_commit_details(commit_details)
)
GitalyClient.call(@repository.storage, :wiki_service, :wiki_delete_page, request)
end
def find_page(title:, version: nil, dir: nil)
request = Gitaly::WikiFindPageRequest.new(
repository: @gitaly_repo,
title: GitalyClient.encode(title),
revision: GitalyClient.encode(version),
directory: GitalyClient.encode(dir)
)
response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_find_page, request)
wiki_page = version = nil
response.each do |message|
page = message.page
next unless page
if wiki_page
wiki_page.raw_data << page.raw_data
else
wiki_page = GitalyClient::WikiPage.new(page.to_h)
# All gRPC strings in a response are frozen, so we get
# an unfrozen version here so appending in the if clause above doesn't blow up.
wiki_page.raw_data = wiki_page.raw_data.dup
version = Gitlab::Git::WikiPageVersion.new(
Gitlab::Git::Commit.decorate(@repository, page.version.commit),
page.version.format
)
end
end
[wiki_page, version]
end
private
def gitaly_commit_details(commit_details)
Gitaly::WikiCommitDetails.new(
name: GitalyClient.encode(commit_details.name),
email: GitalyClient.encode(commit_details.email),
message: GitalyClient.encode(commit_details.message)
)
end
end
end
end
......@@ -19,6 +19,7 @@ project_tree:
- milestone:
- events:
- :push_event_payload
- :issue_assignees
- snippets:
- :award_emoji
- notes:
......
......@@ -4,7 +4,7 @@ module Gitlab
module LDAP
class AuthHash < Gitlab::OAuth::AuthHash
def uid
Gitlab::LDAP::Person.normalize_dn(super)
@uid ||= Gitlab::LDAP::Person.normalize_dn(super)
end
private
......
......@@ -11,10 +11,11 @@ module Gitlab
class << self
def find_by_uid_and_provider(uid, provider)
# LDAP distinguished name is case-insensitive
uid = Gitlab::LDAP::Person.normalize_dn(uid)
identity = ::Identity
.where(provider: provider)
.iwhere(extern_uid: uid).last
.where(extern_uid: uid).last
identity && identity.user
end
end
......
......@@ -4,6 +4,7 @@ module Gitlab
module Middleware
class Go
include ActionView::Helpers::TagHelper
include Gitlab::CurrentSettings
PROJECT_PATH_REGEX = %r{\A(#{Gitlab::PathRegex.full_namespace_route_regex}/#{Gitlab::PathRegex.project_route_regex})/}.freeze
......@@ -37,10 +38,20 @@ module Gitlab
end
def go_body(path)
project_url = URI.join(Gitlab.config.gitlab.url, path)
config = Gitlab.config
project_url = URI.join(config.gitlab.url, path)
import_prefix = strip_url(project_url.to_s)
meta_tag = tag :meta, name: 'go-import', content: "#{import_prefix} git #{project_url}.git"
repository_url = case current_application_settings.enabled_git_access_protocol
when 'ssh'
shell = config.gitlab_shell
port = ":#{shell.ssh_port}" unless shell.ssh_port == 22
"ssh://#{shell.ssh_user}@#{shell.ssh_host}#{port}/#{path}.git"
when 'http', nil
"#{project_url}.git"
end
meta_tag = tag :meta, name: 'go-import', content: "#{import_prefix} git #{repository_url}"
head_tag = content_tag :head, meta_tag
content_tag :html, head_tag
end
......
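With this change the `go-import` meta tag advertises an SSH clone URL when `enabled_git_access_protocol` is set to `ssh`, and falls back to HTTP otherwise. An illustration of the emitted content for the SSH case, with example host and path values:

```ruby
host = 'gitlab.example.com'   # example value for Gitlab.config.gitlab.host
path = 'gitlab-org/gitlab-ce' # example project path
repository_url = "ssh://git@#{host}/#{path}.git" # a non-default SSH port would be appended to the host

puts %(<meta name="go-import" content="#{host}/#{path} git #{repository_url}" />)
# When HTTP is enabled (or no protocol is restricted), the repository URL
# falls back to http://<host>/<path>.git instead.
```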
......@@ -5,7 +5,7 @@ module SystemCheck
set_check_pass -> { "yes (#{self.current_version})" }
def self.required_version
@required_version ||= Gitlab::VersionInfo.new(2, 3, 3)
@required_version ||= Gitlab::VersionInfo.new(2, 3, 5)
end
def self.current_version
......
......@@ -8,6 +8,7 @@ module QA
autoload :Release, 'qa/runtime/release'
autoload :User, 'qa/runtime/user'
autoload :Namespace, 'qa/runtime/namespace'
autoload :Scenario, 'qa/runtime/scenario'
end
##
......@@ -80,6 +81,11 @@ module QA
module Admin
autoload :Menu, 'qa/page/admin/menu'
end
module Mattermost
autoload :Main, 'qa/page/mattermost/main'
autoload :Login, 'qa/page/mattermost/login'
end
end
##
......
module QA
module Page
module Mattermost
class Login < Page::Base
def initialize
visit(Runtime::Scenario.mattermost + '/login')
end
def sign_in_using_oauth
click_link class: 'btn btn-custom-login gitlab'
if page.has_content?('Authorize GitLab Mattermost to use your account?')
click_button 'Authorize'
end
end
end
end
end
end
module QA
module Page
module Mattermost
class Main < Page::Base
def initialize
visit(Runtime::Scenario.mattermost)
end
end
end
end
end
module QA
module Runtime
module Scenario
extend self
attr_accessor :mattermost
end
end
end
......@@ -8,6 +8,11 @@ module QA
#
class Mattermost < Scenario::Entrypoint
tags :core, :mattermost
def perform(address, mattermost, *files)
Runtime::Scenario.mattermost = mattermost
super(address, files)
end
end
end
end
......
module QA
feature 'logging in to Mattermost', :mattermost do
scenario 'can use gitlab oauth' do
Page::Main::Entry.act { sign_in_using_credentials }
Page::Mattermost::Login.act { sign_in_using_oauth }
Page::Mattermost::Main.perform do |page|
expect(page).to have_content(/(Welcome to: Mattermost|Logout GitLab Mattermost)/)
end
end
end
end
describe QA::Scenario::Entrypoint do
subject do
Class.new(QA::Scenario::Entrypoint) do
tags :rspec
end
end
context '#perform' do
let(:config) { spy('Specs::Config') }
let(:release) { spy('Runtime::Release') }
let(:runner) { spy('Specs::Runner') }
before do
allow(config).to receive(:perform) { |&block| block.call config }
allow(runner).to receive(:perform) { |&block| block.call runner }
stub_const('QA::Specs::Config', config)
stub_const('QA::Runtime::Release', release)
stub_const('QA::Specs::Runner', runner)
end
it 'should set address' do
subject.perform("hello")
expect(config).to have_received(:address=).with("hello")
end
context 'no paths' do
it 'should call runner with default arguments' do
subject.perform("test")
expect(runner).to have_received(:rspec)
.with(hash_including(files: 'qa/specs/features'))
end
end
context 'specifying paths' do
it 'should call runner with paths' do
subject.perform('test', 'path1', 'path2')
expect(runner).to have_received(:rspec)
.with(hash_including(files: %w(path1 path2)))
end
end
end
end
......@@ -10,7 +10,10 @@
"title": { "type": "string" },
"moved_to_id": { "type": ["integer", "null"] },
"project_id": { "type": "integer" },
<<<<<<< HEAD
"weight": { "type": ["integer", "null"] },
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
"web_url": { "type": "string" },
"state": { "type": "string" },
"create_note_path": { "type": "string" },
......
......@@ -9,7 +9,10 @@
"human_time_estimate": { "type": ["string", "null"] },
"human_total_time_spent": { "type": ["string", "null"] },
"merge_error": { "type": ["string", "null"] },
<<<<<<< HEAD
"rebase_in_progress": { "type": "boolean" },
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
"assignee_id": { "type": ["integer", "null"] },
"subscribed": { "type": ["boolean", "null"] },
"participants": { "type": "array" }
......
......@@ -68,6 +68,7 @@ describe('new dropdown component', () => {
.catch(done.fail);
});
});
<<<<<<< HEAD
<<<<<<< HEAD
describe('createEntryInStore', () => {
......@@ -226,4 +227,6 @@ describe('new dropdown component', () => {
});
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
});
......@@ -195,6 +195,7 @@ describe('new file modal component', () => {
vm.$el.remove();
});
<<<<<<< HEAD
<<<<<<< HEAD
describe('createEntryInStore', () => {
......@@ -218,4 +219,6 @@ describe('new file modal component', () => {
});
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
});
import Vue from 'vue';
import upload from '~/repo/components/new_dropdown/upload.vue';
import eventHub from '~/repo/event_hub';
import createComponent from '../../../helpers/vue_mount_component_helper';
import store from '~/repo/stores';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { resetStore } from '../../helpers';
describe('new dropdown upload', () => {
let vm;
......@@ -9,13 +10,17 @@ describe('new dropdown upload', () => {
beforeEach(() => {
const Component = Vue.extend(upload);
vm = createComponent(Component, {
currentPath: '',
vm = createComponentWithStore(Component, store, {
path: '',
});
vm.$mount();
});
afterEach(() => {
vm.$destroy();
resetStore(vm.$store);
});
describe('readFile', () => {
......@@ -56,45 +61,43 @@ describe('new dropdown upload', () => {
name: 'file',
};
beforeEach(() => {
spyOn(eventHub, '$emit');
});
it('emits createNewEntry event', () => {
it('creates new file', (done) => {
vm.createFile(target, file, true);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'file',
type: 'blob',
content: 'content',
toggleModal: false,
base64: false,
}, true);
vm.$nextTick(() => {
expect(vm.$store.state.tree.length).toBe(1);
expect(vm.$store.state.tree[0].name).toBe(file.name);
expect(vm.$store.state.tree[0].content).toBe(target.result);
done();
});
});
it('createNewEntry event name contains current path', () => {
vm.currentPath = 'testing';
it('creates new file in path', (done) => {
vm.$store.state.path = 'testing';
vm.createFile(target, file, true);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'testing/file',
type: 'blob',
content: 'content',
toggleModal: false,
base64: false,
}, true);
vm.$nextTick(() => {
expect(vm.$store.state.tree.length).toBe(1);
expect(vm.$store.state.tree[0].name).toBe(file.name);
expect(vm.$store.state.tree[0].content).toBe(target.result);
expect(vm.$store.state.tree[0].path).toBe(`testing/${file.name}`);
done();
});
});
it('splits content on base64 if binary', () => {
it('splits content on base64 if binary', (done) => {
vm.createFile(binaryTarget, file, false);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'file',
type: 'blob',
content: 'base64content',
toggleModal: false,
base64: true,
}, false);
vm.$nextTick(() => {
expect(vm.$store.state.tree.length).toBe(1);
expect(vm.$store.state.tree[0].name).toBe(file.name);
expect(vm.$store.state.tree[0].content).toBe(binaryTarget.result.split('base64,')[1]);
expect(vm.$store.state.tree[0].base64).toBe(true);
done();
});
});
});
});
......@@ -3,7 +3,6 @@
import '~/gl_dropdown';
import '~/search_autocomplete';
import '~/lib/utils/common_utils';
import 'vendor/fuzzaldrin-plus';
(function() {
var assertLinks, dashboardIssuesPath, dashboardMRsPath, groupIssuesPath, groupMRsPath, groupName, mockDashboardOptions, mockGroupOptions, mockProjectOptions, projectIssuesPath, projectMRsPath, projectName, userId, widget;
......
......@@ -257,6 +257,7 @@ describe Gitlab::Database do
end
end
<<<<<<< HEAD
describe '#disable_prepared_statements' do
it 'disables prepared statements' do
config = {}
......@@ -305,6 +306,8 @@ describe Gitlab::Database do
end
end
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
describe '#sanitize_timestamp' do
let(:max_timestamp) { Time.at((1 << 31) - 1) }
......
......@@ -314,3 +314,6 @@ timelogs:
- user
push_event_payload:
- event
issue_assignees:
- issue
- assignee
\ No newline at end of file
......@@ -43,7 +43,7 @@
"issues": [
{
"id": 40,
"title": "Voluptatem amet doloribus deleniti eos maxime repudiandae molestias.",
"title": "Voluptatem",
"assignee_id": 1,
"author_id": 22,
"project_id": 5,
......@@ -60,6 +60,12 @@
"due_date": null,
"moved_to_id": null,
"test_ee_field": "test",
"issue_assignees": [
{
"user_id": 1,
"issue_id": 1
}
],
"milestone": {
"id": 1,
"title": "test milestone",
......
......@@ -63,6 +63,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
end
it 'has issue assignees' do
expect(Issue.where(title: 'Voluptatem').first.issue_assignees).not_to be_empty
end
it 'contains the merge access levels on a protected branch' do
expect(ProtectedBranch.first.merge_access_levels).not_to be_empty
end
......
......@@ -81,6 +81,10 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(saved_project_json['issues'].first['notes']).not_to be_empty
end
it 'has issue assignees' do
expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
end
it 'has author on issue comments' do
expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
end
......
......@@ -526,3 +526,6 @@ ProjectAutoDevops:
- project_id
- created_at
- updated_at
IssueAssignee:
- user_id
- issue_id
\ No newline at end of file
require 'spec_helper'
describe Gitlab::LDAP::Authentication do
let(:user) { create(:omniauth_user, extern_uid: dn) }
let(:dn) { 'uid=john,ou=people,dc=example,dc=com' }
let(:dn) { 'uid=John Smith, ou=People, dc=example, dc=com' }
let(:user) { create(:omniauth_user, extern_uid: Gitlab::LDAP::Person.normalize_dn(dn)) }
let(:login) { 'john' }
let(:password) { 'password' }
......
......@@ -47,23 +47,25 @@ describe Gitlab::LDAP::User do
end
describe '.find_by_uid_and_provider' do
let(:dn) { 'CN=John Åström, CN=Users, DC=Example, DC=com' }
it 'retrieves the correct user' do
special_info = {
name: 'John Åström',
email: 'john@example.com',
nickname: 'jastrom'
}
special_hash = OmniAuth::AuthHash.new(uid: 'CN=John Åström,CN=Users,DC=Example,DC=com', provider: 'ldapmain', info: special_info)
special_hash = OmniAuth::AuthHash.new(uid: dn, provider: 'ldapmain', info: special_info)
special_chars_user = described_class.new(special_hash)
user = special_chars_user.save
expect(described_class.find_by_uid_and_provider(special_hash.uid, special_hash.provider)).to eq user
expect(described_class.find_by_uid_and_provider(dn, 'ldapmain')).to eq user
end
end
describe 'find or create' do
it "finds the user if already existing" do
create(:omniauth_user, extern_uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain')
create(:omniauth_user, extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', provider: 'ldapmain')
expect { ldap_user.save }.not_to change { User.count }
end
......
......@@ -17,89 +17,115 @@ describe Gitlab::Middleware::Go do
describe 'when go-get=1' do
let(:current_user) { nil }
context 'with simple 2-segment project path' do
let!(:project) { create(:project, :private) }
shared_examples 'go-get=1' do |enabled_protocol:|
context 'with simple 2-segment project path' do
let!(:project) { create(:project, :private) }
context 'with subpackages' do
let(:path) { "#{project.full_path}/subpackage" }
context 'with subpackages' do
let(:path) { "#{project.full_path}/subpackage" }
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
end
end
context 'without subpackages' do
let(:path) { project.full_path }
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
it 'returns the full project path' do
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
end
end
context 'with a nested project path' do
let(:group) { create(:group, :nested) }
let!(:project) { create(:project, :public, namespace: group) }
context 'without subpackages' do
let(:path) { project.full_path }
shared_examples 'a nested project' do
context 'when the project is public' do
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
end
context 'when the project is private' do
before do
project.update_attribute(:visibility_level, Project::PRIVATE)
end
context 'with a nested project path' do
let(:group) { create(:group, :nested) }
let!(:project) { create(:project, :public, namespace: group) }
context 'with access to the project' do
let(:current_user) { project.creator }
shared_examples 'a nested project' do
context 'when the project is public' do
it 'returns the full project path' do
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
context 'when the project is private' do
before do
project.team.add_master(current_user)
project.update_attribute(:visibility_level, Project::PRIVATE)
end
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
context 'with access to the project' do
let(:current_user) { project.creator }
before do
project.team.add_master(current_user)
end
it 'returns the full project path' do
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
end
context 'without access to the project' do
it 'returns the 2-segment group path' do
expect_response_with_path(go, group.full_path)
context 'without access to the project' do
it 'returns the 2-segment group path' do
expect_response_with_path(go, enabled_protocol, group.full_path)
end
end
end
end
end
context 'with subpackages' do
let(:path) { "#{project.full_path}/subpackage" }
context 'with subpackages' do
let(:path) { "#{project.full_path}/subpackage" }
it_behaves_like 'a nested project'
end
it_behaves_like 'a nested project'
end
context 'with a subpackage that is not a valid project path' do
let(:path) { "#{project.full_path}/---subpackage" }
context 'with a subpackage that is not a valid project path' do
let(:path) { "#{project.full_path}/---subpackage" }
it_behaves_like 'a nested project'
end
context 'without subpackages' do
let(:path) { project.full_path }
it_behaves_like 'a nested project'
it_behaves_like 'a nested project'
end
end
context 'without subpackages' do
let(:path) { project.full_path }
context 'with a bogus path' do
let(:path) { "http:;url=http:&sol;&sol;www.example.com'http-equiv='refresh'x='?go-get=1" }
it 'skips go-import generation' do
expect(app).to receive(:call).and_return('no-go')
it_behaves_like 'a nested project'
go
end
end
end
context 'with SSH disabled' do
before do
stub_application_setting(enabled_git_access_protocol: 'http')
end
include_examples 'go-get=1', enabled_protocol: :http
end
context 'with a bogus path' do
let(:path) { "http:;url=http:&sol;&sol;www.example.com'http-equiv='refresh'x='?go-get=1" }
context 'with HTTP disabled' do
before do
stub_application_setting(enabled_git_access_protocol: 'ssh')
end
it 'skips go-import generation' do
expect(app).to receive(:call).and_return('no-go')
include_examples 'go-get=1', enabled_protocol: :ssh
end
go
context 'with nothing disabled' do
before do
stub_application_setting(enabled_git_access_protocol: nil)
end
include_examples 'go-get=1', enabled_protocol: nil
end
end
......@@ -113,10 +139,16 @@ describe Gitlab::Middleware::Go do
middleware.call(env)
end
def expect_response_with_path(response, path)
def expect_response_with_path(response, protocol, path)
repository_url = case protocol
when :ssh
"ssh://git@#{Gitlab.config.gitlab.host}/#{path}.git"
when :http, nil
"http://#{Gitlab.config.gitlab.host}/#{path}.git"
end
expect(response[0]).to eq(200)
expect(response[1]['Content-Type']).to eq('text/html')
expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git http://#{Gitlab.config.gitlab.host}/#{path}.git" /></head></html>}
expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git #{repository_url}" /></head></html>}
expect(response[2].body).to eq([expected_body])
end
end
......
......@@ -4,7 +4,7 @@ describe Gitlab::OAuth::User do
let(:oauth_user) { described_class.new(auth_hash) }
let(:gl_user) { oauth_user.gl_user }
let(:uid) { 'my-uid' }
let(:dn) { 'uid=user1,ou=People,dc=example' }
let(:dn) { 'uid=user1,ou=people,dc=example' }
let(:provider) { 'my-provider' }
let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash) }
let(:info_hash) do
......
......@@ -7,7 +7,7 @@ describe Gitlab::Saml::User do
let(:saml_user) { described_class.new(auth_hash) }
let(:gl_user) { saml_user.gl_user }
let(:uid) { 'my-uid' }
let(:dn) { 'uid=user1,ou=People,dc=example' }
let(:dn) { 'uid=user1,ou=people,dc=example' }
let(:provider) { 'saml' }
let(:raw_info_attr) { { 'groups' => %w(Developers Freelancers Designers) } }
let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash, extra: { raw_info: OneLogin::RubySaml::Attributes.new(raw_info_attr) }) }
......
require 'spec_helper'
RSpec.describe Identity do
describe Identity do
describe 'relations' do
it { is_expected.to belong_to(:user) }
end
......@@ -22,4 +22,16 @@ RSpec.describe Identity do
expect(other_identity.ldap?).to be_falsey
end
end
describe '.with_extern_uid' do
context 'LDAP identity' do
let!(:ldap_identity) { create(:identity, provider: 'ldapmain', extern_uid: 'uid=john smith,ou=people,dc=example,dc=com') }
it 'finds the identity when the DN is formatted differently' do
identity = described_class.with_extern_uid('ldapmain', 'uid=John Smith, ou=People, dc=example, dc=com').first
expect(identity).to eq(ldap_identity)
end
end
end
end
......@@ -125,31 +125,41 @@ describe ProjectWiki do
end
describe "#find_page" do
before do
create_page("index page", "This is an awesome Gollum Wiki")
end
shared_examples 'finding a wiki page' do
before do
create_page("index page", "This is an awesome Gollum Wiki")
end
after do
destroy_page(subject.pages.first.page)
end
after do
destroy_page(subject.pages.first.page)
end
it "returns the latest version of the page if it exists" do
page = subject.find_page("index page")
expect(page.title).to eq("index page")
end
it "returns the latest version of the page if it exists" do
page = subject.find_page("index page")
expect(page.title).to eq("index page")
end
it "returns nil if the page does not exist" do
expect(subject.find_page("non-existant")).to eq(nil)
it "returns nil if the page does not exist" do
expect(subject.find_page("non-existant")).to eq(nil)
end
it "can find a page by slug" do
page = subject.find_page("index-page")
expect(page.title).to eq("index page")
end
it "returns a WikiPage instance" do
page = subject.find_page("index page")
expect(page).to be_a WikiPage
end
end
it "can find a page by slug" do
page = subject.find_page("index-page")
expect(page.title).to eq("index page")
context 'when Gitaly wiki_find_page is enabled' do
it_behaves_like 'finding a wiki page'
end
it "returns a WikiPage instance" do
page = subject.find_page("index page")
expect(page).to be_a WikiPage
context 'when Gitaly wiki_find_page is disabled', :skip_gitaly_mock do
it_behaves_like 'finding a wiki page'
end
end
......@@ -273,23 +283,33 @@ describe ProjectWiki do
end
describe "#delete_page" do
before do
create_page("index", "some content")
@page = subject.wiki.page(title: "index")
end
shared_examples 'deleting a wiki page' do
before do
create_page("index", "some content")
@page = subject.wiki.page(title: "index")
end
it "deletes the page" do
subject.delete_page(@page)
expect(subject.pages.count).to eq(0)
end
it "deletes the page" do
subject.delete_page(@page)
expect(subject.pages.count).to eq(0)
end
it 'updates project activity' do
subject.delete_page(@page)
it 'updates project activity' do
subject.delete_page(@page)
project.reload
project.reload
expect(project.last_activity_at).to be_within(1.minute).of(Time.now)
expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now)
expect(project.last_activity_at).to be_within(1.minute).of(Time.now)
expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now)
end
end
context 'when Gitaly wiki_delete_page is enabled' do
it_behaves_like 'deleting a wiki page'
end
context 'when Gitaly wiki_delete_page is disabled', :skip_gitaly_mock do
it_behaves_like 'deleting a wiki page'
end
end
......@@ -351,6 +371,6 @@ describe ProjectWiki do
end
def destroy_page(page)
subject.delete_page(page, commit_details)
subject.delete_page(page, "test commit")
end
end
......@@ -402,7 +402,7 @@ describe WikiPage do
def destroy_page(title)
page = wiki.wiki.page(title: title)
wiki.delete_page(page, commit_details)
wiki.delete_page(page, "test commit")
end
def get_slugs(page_or_dir)
......
......@@ -2675,6 +2675,10 @@ function-bind@^1.1.1, function-bind@~1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
fuzzaldrin-plus@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/fuzzaldrin-plus/-/fuzzaldrin-plus-0.5.0.tgz#ef5f26f0c2fc7e9e9a16ea149a802d6cb4804b1e"
gauge@~2.7.3:
version "2.7.4"
resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
......