Commit 8e38899d authored by Valery Sizov

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce into ce_upstream[ci skip]

parents 71c8b3e7 6306e797
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.3-golang-1.8-git-2.13-phantomjs-2.1-node-8.x-yarn-1.0-postgresql-9.6"
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.5-golang-1.8-git-2.13-phantomjs-2.1-node-8.x-yarn-1.0-postgresql-9.6"
.default-cache: &default-cache
key: "ruby-233-with-yarn"
key: "ruby-235-with-yarn"
paths:
- vendor/ruby
- .yarn-cache/
......@@ -480,7 +480,11 @@ db:migrate:reset-mysql:
variables:
SETUP_DB: "false"
script:
<<<<<<< HEAD
- git fetch origin v8.14.10-ee
=======
- git fetch origin v9.3.0
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
- git checkout -f FETCH_HEAD
- bundle install $BUNDLE_INSTALL_FLAGS
- cp config/gitlab.yml.example config/gitlab.yml
......@@ -576,7 +580,7 @@ karma:
<<: *dedicated-runner
<<: *except-docs
<<: *pull-cache
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.3-golang-1.8-git-2.13-chrome-61.0-node-8.x-yarn-1.0-postgresql-9.6"
image: "dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.3.5-golang-1.8-git-2.13-chrome-61.0-node-8.x-yarn-1.0-postgresql-9.6"
stage: test
variables:
BABEL_ENV: "coverage"
......
......@@ -93,7 +93,7 @@ gem 'kaminari', '~> 1.0'
gem 'hamlit', '~> 2.6.1'
# Files attachments
gem 'carrierwave', '~> 1.1'
gem 'carrierwave', '~> 1.2'
# Drag and Drop UI
gem 'dropzonejs-rails', '~> 0.7.1'
......@@ -414,7 +414,7 @@ group :ed25519 do
end
# Gitaly GRPC client
gem 'gitaly-proto', '~> 0.48.0', require: 'gitaly'
gem 'gitaly-proto', '~> 0.51.0', require: 'gitaly'
gem 'toml-rb', '~> 0.3.15', require: false
......
......@@ -115,7 +115,7 @@ GEM
capybara-screenshot (1.0.14)
capybara (>= 1.0, < 3)
launchy
carrierwave (1.1.0)
carrierwave (1.2.1)
activemodel (>= 4.0.0)
activesupport (>= 4.0.0)
mime-types (>= 1.16)
......@@ -297,7 +297,7 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gherkin-ruby (0.3.2)
gitaly-proto (0.48.0)
gitaly-proto (0.51.0)
google-protobuf (~> 3.1)
grpc (~> 1.0)
github-linguist (4.7.6)
......@@ -1021,7 +1021,7 @@ DEPENDENCIES
bundler-audit (~> 0.5.0)
capybara (~> 2.15.0)
capybara-screenshot (~> 1.0.0)
carrierwave (~> 1.1)
carrierwave (~> 1.2)
charlock_holmes (~> 0.7.5)
chronic (~> 0.10.2)
chronic_duration (~> 0.10.6)
......@@ -1065,7 +1065,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.2.0)
gitaly-proto (~> 0.48.0)
gitaly-proto (~> 0.51.0)
github-linguist (~> 4.7.0)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-license (~> 1.0)
......
/* eslint-disable func-names, no-underscore-dangle, space-before-function-paren, no-var, one-var, one-var-declaration-per-line, prefer-rest-params, max-len, vars-on-top, wrap-iife, no-unused-vars, quotes, no-shadow, no-cond-assign, prefer-arrow-callback, no-return-assign, no-else-return, camelcase, comma-dangle, no-lonely-if, guard-for-in, no-restricted-syntax, consistent-return, prefer-template, no-param-reassign, no-loop-func, no-mixed-operators */
/* global fuzzaldrinPlus */
import _ from 'underscore';
import fuzzaldrinPlus from 'fuzzaldrin-plus';
import { isObject } from './lib/utils/type_utility';
var GitLabDropdown, GitLabDropdownFilter, GitLabDropdownRemote, GitLabDropdownInput;
......
......@@ -12,7 +12,6 @@ import svg4everybody from 'svg4everybody';
// libraries with import side-effects
import 'mousetrap';
import 'mousetrap/plugins/pause/mousetrap-pause';
import 'vendor/fuzzaldrin-plus';
// expose common libraries as globals (TODO: remove these)
window.jQuery = jQuery;
......
/* eslint-disable func-names, space-before-function-paren, no-var, prefer-rest-params, wrap-iife, quotes, consistent-return, one-var, one-var-declaration-per-line, no-cond-assign, max-len, object-shorthand, no-param-reassign, comma-dangle, prefer-template, no-unused-vars, no-return-assign */
/* global fuzzaldrinPlus */
import fuzzaldrinPlus from 'fuzzaldrin-plus';
(function() {
this.ProjectFindFile = (function() {
......
......@@ -27,6 +27,7 @@
toggleModalOpen() {
this.openModal = !this.openModal;
},
<<<<<<< HEAD
<<<<<<< HEAD
createNewEntryInStore(options, openEditMode = true) {
RepoHelper.createNewEntry(options, openEditMode);
......@@ -43,6 +44,8 @@
eventHub.$off('createNewEntry', this.createNewEntryInStore);
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
},
};
</script>
......@@ -75,7 +78,7 @@
</li>
<li>
<upload
:current-path="currentPath"
:path="path"
/>
</li>
<li>
......
......@@ -5,11 +5,15 @@
export default {
props: {
<<<<<<< HEAD
path: {
=======
type: {
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
type: String,
required: true,
},
type: {
path: {
type: String,
required: true,
},
......@@ -27,12 +31,17 @@
'createTempEntry',
]),
createEntryInStore() {
<<<<<<< HEAD
<<<<<<< HEAD
eventHub.$emit('createNewEntry', {
name: this.entryName,
=======
this.createTempEntry({
name: this.entryName.replace(new RegExp(`^${this.path}/`), ''),
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
type: this.type,
toggleModal: true,
});
<<<<<<< HEAD
=======
this.createTempEntry({
name: this.entryName.replace(new RegExp(`^${this.path}/`), ''),
......@@ -41,6 +50,10 @@
this.toggleModalOpen();
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
this.toggleModalOpen();
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
},
toggleModalOpen() {
this.$emit('toggle');
......
<script>
import eventHub from '../../event_hub';
import { mapActions } from 'vuex';
export default {
props: {
currentPath: {
path: {
type: String,
required: true,
},
},
methods: {
...mapActions([
'createTempEntry',
]),
createFile(target, file, isText) {
const { name } = file;
const nameWithPath = `${this.currentPath !== '' ? `${this.currentPath}/` : ''}${name}`;
let { result } = target;
if (!isText) {
result = result.split('base64,')[1];
}
eventHub.$emit('createNewEntry', {
name: nameWithPath,
this.createTempEntry({
name,
type: 'blob',
content: result,
toggleModal: false,
base64: !isText,
}, isText);
});
},
readFile(file) {
const reader = new FileReader();
......
......@@ -38,6 +38,7 @@ export default {
makeCommit(newBranch = false) {
const createNewBranch = newBranch || this.startNewMR;
<<<<<<< HEAD
<<<<<<< HEAD
makeCommit(newBranch) {
// see https://docs.gitlab.com/ce/api/commits.html#create-a-commit-with-multiple-files-and-actions
......@@ -51,6 +52,8 @@ export default {
const branch = newBranch ? `${this.currentBranch}-${this.currentShortHash}` : this.currentBranch;
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
const payload = {
branch: createNewBranch ? `${this.currentBranch}-${new Date().getTime().toString()}` : this.currentBranch,
commit_message: this.commitMessage,
......
......@@ -45,11 +45,15 @@ export default {
</p>
</div>
<div
<<<<<<< HEAD
<<<<<<< HEAD
v-else-if="activeFile.tooLarge"
=======
v-else-if="renderErrorTooLarge"
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
v-else-if="renderErrorTooLarge"
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
class="vertical-center render-error">
<p class="text-center">
The source could not be displayed because it is too large. You can <a :href="activeFile.rawPath" download>download</a> it instead.
......
import Service from '../services/repo_service';
import Store from '../stores/repo_store';
import Flash from '../../flash';
const RepoHelper = {
monacoInstance: null,
getDefaultActiveFile() {
return {
id: '',
active: true,
binary: false,
extension: '',
html: '',
mime_type: '',
name: '',
plain: '',
size: 0,
url: '',
raw: false,
newContent: '',
changed: false,
loading: false,
};
},
key: '',
Time: window.performance
&& window.performance.now
? window.performance
: Date,
getFileExtension(fileName) {
return fileName.split('.').pop();
},
getLanguageIDForFile(file, langs) {
const ext = RepoHelper.getFileExtension(file.name);
const foundLang = RepoHelper.findLanguage(ext, langs);
return foundLang ? foundLang.id : 'plaintext';
},
setMonacoModelFromLanguage() {
RepoHelper.monacoInstance.setModel(null);
const languages = RepoHelper.monaco.languages.getLanguages();
const languageID = RepoHelper.getLanguageIDForFile(Store.activeFile, languages);
const newModel = RepoHelper.monaco.editor.createModel(Store.blobRaw, languageID);
RepoHelper.monacoInstance.setModel(newModel);
},
findLanguage(ext, langs) {
return langs.find(lang => lang.extensions && lang.extensions.indexOf(`.${ext}`) > -1);
},
setDirectoryOpen(tree, title) {
if (!tree) return;
Object.assign(tree, {
opened: true,
});
RepoHelper.updateHistoryEntry(tree.url, title);
Store.path = tree.path;
},
setDirectoryToClosed(entry) {
Object.assign(entry, {
opened: false,
files: [],
});
},
isRenderable() {
const okExts = ['md', 'svg'];
return okExts.indexOf(Store.activeFile.extension) > -1;
},
setBinaryDataAsBase64(file) {
Service.getBase64Content(file.raw_path)
.then((response) => {
Store.blobRaw = response;
file.base64 = response; // eslint-disable-line no-param-reassign
})
.catch(RepoHelper.loadingError);
},
getContent(treeOrFile, emptyFiles = false) {
let file = treeOrFile;
if (!Store.files.length) {
Store.loading.tree = true;
}
return Service.getContent()
.then((response) => {
const data = response.data;
if (response.headers && response.headers['page-title']) data.pageTitle = decodeURI(response.headers['page-title']);
if (data.path && !Store.isInitialRoot) {
Store.isRoot = data.path === '/';
Store.isInitialRoot = Store.isRoot;
}
if (file && file.type === 'blob') {
if (!file) file = data;
Store.binary = data.binary;
if (data.binary) {
// file might be undefined
RepoHelper.setBinaryDataAsBase64(data);
Store.setViewToPreview();
} else if (!Store.isPreviewView() && !data.render_error) {
Service.getRaw(data)
.then((rawResponse) => {
Store.blobRaw = rawResponse.data;
data.plain = rawResponse.data;
RepoHelper.setFile(data, file);
}).catch(RepoHelper.loadingError);
}
if (Store.isPreviewView()) {
RepoHelper.setFile(data, file);
}
} else {
Store.loading.tree = false;
RepoHelper.setDirectoryOpen(file, data.pageTitle || data.name);
if (emptyFiles) {
Store.files = [];
}
this.addToDirectory(file, data);
Store.prevURL = Service.blobURLtoParentTree(Service.url);
}
}).catch(RepoHelper.loadingError);
},
addToDirectory(file, data) {
const tree = file || Store;
// TODO: Figure out why `popstate` is being triggered in the specs
if (!tree.files) return;
const files = tree.files.concat(this.dataToListOfFiles(data, file ? file.level + 1 : 0));
tree.files = files;
},
setFile(data, file) {
const newFile = data;
newFile.url = file.url || Service.url; // Grab the URL from service, happens on page refresh.
if (newFile.render_error === 'too_large' || newFile.render_error === 'collapsed') {
newFile.tooLarge = true;
}
newFile.newContent = file.newContent ? file.newContent : '';
Store.addToOpenedFiles(newFile);
Store.setActiveFiles(newFile);
},
serializeRepoEntity(type, entity, level = 0) {
const {
id,
url,
name,
icon,
last_commit,
tree_url,
path,
tempFile,
active,
opened,
} = entity;
return {
id,
type,
name,
url,
tree_url,
path,
level,
tempFile,
icon: `fa-${icon}`,
files: [],
loading: false,
opened,
active,
// eslint-disable-next-line camelcase
lastCommit: last_commit ? {
url: `${Store.projectUrl}/commit/${last_commit.id}`,
message: last_commit.message,
updatedAt: last_commit.committed_date,
} : {},
};
},
scrollTabsRight() {
const tabs = document.getElementById('tabs');
if (!tabs) return;
tabs.scrollLeft = tabs.scrollWidth;
},
dataToListOfFiles(data, level) {
const { blobs, trees, submodules } = data;
return [
...trees.map(tree => RepoHelper.serializeRepoEntity('tree', tree, level)),
...submodules.map(submodule => RepoHelper.serializeRepoEntity('submodule', submodule, level)),
...blobs.map(blob => RepoHelper.serializeRepoEntity('blob', blob, level)),
];
},
genKey() {
return RepoHelper.Time.now().toFixed(3);
},
updateHistoryEntry(url, title) {
const history = window.history;
RepoHelper.key = RepoHelper.genKey();
if (document.location.pathname !== url) {
history.pushState({ key: RepoHelper.key }, '', url);
}
if (title) {
document.title = title;
}
},
findOpenedFileFromActive() {
return Store.openedFiles.find(openedFile => Store.activeFile.id === openedFile.id);
},
getFileFromPath(path) {
return Store.openedFiles.find(file => file.url === path);
},
loadingError() {
Flash('Unable to load this content at this time.');
},
openEditMode() {
Store.editMode = true;
Store.currentBlobView = 'repo-editor';
},
updateStorePath(path) {
Store.path = path;
},
findOrCreateEntry(type, tree, name) {
let exists = true;
let foundEntry = tree.files.find(dir => dir.type === type && dir.name === name);
if (!foundEntry) {
foundEntry = RepoHelper.serializeRepoEntity(type, {
id: name,
name,
path: tree.path ? `${tree.path}/${name}` : name,
icon: type === 'tree' ? 'folder' : 'file-text-o',
tempFile: true,
opened: true,
active: true,
}, tree.level !== undefined ? tree.level + 1 : 0);
exists = false;
tree.files.push(foundEntry);
}
return {
entry: foundEntry,
exists,
};
},
removeAllTmpFiles(storeFilesKey) {
Store[storeFilesKey] = Store[storeFilesKey].filter(f => !f.tempFile);
},
createNewEntry(options, openEditMode = true) {
const {
name,
type,
content = '',
base64 = false,
} = options;
const originalPath = Store.path;
let entryName = name;
if (entryName.indexOf(`${originalPath}/`) !== 0) {
this.updateStorePath('');
} else {
entryName = entryName.replace(`${originalPath}/`, '');
}
if (entryName === '') return;
const fileName = type === 'tree' ? '.gitkeep' : entryName;
let tree = Store;
if (type === 'tree') {
const dirNames = entryName.split('/');
dirNames.forEach((dirName) => {
if (dirName === '') return;
tree = this.findOrCreateEntry('tree', tree, dirName).entry;
});
}
if ((type === 'tree' && tree.tempFile) || type === 'blob') {
const file = this.findOrCreateEntry('blob', tree, fileName);
if (file.exists) {
Flash(`The name "${file.entry.name}" is already taken in this directory.`);
} else {
const { entry } = file;
entry.newContent = content;
entry.base64 = base64;
if (entry.base64) {
entry.render_error = true;
}
this.setFile(entry, entry);
if (openEditMode) {
this.openEditMode();
} else {
file.entry.render_error = 'asdsad';
}
}
}
this.updateStorePath(originalPath);
},
};
export default RepoHelper;
import axios from 'axios';
import csrf from '../../lib/utils/csrf';
import Store from '../stores/repo_store';
import Api from '../../api';
import Helper from '../helpers/repo_helper';
axios.defaults.headers.common[csrf.headerKey] = csrf.token;
const RepoService = {
url: '',
options: {
params: {
format: 'json',
},
},
createBranchPath: '/api/:version/projects/:id/repository/branches',
richExtensionRegExp: /md/,
getRaw(file) {
if (file.tempFile) {
return Promise.resolve({
data: file.newContent ? file.newContent : '',
});
}
return axios.get(file.raw_path, {
// Stop Axios from parsing a JSON file into a JS object
transformResponse: [res => res],
});
},
buildParams(url = this.url) {
// shallow clone object without reference
const params = Object.assign({}, this.options.params);
if (this.urlIsRichBlob(url)) params.viewer = 'rich';
return params;
},
urlIsRichBlob(url = this.url) {
const extension = Helper.getFileExtension(url);
return this.richExtensionRegExp.test(extension);
},
getContent(url = this.url) {
const params = this.buildParams(url);
return axios.get(url, {
params,
});
},
getBase64Content(url = this.url) {
const request = axios.get(url, {
responseType: 'arraybuffer',
});
return request.then(response => this.bufferToBase64(response.data));
},
bufferToBase64(data) {
return new Buffer(data, 'binary').toString('base64');
},
blobURLtoParentTree(url) {
const urlArray = url.split('/');
urlArray.pop();
const blobIndex = urlArray.lastIndexOf('blob');
if (blobIndex > -1) urlArray[blobIndex] = 'tree';
return urlArray.join('/');
},
getBranch() {
return Api.branchSingle(Store.projectId, Store.currentBranch);
},
commitFiles(payload) {
return Api.commitMultiple(Store.projectId, payload)
.then(this.commitFlash);
},
createBranch(payload) {
const url = Api.buildUrl(this.createBranchPath)
.replace(':id', Store.projectId);
return axios.post(url, payload);
},
commitFlash(data) {
if (data.short_id && data.stats) {
window.Flash(`Your changes have been committed. Commit ${data.short_id} with ${data.stats.additions} additions, ${data.stats.deletions} deletions.`, 'notice');
} else {
window.Flash(data.message);
}
},
};
export default RepoService;
......@@ -19,11 +19,7 @@ module NavHelper
end
elsif current_path?('jobs#show')
%w[page-gutter build-sidebar right-sidebar-expanded]
elsif current_path?('wikis#show') ||
current_path?('wikis#edit') ||
current_path?('wikis#update') ||
current_path?('wikis#history') ||
current_path?('wikis#git_access')
elsif current_controller?('wikis') && current_action?('show', 'create', 'edit', 'update', 'history', 'git_access')
%w[page-gutter wiki-sidebar right-sidebar-expanded]
else
[]
......
......@@ -7,8 +7,15 @@ class Identity < ActiveRecord::Base
validates :extern_uid, allow_blank: true, uniqueness: { scope: :provider }
validates :user_id, uniqueness: { scope: :provider }
<<<<<<< HEAD
scope :with_provider, ->(provider) { where(provider: provider) }
scope :with_extern_uid, ->(provider, extern_uid) { where(extern_uid: extern_uid, provider: provider) }
=======
scope :with_extern_uid, ->(provider, extern_uid) do
extern_uid = Gitlab::LDAP::Person.normalize_dn(extern_uid) if provider.starts_with?('ldap')
where(extern_uid: extern_uid, provider: provider)
end
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
def ldap?
provider.starts_with?('ldap')
......
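The new `with_extern_uid` scope normalizes the DN before querying, so callers can pass a DN in any LDAP formatting. A minimal usage sketch, mirroring the Identity model spec further down in this diff (it assumes the stored identity already has the normalized form 'uid=john smith,ou=people,dc=example,dc=com' for provider 'ldapmain'):
# Sketch only; the identity described above is assumed to exist already.
identity = Identity.with_extern_uid('ldapmain', 'uid=John Smith, ou=People, dc=example, dc=com').first
identity # => the stored LDAP identity, despite the different case and spacing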
......@@ -1092,6 +1092,7 @@ class Project < ActiveRecord::Base
def hook_attrs(backward: true)
attrs = {
id: id,
name: name,
description: description,
web_url: web_url,
......
......@@ -4,5 +4,8 @@ class MergeRequestBasicEntity < IssuableSidebarEntity
expose :merge_error
expose :state
expose :source_branch_exists?, as: :source_branch_exists
<<<<<<< HEAD
expose :rebase_in_progress?, as: :rebase_in_progress
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
end
module Milestones
class PromoteService < Milestones::BaseService
<<<<<<< HEAD
prepend EE::Milestones::PromoteService
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
PromoteMilestoneError = Class.new(StandardError)
def execute(milestone)
......
---
title: Adds project_id to pipeline hook data
merge_request: 15044
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Fix overlap of right-sidebar and main content when creating a Wiki page
merge_request:
author:
type: fixed
---
title: Bump carrierwave to 1.2.1
merge_request: 15072
author: Takuya Noguchi
type: other
---
title: Normalize LDAP DN when looking up identity
merge_request:
author:
type: fixed
---
title: Fix missing Import/Export issue assignees
merge_request:
author:
type: fixed
---
title: Returns a ssh url for go-get=1
merge_request: 14990
author: gvieira37
type: fixed
---
title: Upgrade Ruby to 2.3.5 to include security patches
merge_request: 15099
author:
type: security
......@@ -19,24 +19,30 @@ For example, for GitLab version 10.5.7:
* `5` represents minor version
* `7` represents patch number
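As a minimal sketch of the MAJOR.MINOR.PATCH scheme (the helper below is hypothetical, not GitLab code; `Gitlab::VersionInfo` elsewhere in this diff plays a similar role):
# Illustration only: split a version string into its three components.
def parse_version(version)
  major, minor, patch = version.split('.').map(&:to_i)
  { major: major, minor: minor, patch: patch }
end
parse_version('10.5.7') # => { major: 10, minor: 5, patch: 7 }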
## Security releases
## Patch releases
The current stable release will receive security patches and bug fixes
(eg. `8.9.0` -> `8.9.1`).
Patch releases usually only include bug fixes and are only done for the current
stable release. That said, in some cases, we may backport it to a previous stable
release, depending on the severity of the bug.
Feature releases will mark the next supported stable
release where the minor version is increased numerically by increments of one
(eg. `8.9 -> 8.10`).
For instance, if we release `10.1.1` with a fix for a severe bug introduced in
`10.0.0`, we could backport the fix to a new `10.0.x` patch release.
Our current policy is to support one stable release at any given time.
For medium-level security issues, we may consider backporting to the previous two
### Security releases
Security releases are a special kind of patch release that only include security
fixes and patches (see below).
Our current policy is to support one stable release at any given time, but for
medium-level security issues, we may backport security fixes to the previous two
monthly releases.
For very serious security issues, there is [precedent](https://about.gitlab.com/2016/05/02/cve-2016-4340-patches/)
to backport security fixes to even more monthly releases of GitLab. This decision
is made on a case-by-case basis.
For very serious security issues, there is
[precedent](https://about.gitlab.com/2016/05/02/cve-2016-4340-patches/)
to backport security fixes to even more monthly releases of GitLab.
This decision is made on a case-by-case basis.
## Version support
## Upgrade recommendations
We encourage everyone to run the latest stable release to ensure that you can
easily upgrade to the most secure and feature-rich GitLab experience. In order
......@@ -70,7 +76,6 @@ Please see the table below for some examples:
| -------------- | ------------ | ------------------------ | ---------------- |
| 9.4.5 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.4.5` | `8.17.7` is the last version in version `8` |
| 10.1.4 | 8.13.4 | `8.13.4` -> `8.17.7` -> `9.5.8` -> `10.1.4` | `8.17.7` is the last version in version `8`, `9.5.8` is the last version in version `9` |
|
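An illustrative sketch of the rule the table encodes, namely passing through the last release of each intervening major version. The helper is hypothetical, the per-major values are taken from the table above, and the logic is simplified (it assumes `from` is not already the last release of its major):
# Illustration only; not part of GitLab.
LAST_IN_MAJOR = { 8 => '8.17.7', 9 => '9.5.8' }.freeze
def upgrade_path(from, to)
  majors = (from.split('.').first.to_i...to.split('.').first.to_i)
  [from, *majors.map { |m| LAST_IN_MAJOR.fetch(m) }, to]
end
upgrade_path('8.13.4', '10.1.4') # => ["8.13.4", "8.17.7", "9.5.8", "10.1.4"]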
More information about the release procedures can be found in our
[release-tools documentation][rel]. You may also want to read our
......
......@@ -101,6 +101,7 @@ X-Gitlab-Event: Push Hook
"user_avatar": "https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80",
"project_id": 15,
"project":{
"id": 15,
"name":"Diaspora",
"description":"",
"web_url":"http://example.com/mike/diaspora",
......@@ -181,6 +182,7 @@ X-Gitlab-Event: Tag Push Hook
"user_avatar": "https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80",
"project_id": 1,
"project":{
"id": 1,
"name":"Example",
"description":"",
"web_url":"http://example.com/jsmith/example",
......@@ -231,6 +233,7 @@ X-Gitlab-Event: Issue Hook
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
},
"project": {
"id": 1,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlabhq/gitlab-test",
......@@ -360,6 +363,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlabhq/gitlab-test",
......@@ -439,6 +443,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlab-org/gitlab-test",
......@@ -565,6 +570,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlab-org/gitlab-test",
......@@ -643,6 +649,7 @@ X-Gitlab-Event: Note Hook
},
"project_id": 5,
"project":{
"id": 5,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlab-org/gitlab-test",
......@@ -717,6 +724,7 @@ X-Gitlab-Event: Merge Request Hook
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
},
"project": {
"id": 1,
"name":"Gitlab Test",
"description":"Aut reprehenderit ut est.",
"web_url":"http://example.com/gitlabhq/gitlab-test",
......@@ -873,6 +881,7 @@ X-Gitlab-Event: Wiki Page Hook
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon"
},
"project": {
"id": 1,
"name": "awesome-project",
"description": "This is awesome",
"web_url": "http://example.com/root/awesome-project",
......@@ -944,6 +953,7 @@ X-Gitlab-Event: Pipeline Hook
"avatar_url": "http://www.gravatar.com/avatar/e32bd13e2add097461cb96824b7a829c?s=80\u0026d=identicon"
},
"project":{
"id": 1,
"name": "Gitlab Test",
"description": "Atque in sunt eos similique dolores voluptatem.",
"web_url": "http://192.168.64.1:3005/gitlab-org/gitlab-test",
......
......@@ -35,10 +35,14 @@ module Gitlab
end
def delete_page(page_path, commit_details)
assert_type!(commit_details, CommitDetails)
gollum_wiki.delete_page(gollum_page_by_path(page_path), commit_details.to_h)
nil
@repository.gitaly_migrate(:wiki_delete_page) do |is_enabled|
if is_enabled
gitaly_delete_page(page_path, commit_details)
gollum_wiki.clear_cache
else
gollum_delete_page(page_path, commit_details)
end
end
end
def update_page(page_path, title, format, content, commit_details)
......@@ -54,14 +58,13 @@ module Gitlab
end
def page(title:, version: nil, dir: nil)
if version
version = Gitlab::Git::Commit.find(@repository, version).id
@repository.gitaly_migrate(:wiki_find_page) do |is_enabled|
if is_enabled
gitaly_find_page(title: title, version: version, dir: dir)
else
gollum_find_page(title: title, version: version, dir: dir)
end
end
gollum_page = gollum_wiki.page(title, version, dir)
return unless gollum_page
new_page(gollum_page)
end
def file(name, version)
......@@ -135,9 +138,38 @@ module Gitlab
raise Gitlab::Git::Wiki::DuplicatePageError, e.message
end
def gollum_delete_page(page_path, commit_details)
assert_type!(commit_details, CommitDetails)
gollum_wiki.delete_page(gollum_page_by_path(page_path), commit_details.to_h)
nil
end
def gollum_find_page(title:, version: nil, dir: nil)
if version
version = Gitlab::Git::Commit.find(@repository, version).id
end
gollum_page = gollum_wiki.page(title, version, dir)
return unless gollum_page
new_page(gollum_page)
end
def gitaly_write_page(name, format, content, commit_details)
gitaly_wiki_client.write_page(name, format, content, commit_details)
end
def gitaly_delete_page(page_path, commit_details)
gitaly_wiki_client.delete_page(page_path, commit_details)
end
def gitaly_find_page(title:, version: nil, dir: nil)
wiki_page, version = gitaly_wiki_client.find_page(title: title, version: version, dir: dir)
return unless wiki_page
Gitlab::Git::WikiPage.new(wiki_page, version)
end
end
end
end
module Gitlab
module GitalyClient
class WikiPage
FIELDS = %i(title format url_path path name historical raw_data).freeze
attr_accessor(*FIELDS)
def initialize(params)
params = params.with_indifferent_access
FIELDS.each do |field|
instance_variable_set("@#{field}", params[field])
end
end
def historical?
@historical
end
def format
@format.to_sym
end
end
end
end
......@@ -15,11 +15,7 @@ module Gitlab
repository: @gitaly_repo,
name: GitalyClient.encode(name),
format: format.to_s,
commit_details: Gitaly::WikiCommitDetails.new(
name: GitalyClient.encode(commit_details.name),
email: GitalyClient.encode(commit_details.email),
message: GitalyClient.encode(commit_details.message)
)
commit_details: gitaly_commit_details(commit_details)
)
strio = StringIO.new(content)
......@@ -40,6 +36,59 @@ module Gitlab
raise Gitlab::Git::Wiki::DuplicatePageError, error
end
end
def delete_page(page_path, commit_details)
request = Gitaly::WikiDeletePageRequest.new(
repository: @gitaly_repo,
page_path: GitalyClient.encode(page_path),
commit_details: gitaly_commit_details(commit_details)
)
GitalyClient.call(@repository.storage, :wiki_service, :wiki_delete_page, request)
end
def find_page(title:, version: nil, dir: nil)
request = Gitaly::WikiFindPageRequest.new(
repository: @gitaly_repo,
title: GitalyClient.encode(title),
revision: GitalyClient.encode(version),
directory: GitalyClient.encode(dir)
)
response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_find_page, request)
wiki_page = version = nil
response.each do |message|
page = message.page
next unless page
if wiki_page
wiki_page.raw_data << page.raw_data
else
wiki_page = GitalyClient::WikiPage.new(page.to_h)
# All gRPC strings in a response are frozen, so we get
# an unfrozen version here so appending in the else clause below doesn't blow up.
wiki_page.raw_data = wiki_page.raw_data.dup
version = Gitlab::Git::WikiPageVersion.new(
Gitlab::Git::Commit.decorate(@repository, page.version.commit),
page.version.format
)
end
end
[wiki_page, version]
end
private
def gitaly_commit_details(commit_details)
Gitaly::WikiCommitDetails.new(
name: GitalyClient.encode(commit_details.name),
email: GitalyClient.encode(commit_details.email),
message: GitalyClient.encode(commit_details.message)
)
end
end
end
end
......@@ -19,6 +19,7 @@ project_tree:
- milestone:
- events:
- :push_event_payload
- :issue_assignees
- snippets:
- :award_emoji
- notes:
......
......@@ -4,7 +4,7 @@ module Gitlab
module LDAP
class AuthHash < Gitlab::OAuth::AuthHash
def uid
Gitlab::LDAP::Person.normalize_dn(super)
@uid ||= Gitlab::LDAP::Person.normalize_dn(super)
end
private
......
......@@ -11,10 +11,11 @@ module Gitlab
class << self
def find_by_uid_and_provider(uid, provider)
# LDAP distinguished name is case-insensitive
uid = Gitlab::LDAP::Person.normalize_dn(uid)
identity = ::Identity
.where(provider: provider)
.iwhere(extern_uid: uid).last
.where(extern_uid: uid).last
identity && identity.user
end
end
......
......@@ -4,6 +4,7 @@ module Gitlab
module Middleware
class Go
include ActionView::Helpers::TagHelper
include Gitlab::CurrentSettings
PROJECT_PATH_REGEX = %r{\A(#{Gitlab::PathRegex.full_namespace_route_regex}/#{Gitlab::PathRegex.project_route_regex})/}.freeze
......@@ -37,10 +38,20 @@ module Gitlab
end
def go_body(path)
project_url = URI.join(Gitlab.config.gitlab.url, path)
config = Gitlab.config
project_url = URI.join(config.gitlab.url, path)
import_prefix = strip_url(project_url.to_s)
meta_tag = tag :meta, name: 'go-import', content: "#{import_prefix} git #{project_url}.git"
repository_url = case current_application_settings.enabled_git_access_protocol
when 'ssh'
shell = config.gitlab_shell
port = ":#{shell.ssh_port}" unless shell.ssh_port == 22
"ssh://#{shell.ssh_user}@#{shell.ssh_host}#{port}/#{path}.git"
when 'http', nil
"#{project_url}.git"
end
meta_tag = tag :meta, name: 'go-import', content: "#{import_prefix} git #{repository_url}"
head_tag = content_tag :head, meta_tag
content_tag :html, head_tag
end
......
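For illustration, the strings the new branch produces for a hypothetical host and project path; the resulting meta tag format matches the middleware spec further down in this diff:
# Illustration only: hypothetical host and path.
host = 'gitlab.example.com'
path = 'group/project'
ssh_url  = "ssh://git@#{host}/#{path}.git"  # a non-default SSH port would appear as ":<port>" after the host
http_url = "http://#{host}/#{path}.git"     # used when enabled_git_access_protocol is 'http' or unset
%{<meta name="go-import" content="#{host}/#{path} git #{ssh_url}" />}
# => <meta name="go-import" content="gitlab.example.com/group/project git ssh://git@gitlab.example.com/group/project.git" />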
......@@ -5,7 +5,7 @@ module SystemCheck
set_check_pass -> { "yes (#{self.current_version})" }
def self.required_version
@required_version ||= Gitlab::VersionInfo.new(2, 3, 3)
@required_version ||= Gitlab::VersionInfo.new(2, 3, 5)
end
def self.current_version
......
......@@ -8,6 +8,7 @@ module QA
autoload :Release, 'qa/runtime/release'
autoload :User, 'qa/runtime/user'
autoload :Namespace, 'qa/runtime/namespace'
autoload :Scenario, 'qa/runtime/scenario'
end
##
......@@ -80,6 +81,11 @@ module QA
module Admin
autoload :Menu, 'qa/page/admin/menu'
end
module Mattermost
autoload :Main, 'qa/page/mattermost/main'
autoload :Login, 'qa/page/mattermost/login'
end
end
##
......
module QA
module Page
module Mattermost
class Login < Page::Base
def initialize
visit(Runtime::Scenario.mattermost + '/login')
end
def sign_in_using_oauth
click_link class: 'btn btn-custom-login gitlab'
if page.has_content?('Authorize GitLab Mattermost to use your account?')
click_button 'Authorize'
end
end
end
end
end
end
module QA
module Page
module Mattermost
class Main < Page::Base
def initialize
visit(Runtime::Scenario.mattermost)
end
end
end
end
end
module QA
module Runtime
module Scenario
extend self
attr_accessor :mattermost
end
end
end
......@@ -8,6 +8,11 @@ module QA
#
class Mattermost < Scenario::Entrypoint
tags :core, :mattermost
def perform(address, mattermost, *files)
Runtime::Scenario.mattermost = mattermost
super(address, files)
end
end
end
end
......
module QA
feature 'logging in to Mattermost', :mattermost do
scenario 'can use gitlab oauth' do
Page::Main::Entry.act { sign_in_using_credentials }
Page::Mattermost::Login.act { sign_in_using_oauth }
Page::Mattermost::Main.perform do |page|
expect(page).to have_content(/(Welcome to: Mattermost|Logout GitLab Mattermost)/)
end
end
end
end
describe QA::Scenario::Entrypoint do
subject do
Class.new(QA::Scenario::Entrypoint) do
tags :rspec
end
end
context '#perform' do
let(:config) { spy('Specs::Config') }
let(:release) { spy('Runtime::Release') }
let(:runner) { spy('Specs::Runner') }
before do
allow(config).to receive(:perform) { |&block| block.call config }
allow(runner).to receive(:perform) { |&block| block.call runner }
stub_const('QA::Specs::Config', config)
stub_const('QA::Runtime::Release', release)
stub_const('QA::Specs::Runner', runner)
end
it 'should set address' do
subject.perform("hello")
expect(config).to have_received(:address=).with("hello")
end
context 'no paths' do
it 'should call runner with default arguments' do
subject.perform("test")
expect(runner).to have_received(:rspec)
.with(hash_including(files: 'qa/specs/features'))
end
end
context 'specifying paths' do
it 'should call runner with paths' do
subject.perform('test', 'path1', 'path2')
expect(runner).to have_received(:rspec)
.with(hash_including(files: %w(path1 path2)))
end
end
end
end
......@@ -10,7 +10,10 @@
"title": { "type": "string" },
"moved_to_id": { "type": ["integer", "null"] },
"project_id": { "type": "integer" },
<<<<<<< HEAD
"weight": { "type": ["integer", "null"] },
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
"web_url": { "type": "string" },
"state": { "type": "string" },
"create_note_path": { "type": "string" },
......
......@@ -9,7 +9,10 @@
"human_time_estimate": { "type": ["string", "null"] },
"human_total_time_spent": { "type": ["string", "null"] },
"merge_error": { "type": ["string", "null"] },
<<<<<<< HEAD
"rebase_in_progress": { "type": "boolean" },
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
"assignee_id": { "type": ["integer", "null"] },
"subscribed": { "type": ["boolean", "null"] },
"participants": { "type": "array" }
......
......@@ -68,6 +68,7 @@ describe('new dropdown component', () => {
.catch(done.fail);
});
});
<<<<<<< HEAD
<<<<<<< HEAD
describe('createEntryInStore', () => {
......@@ -226,4 +227,6 @@ describe('new dropdown component', () => {
});
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
});
......@@ -195,6 +195,7 @@ describe('new file modal component', () => {
vm.$el.remove();
});
<<<<<<< HEAD
<<<<<<< HEAD
describe('createEntryInStore', () => {
......@@ -218,4 +219,6 @@ describe('new file modal component', () => {
});
=======
>>>>>>> e24d1890aea9c550e02d9145f50e8e1ae153a3a3
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
});
import Vue from 'vue';
import upload from '~/repo/components/new_dropdown/upload.vue';
import eventHub from '~/repo/event_hub';
import createComponent from '../../../helpers/vue_mount_component_helper';
import store from '~/repo/stores';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { resetStore } from '../../helpers';
describe('new dropdown upload', () => {
let vm;
......@@ -9,13 +10,17 @@ describe('new dropdown upload', () => {
beforeEach(() => {
const Component = Vue.extend(upload);
vm = createComponent(Component, {
currentPath: '',
vm = createComponentWithStore(Component, store, {
path: '',
});
vm.$mount();
});
afterEach(() => {
vm.$destroy();
resetStore(vm.$store);
});
describe('readFile', () => {
......@@ -56,45 +61,43 @@ describe('new dropdown upload', () => {
name: 'file',
};
beforeEach(() => {
spyOn(eventHub, '$emit');
});
it('emits createNewEntry event', () => {
it('creates new file', (done) => {
vm.createFile(target, file, true);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'file',
type: 'blob',
content: 'content',
toggleModal: false,
base64: false,
}, true);
vm.$nextTick(() => {
expect(vm.$store.state.tree.length).toBe(1);
expect(vm.$store.state.tree[0].name).toBe(file.name);
expect(vm.$store.state.tree[0].content).toBe(target.result);
done();
});
});
it('createNewEntry event name contains current path', () => {
vm.currentPath = 'testing';
it('creates new file in path', (done) => {
vm.$store.state.path = 'testing';
vm.createFile(target, file, true);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'testing/file',
type: 'blob',
content: 'content',
toggleModal: false,
base64: false,
}, true);
vm.$nextTick(() => {
expect(vm.$store.state.tree.length).toBe(1);
expect(vm.$store.state.tree[0].name).toBe(file.name);
expect(vm.$store.state.tree[0].content).toBe(target.result);
expect(vm.$store.state.tree[0].path).toBe(`testing/${file.name}`);
done();
});
});
it('splits content on base64 if binary', () => {
it('splits content on base64 if binary', (done) => {
vm.createFile(binaryTarget, file, false);
expect(eventHub.$emit).toHaveBeenCalledWith('createNewEntry', {
name: 'file',
type: 'blob',
content: 'base64content',
toggleModal: false,
base64: true,
}, false);
vm.$nextTick(() => {
expect(vm.$store.state.tree.length).toBe(1);
expect(vm.$store.state.tree[0].name).toBe(file.name);
expect(vm.$store.state.tree[0].content).toBe(binaryTarget.result.split('base64,')[1]);
expect(vm.$store.state.tree[0].base64).toBe(true);
done();
});
});
});
});
......@@ -3,7 +3,6 @@
import '~/gl_dropdown';
import '~/search_autocomplete';
import '~/lib/utils/common_utils';
import 'vendor/fuzzaldrin-plus';
(function() {
var assertLinks, dashboardIssuesPath, dashboardMRsPath, groupIssuesPath, groupMRsPath, groupName, mockDashboardOptions, mockGroupOptions, mockProjectOptions, projectIssuesPath, projectMRsPath, projectName, userId, widget;
......
......@@ -257,6 +257,7 @@ describe Gitlab::Database do
end
end
<<<<<<< HEAD
describe '#disable_prepared_statements' do
it 'disables prepared statements' do
config = {}
......@@ -305,6 +306,8 @@ describe Gitlab::Database do
end
end
=======
>>>>>>> 6306e797acca358c79c120e5b12c29a5ec604571
describe '#sanitize_timestamp' do
let(:max_timestamp) { Time.at((1 << 31) - 1) }
......
......@@ -314,3 +314,6 @@ timelogs:
- user
push_event_payload:
- event
issue_assignees:
- issue
- assignee
\ No newline at end of file
......@@ -43,7 +43,7 @@
"issues": [
{
"id": 40,
"title": "Voluptatem amet doloribus deleniti eos maxime repudiandae molestias.",
"title": "Voluptatem",
"assignee_id": 1,
"author_id": 22,
"project_id": 5,
......@@ -60,6 +60,12 @@
"due_date": null,
"moved_to_id": null,
"test_ee_field": "test",
"issue_assignees": [
{
"user_id": 1,
"issue_id": 1
}
],
"milestone": {
"id": 1,
"title": "test milestone",
......
......@@ -63,6 +63,10 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(issue.reload.updated_at.to_s).to eq('2016-06-14 15:02:47 UTC')
end
it 'has issue assignees' do
expect(Issue.where(title: 'Voluptatem').first.issue_assignees).not_to be_empty
end
it 'contains the merge access levels on a protected branch' do
expect(ProtectedBranch.first.merge_access_levels).not_to be_empty
end
......
......@@ -81,6 +81,10 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
expect(saved_project_json['issues'].first['notes']).not_to be_empty
end
it 'has issue assignees' do
expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
end
it 'has author on issue comments' do
expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
end
......
......@@ -526,3 +526,6 @@ ProjectAutoDevops:
- project_id
- created_at
- updated_at
IssueAssignee:
- user_id
- issue_id
\ No newline at end of file
require 'spec_helper'
describe Gitlab::LDAP::Authentication do
let(:user) { create(:omniauth_user, extern_uid: dn) }
let(:dn) { 'uid=john,ou=people,dc=example,dc=com' }
let(:dn) { 'uid=John Smith, ou=People, dc=example, dc=com' }
let(:user) { create(:omniauth_user, extern_uid: Gitlab::LDAP::Person.normalize_dn(dn)) }
let(:login) { 'john' }
let(:password) { 'password' }
......
......@@ -47,23 +47,25 @@ describe Gitlab::LDAP::User do
end
describe '.find_by_uid_and_provider' do
let(:dn) { 'CN=John Åström, CN=Users, DC=Example, DC=com' }
it 'retrieves the correct user' do
special_info = {
name: 'John Åström',
email: 'john@example.com',
nickname: 'jastrom'
}
special_hash = OmniAuth::AuthHash.new(uid: 'CN=John Åström,CN=Users,DC=Example,DC=com', provider: 'ldapmain', info: special_info)
special_hash = OmniAuth::AuthHash.new(uid: dn, provider: 'ldapmain', info: special_info)
special_chars_user = described_class.new(special_hash)
user = special_chars_user.save
expect(described_class.find_by_uid_and_provider(special_hash.uid, special_hash.provider)).to eq user
expect(described_class.find_by_uid_and_provider(dn, 'ldapmain')).to eq user
end
end
describe 'find or create' do
it "finds the user if already existing" do
create(:omniauth_user, extern_uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain')
create(:omniauth_user, extern_uid: 'uid=john smith,ou=people,dc=example,dc=com', provider: 'ldapmain')
expect { ldap_user.save }.not_to change { User.count }
end
......
......@@ -17,6 +17,7 @@ describe Gitlab::Middleware::Go do
describe 'when go-get=1' do
let(:current_user) { nil }
shared_examples 'go-get=1' do |enabled_protocol:|
context 'with simple 2-segment project path' do
let!(:project) { create(:project, :private) }
......@@ -24,7 +25,7 @@ describe Gitlab::Middleware::Go do
let(:path) { "#{project.full_path}/subpackage" }
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
......@@ -32,7 +33,7 @@ describe Gitlab::Middleware::Go do
let(:path) { project.full_path }
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
end
......@@ -44,7 +45,7 @@ describe Gitlab::Middleware::Go do
shared_examples 'a nested project' do
context 'when the project is public' do
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
......@@ -61,13 +62,13 @@ describe Gitlab::Middleware::Go do
end
it 'returns the full project path' do
expect_response_with_path(go, project.full_path)
expect_response_with_path(go, enabled_protocol, project.full_path)
end
end
context 'without access to the project' do
it 'returns the 2-segment group path' do
expect_response_with_path(go, group.full_path)
expect_response_with_path(go, enabled_protocol, group.full_path)
end
end
end
......@@ -103,6 +104,31 @@ describe Gitlab::Middleware::Go do
end
end
context 'with SSH disabled' do
before do
stub_application_setting(enabled_git_access_protocol: 'http')
end
include_examples 'go-get=1', enabled_protocol: :http
end
context 'with HTTP disabled' do
before do
stub_application_setting(enabled_git_access_protocol: 'ssh')
end
include_examples 'go-get=1', enabled_protocol: :ssh
end
context 'with nothing disabled' do
before do
stub_application_setting(enabled_git_access_protocol: nil)
end
include_examples 'go-get=1', enabled_protocol: nil
end
end
def go
env = {
'rack.input' => '',
......@@ -113,10 +139,16 @@ describe Gitlab::Middleware::Go do
middleware.call(env)
end
def expect_response_with_path(response, path)
def expect_response_with_path(response, protocol, path)
repository_url = case protocol
when :ssh
"ssh://git@#{Gitlab.config.gitlab.host}/#{path}.git"
when :http, nil
"http://#{Gitlab.config.gitlab.host}/#{path}.git"
end
expect(response[0]).to eq(200)
expect(response[1]['Content-Type']).to eq('text/html')
expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git http://#{Gitlab.config.gitlab.host}/#{path}.git" /></head></html>}
expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git #{repository_url}" /></head></html>}
expect(response[2].body).to eq([expected_body])
end
end
......
......@@ -4,7 +4,7 @@ describe Gitlab::OAuth::User do
let(:oauth_user) { described_class.new(auth_hash) }
let(:gl_user) { oauth_user.gl_user }
let(:uid) { 'my-uid' }
let(:dn) { 'uid=user1,ou=People,dc=example' }
let(:dn) { 'uid=user1,ou=people,dc=example' }
let(:provider) { 'my-provider' }
let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash) }
let(:info_hash) do
......
......@@ -7,7 +7,7 @@ describe Gitlab::Saml::User do
let(:saml_user) { described_class.new(auth_hash) }
let(:gl_user) { saml_user.gl_user }
let(:uid) { 'my-uid' }
let(:dn) { 'uid=user1,ou=People,dc=example' }
let(:dn) { 'uid=user1,ou=people,dc=example' }
let(:provider) { 'saml' }
let(:raw_info_attr) { { 'groups' => %w(Developers Freelancers Designers) } }
let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash, extra: { raw_info: OneLogin::RubySaml::Attributes.new(raw_info_attr) }) }
......
require 'spec_helper'
RSpec.describe Identity do
describe Identity do
describe 'relations' do
it { is_expected.to belong_to(:user) }
end
......@@ -22,4 +22,16 @@ RSpec.describe Identity do
expect(other_identity.ldap?).to be_falsey
end
end
describe '.with_extern_uid' do
context 'LDAP identity' do
let!(:ldap_identity) { create(:identity, provider: 'ldapmain', extern_uid: 'uid=john smith,ou=people,dc=example,dc=com') }
it 'finds the identity when the DN is formatted differently' do
identity = described_class.with_extern_uid('ldapmain', 'uid=John Smith, ou=People, dc=example, dc=com').first
expect(identity).to eq(ldap_identity)
end
end
end
end
......@@ -125,6 +125,7 @@ describe ProjectWiki do
end
describe "#find_page" do
shared_examples 'finding a wiki page' do
before do
create_page("index page", "This is an awesome Gollum Wiki")
end
......@@ -153,6 +154,15 @@ describe ProjectWiki do
end
end
context 'when Gitaly wiki_find_page is enabled' do
it_behaves_like 'finding a wiki page'
end
context 'when Gitaly wiki_find_page is disabled', :skip_gitaly_mock do
it_behaves_like 'finding a wiki page'
end
end
describe '#find_file' do
before do
file = Gollum::File.new(subject.wiki)
......@@ -273,6 +283,7 @@ describe ProjectWiki do
end
describe "#delete_page" do
shared_examples 'deleting a wiki page' do
before do
create_page("index", "some content")
@page = subject.wiki.page(title: "index")
......@@ -293,6 +304,15 @@ describe ProjectWiki do
end
end
context 'when Gitaly wiki_delete_page is enabled' do
it_behaves_like 'deleting a wiki page'
end
context 'when Gitaly wiki_delete_page is disabled', :skip_gitaly_mock do
it_behaves_like 'deleting a wiki page'
end
end
describe '#create_repo!' do
it 'creates a repository' do
expect(raw_repository.exists?).to eq(false)
......@@ -351,6 +371,6 @@ describe ProjectWiki do
end
def destroy_page(page)
subject.delete_page(page, commit_details)
subject.delete_page(page, "test commit")
end
end
......@@ -402,7 +402,7 @@ describe WikiPage do
def destroy_page(title)
page = wiki.wiki.page(title: title)
wiki.delete_page(page, commit_details)
wiki.delete_page(page, "test commit")
end
def get_slugs(page_or_dir)
......
/*!
* fuzzaldrin-plus.js - 0.3.1
* https://github.com/jeancroy/fuzzaldrin-plus
*
* Copyright 2016 - Jean Christophe Roy
* Released under the MIT license
* https://github.com/jeancroy/fuzzaldrin-plus/raw/master/LICENSE.md
*/
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
fuzzaldrinPlus = require('fuzzaldrin-plus');
},{"fuzzaldrin-plus":3}],2:[function(require,module,exports){
(function() {
var PathSeparator, legacy_scorer, pluckCandidates, scorer, sortCandidates;
scorer = require('./scorer');
legacy_scorer = require('./legacy');
pluckCandidates = function(a) {
return a.candidate;
};
sortCandidates = function(a, b) {
return b.score - a.score;
};
PathSeparator = require('path').sep;
module.exports = function(candidates, query, _arg) {
var allowErrors, bAllowErrors, bKey, candidate, coreQuery, key, legacy, maxInners, maxResults, prepQuery, queryHasSlashes, score, scoredCandidates, spotLeft, string, _i, _j, _len, _len1, _ref;
_ref = _arg != null ? _arg : {}, key = _ref.key, maxResults = _ref.maxResults, maxInners = _ref.maxInners, allowErrors = _ref.allowErrors, legacy = _ref.legacy;
scoredCandidates = [];
spotLeft = (maxInners != null) && maxInners > 0 ? maxInners : candidates.length;
bAllowErrors = !!allowErrors;
bKey = key != null;
prepQuery = scorer.prepQuery(query);
if (!legacy) {
for (_i = 0, _len = candidates.length; _i < _len; _i++) {
candidate = candidates[_i];
string = bKey ? candidate[key] : candidate;
if (!string) {
continue;
}
score = scorer.score(string, query, prepQuery, bAllowErrors);
if (score > 0) {
scoredCandidates.push({
candidate: candidate,
score: score
});
if (!--spotLeft) {
break;
}
}
}
} else {
queryHasSlashes = prepQuery.depth > 0;
coreQuery = prepQuery.core;
for (_j = 0, _len1 = candidates.length; _j < _len1; _j++) {
candidate = candidates[_j];
string = key != null ? candidate[key] : candidate;
if (!string) {
continue;
}
score = legacy_scorer.score(string, coreQuery, queryHasSlashes);
if (!queryHasSlashes) {
score = legacy_scorer.basenameScore(string, coreQuery, score);
}
if (score > 0) {
scoredCandidates.push({
candidate: candidate,
score: score
});
}
}
}
scoredCandidates.sort(sortCandidates);
candidates = scoredCandidates.map(pluckCandidates);
if (maxResults != null) {
candidates = candidates.slice(0, maxResults);
}
return candidates;
};
}).call(this);
},{"./legacy":4,"./scorer":6,"path":7}],3:[function(require,module,exports){
(function() {
var PathSeparator, filter, legacy_scorer, matcher, prepQueryCache, scorer;
scorer = require('./scorer');
legacy_scorer = require('./legacy');
filter = require('./filter');
matcher = require('./matcher');
PathSeparator = require('path').sep;
prepQueryCache = null;
module.exports = {
filter: function(candidates, query, options) {
if (!((query != null ? query.length : void 0) && (candidates != null ? candidates.length : void 0))) {
return [];
}
return filter(candidates, query, options);
},
prepQuery: function(query) {
return scorer.prepQuery(query);
},
score: function(string, query, prepQuery, _arg) {
var allowErrors, coreQuery, legacy, queryHasSlashes, score, _ref;
_ref = _arg != null ? _arg : {}, allowErrors = _ref.allowErrors, legacy = _ref.legacy;
if (!((string != null ? string.length : void 0) && (query != null ? query.length : void 0))) {
return 0;
}
if (prepQuery == null) {
prepQuery = prepQueryCache && prepQueryCache.query === query ? prepQueryCache : (prepQueryCache = scorer.prepQuery(query));
}
if (!legacy) {
score = scorer.score(string, query, prepQuery, !!allowErrors);
} else {
queryHasSlashes = prepQuery.depth > 0;
coreQuery = prepQuery.core;
score = legacy_scorer.score(string, coreQuery, queryHasSlashes);
if (!queryHasSlashes) {
score = legacy_scorer.basenameScore(string, coreQuery, score);
}
}
return score;
},
match: function(string, query, prepQuery, _arg) {
var allowErrors, baseMatches, matches, query_lw, string_lw, _i, _ref, _results;
allowErrors = (_arg != null ? _arg : {}).allowErrors;
if (!string) {
return [];
}
if (!query) {
return [];
}
if (string === query) {
return (function() {
_results = [];
for (var _i = 0, _ref = string.length; 0 <= _ref ? _i < _ref : _i > _ref; 0 <= _ref ? _i++ : _i--){ _results.push(_i); }
return _results;
}).apply(this);
}
if (prepQuery == null) {
prepQuery = prepQueryCache && prepQueryCache.query === query ? prepQueryCache : (prepQueryCache = scorer.prepQuery(query));
}
if (!(allowErrors || scorer.isMatch(string, prepQuery.core_lw, prepQuery.core_up))) {
return [];
}
string_lw = string.toLowerCase();
query_lw = prepQuery.query_lw;
matches = matcher.match(string, string_lw, prepQuery);
if (matches.length === 0) {
return matches;
}
if (string.indexOf(PathSeparator) > -1) {
baseMatches = matcher.basenameMatch(string, string_lw, prepQuery);
matches = matcher.mergeMatches(matches, baseMatches);
}
return matches;
}
};
}).call(this);
},{"./filter":2,"./legacy":4,"./matcher":5,"./scorer":6,"path":7}],4:[function(require,module,exports){
(function() {
var PathSeparator, queryIsLastPathSegment;
PathSeparator = require('path').sep;
exports.basenameScore = function(string, query, score) {
var base, depth, index, lastCharacter, segmentCount, slashCount;
index = string.length - 1;
while (string[index] === PathSeparator) {
index--;
}
slashCount = 0;
lastCharacter = index;
base = null;
while (index >= 0) {
if (string[index] === PathSeparator) {
slashCount++;
if (base == null) {
base = string.substring(index + 1, lastCharacter + 1);
}
} else if (index === 0) {
if (lastCharacter < string.length - 1) {
if (base == null) {
base = string.substring(0, lastCharacter + 1);
}
} else {
if (base == null) {
base = string;
}
}
}
index--;
}
if (base === string) {
score *= 2;
} else if (base) {
score += exports.score(base, query);
}
segmentCount = slashCount + 1;
depth = Math.max(1, 10 - segmentCount);
score *= depth * 0.01;
return score;
};
exports.score = function(string, query) {
var character, characterScore, indexInQuery, indexInString, lowerCaseIndex, minIndex, queryLength, queryScore, stringLength, totalCharacterScore, upperCaseIndex, _ref;
if (string === query) {
return 1;
}
if (queryIsLastPathSegment(string, query)) {
return 1;
}
totalCharacterScore = 0;
queryLength = query.length;
stringLength = string.length;
indexInQuery = 0;
indexInString = 0;
while (indexInQuery < queryLength) {
character = query[indexInQuery++];
lowerCaseIndex = string.indexOf(character.toLowerCase());
upperCaseIndex = string.indexOf(character.toUpperCase());
minIndex = Math.min(lowerCaseIndex, upperCaseIndex);
if (minIndex === -1) {
minIndex = Math.max(lowerCaseIndex, upperCaseIndex);
}
indexInString = minIndex;
if (indexInString === -1) {
return 0;
}
characterScore = 0.1;
if (string[indexInString] === character) {
characterScore += 0.1;
}
if (indexInString === 0 || string[indexInString - 1] === PathSeparator) {
characterScore += 0.8;
} else if ((_ref = string[indexInString - 1]) === '-' || _ref === '_' || _ref === ' ') {
characterScore += 0.7;
}
string = string.substring(indexInString + 1, stringLength);
totalCharacterScore += characterScore;
}
queryScore = totalCharacterScore / queryLength;
return ((queryScore * (queryLength / stringLength)) + queryScore) / 2;
};
queryIsLastPathSegment = function(string, query) {
if (string[string.length - query.length - 1] === PathSeparator) {
return string.lastIndexOf(query) === string.length - query.length;
}
};
exports.match = function(string, query, stringOffset) {
var character, indexInQuery, indexInString, lowerCaseIndex, matches, minIndex, queryLength, stringLength, upperCaseIndex, _i, _ref, _results;
if (stringOffset == null) {
stringOffset = 0;
}
if (string === query) {
return (function() {
_results = [];
for (var _i = stringOffset, _ref = stringOffset + string.length; stringOffset <= _ref ? _i < _ref : _i > _ref; stringOffset <= _ref ? _i++ : _i--){ _results.push(_i); }
return _results;
}).apply(this);
}
queryLength = query.length;
stringLength = string.length;
indexInQuery = 0;
indexInString = 0;
matches = [];
while (indexInQuery < queryLength) {
character = query[indexInQuery++];
lowerCaseIndex = string.indexOf(character.toLowerCase());
upperCaseIndex = string.indexOf(character.toUpperCase());
minIndex = Math.min(lowerCaseIndex, upperCaseIndex);
if (minIndex === -1) {
minIndex = Math.max(lowerCaseIndex, upperCaseIndex);
}
indexInString = minIndex;
if (indexInString === -1) {
return [];
}
matches.push(stringOffset + indexInString);
stringOffset += indexInString + 1;
string = string.substring(indexInString + 1, stringLength);
}
return matches;
};
}).call(this);
},{"path":7}],5:[function(require,module,exports){
(function() {
var PathSeparator, scorer;
PathSeparator = require('path').sep;
scorer = require('./scorer');
exports.basenameMatch = function(subject, subject_lw, prepQuery) {
var basePos, depth, end;
end = subject.length - 1;
while (subject[end] === PathSeparator) {
end--;
}
basePos = subject.lastIndexOf(PathSeparator, end);
if (basePos === -1) {
return [];
}
depth = prepQuery.depth;
while (depth-- > 0) {
basePos = subject.lastIndexOf(PathSeparator, basePos - 1);
if (basePos === -1) {
return [];
}
}
basePos++;
end++;
return exports.match(subject.slice(basePos, end), subject_lw.slice(basePos, end), prepQuery, basePos);
};
exports.mergeMatches = function(a, b) {
var ai, bj, i, j, m, n, out;
m = a.length;
n = b.length;
if (n === 0) {
return a.slice();
}
if (m === 0) {
return b.slice();
}
i = -1;
j = 0;
bj = b[j];
out = [];
while (++i < m) {
ai = a[i];
while (bj <= ai && ++j < n) {
if (bj < ai) {
out.push(bj);
}
bj = b[j];
}
out.push(ai);
}
while (j < n) {
out.push(b[j++]);
}
return out;
};
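// Align subject against query with a small dynamic-programming table and
// a move trace (STOP / UP / LEFT / DIAGONAL), then backtrack from the
// bottom-right corner to recover the subject positions that matched;
// this is what produces the highlight ranges.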
exports.match = function(subject, subject_lw, prepQuery, offset) {
var DIAGONAL, LEFT, STOP, UP, acro_score, align, backtrack, csc_diag, csc_row, csc_score, i, j, m, matches, move, n, pos, query, query_lw, score, score_diag, score_row, score_up, si_lw, start, trace;
if (offset == null) {
offset = 0;
}
query = prepQuery.query;
query_lw = prepQuery.query_lw;
m = subject.length;
n = query.length;
acro_score = scorer.scoreAcronyms(subject, subject_lw, query, query_lw).score;
score_row = new Array(n);
csc_row = new Array(n);
STOP = 0;
UP = 1;
LEFT = 2;
DIAGONAL = 3;
trace = new Array(m * n);
pos = -1;
j = -1;
while (++j < n) {
score_row[j] = 0;
csc_row[j] = 0;
}
i = -1;
while (++i < m) {
score = 0;
score_up = 0;
csc_diag = 0;
si_lw = subject_lw[i];
j = -1;
while (++j < n) {
csc_score = 0;
align = 0;
score_diag = score_up;
if (query_lw[j] === si_lw) {
start = scorer.isWordStart(i, subject, subject_lw);
csc_score = csc_diag > 0 ? csc_diag : scorer.scoreConsecutives(subject, subject_lw, query, query_lw, i, j, start);
align = score_diag + scorer.scoreCharacter(i, j, start, acro_score, csc_score);
}
score_up = score_row[j];
csc_diag = csc_row[j];
if (score > score_up) {
move = LEFT;
} else {
score = score_up;
move = UP;
}
if (align > score) {
score = align;
move = DIAGONAL;
} else {
csc_score = 0;
}
score_row[j] = score;
csc_row[j] = csc_score;
trace[++pos] = score > 0 ? move : STOP;
}
}
i = m - 1;
j = n - 1;
pos = i * n + j;
backtrack = true;
matches = [];
while (backtrack && i >= 0 && j >= 0) {
switch (trace[pos]) {
case UP:
i--;
pos -= n;
break;
case LEFT:
j--;
pos--;
break;
case DIAGONAL:
matches.push(i + offset);
j--;
i--;
pos -= n + 1;
break;
default:
backtrack = false;
}
}
matches.reverse();
return matches;
};
}).call(this);
},{"./scorer":6,"path":7}],6:[function(require,module,exports){
(function() {
var AcronymResult, PathSeparator, Query, basenameScore, coreChars, countDir, doScore, emptyAcronymResult, file_coeff, isMatch, isSeparator, isWordEnd, isWordStart, miss_coeff, opt_char_re, pos_bonus, scoreAcronyms, scoreCharacter, scoreConsecutives, scoreExact, scoreExactMatch, scorePattern, scorePosition, scoreSize, tau_depth, tau_size, truncatedUpperCase, wm;
PathSeparator = require('path').sep;
wm = 150;
pos_bonus = 20;
tau_depth = 13;
tau_size = 85;
file_coeff = 1.2;
miss_coeff = 0.75;
opt_char_re = /[ _\-:\/\\]/g;
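// Strip optional characters (spaces, underscores, dashes, colons and
// slashes) from the query, leaving only the characters a candidate must
// actually contain.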
exports.coreChars = coreChars = function(query) {
return query.replace(opt_char_re, '');
};
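// Public scoring entry point: unless allowErrors is set, bail out with 0
// when the query's core characters are not even a subsequence of the
// string; otherwise score the whole string with doScore() and let
// basenameScore() blend in a basename-only score for paths.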
exports.score = function(string, query, prepQuery, allowErrors) {
var score, string_lw;
if (prepQuery == null) {
prepQuery = new Query(query);
}
if (allowErrors == null) {
allowErrors = false;
}
if (!(allowErrors || isMatch(string, prepQuery.core_lw, prepQuery.core_up))) {
return 0;
}
string_lw = string.toLowerCase();
score = doScore(string, string_lw, prepQuery);
return Math.ceil(basenameScore(string, string_lw, prepQuery, score));
};
Query = (function() {
function Query(query) {
if (!(query != null ? query.length : void 0)) {
return null;
}
this.query = query;
this.query_lw = query.toLowerCase();
this.core = coreChars(query);
this.core_lw = this.core.toLowerCase();
this.core_up = truncatedUpperCase(this.core);
this.depth = countDir(query, query.length);
}
return Query;
})();
exports.prepQuery = function(query) {
return new Query(query);
};
exports.isMatch = isMatch = function(subject, query_lw, query_up) {
var i, j, m, n, qj_lw, qj_up, si;
m = subject.length;
n = query_lw.length;
if (!m || n > m) {
return false;
}
i = -1;
j = -1;
while (++j < n) {
qj_lw = query_lw[j];
qj_up = query_up[j];
while (++i < m) {
si = subject[i];
if (si === qj_lw || si === qj_up) {
break;
}
}
if (i === m) {
return false;
}
}
return true;
};
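// Main scoring heuristic: shortcut to scoreExact when every query
// character matches as an acronym, to scoreExactMatch when the query
// occurs as a contiguous (case-insensitive) substring, otherwise run a
// single-row dynamic program over subject x query that gives up early
// once too many candidate character matches fail to improve the score
// (the miss budget).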
doScore = function(subject, subject_lw, prepQuery) {
var acro, acro_score, align, csc_diag, csc_row, csc_score, i, j, m, miss_budget, miss_left, mm, n, pos, query, query_lw, record_miss, score, score_diag, score_row, score_up, si_lw, start, sz;
query = prepQuery.query;
query_lw = prepQuery.query_lw;
m = subject.length;
n = query.length;
acro = scoreAcronyms(subject, subject_lw, query, query_lw);
acro_score = acro.score;
if (acro.count === n) {
return scoreExact(n, m, acro_score, acro.pos);
}
pos = subject_lw.indexOf(query_lw);
if (pos > -1) {
return scoreExactMatch(subject, subject_lw, query, query_lw, pos, n, m);
}
score_row = new Array(n);
csc_row = new Array(n);
sz = scoreSize(n, m);
miss_budget = Math.ceil(miss_coeff * n) + 5;
miss_left = miss_budget;
j = -1;
while (++j < n) {
score_row[j] = 0;
csc_row[j] = 0;
}
i = subject_lw.indexOf(query_lw[0]);
if (i > -1) {
i--;
}
mm = subject_lw.lastIndexOf(query_lw[n - 1], m);
if (mm > i) {
m = mm + 1;
}
while (++i < m) {
score = 0;
score_diag = 0;
csc_diag = 0;
si_lw = subject_lw[i];
record_miss = true;
j = -1;
while (++j < n) {
score_up = score_row[j];
if (score_up > score) {
score = score_up;
}
csc_score = 0;
if (query_lw[j] === si_lw) {
start = isWordStart(i, subject, subject_lw);
csc_score = csc_diag > 0 ? csc_diag : scoreConsecutives(subject, subject_lw, query, query_lw, i, j, start);
align = score_diag + scoreCharacter(i, j, start, acro_score, csc_score);
if (align > score) {
score = align;
miss_left = miss_budget;
} else {
if (record_miss && --miss_left <= 0) {
return score_row[n - 1] * sz;
}
record_miss = false;
}
}
score_diag = score_up;
csc_diag = csc_row[j];
csc_row[j] = csc_score;
score_row[j] = score;
}
}
return score * sz;
};
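// A word start is the first character, a separator, a character right
// after a separator, or an upper-case character preceded by a lower-case
// one (a camelCase boundary).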
exports.isWordStart = isWordStart = function(pos, subject, subject_lw) {
var curr_s, prev_s;
if (pos === 0) {
return true;
}
curr_s = subject[pos];
prev_s = subject[pos - 1];
return isSeparator(curr_s) || isSeparator(prev_s) || (curr_s !== subject_lw[pos] && prev_s === subject_lw[pos - 1]);
};
exports.isWordEnd = isWordEnd = function(pos, subject, subject_lw, len) {
var curr_s, next_s;
if (pos === len - 1) {
return true;
}
curr_s = subject[pos];
next_s = subject[pos + 1];
return isSeparator(curr_s) || isSeparator(next_s) || (curr_s === subject_lw[pos] && next_s !== subject_lw[pos + 1]);
};
isSeparator = function(c) {
return c === ' ' || c === '.' || c === '-' || c === '_' || c === '/' || c === '\\';
};
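// Positions near the start of the string get a quadratic bonus; the
// bonus then decays linearly and bottoms out at 0.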
scorePosition = function(pos) {
var sc;
if (pos < pos_bonus) {
sc = pos_bonus - pos;
return 100 + sc * sc;
} else {
return Math.max(100 + pos_bonus - pos, 0);
}
};
scoreSize = function(n, m) {
return tau_size / (tau_size + Math.abs(m - n));
};
scoreExact = function(n, m, quality, pos) {
return 2 * n * (wm * quality + scorePosition(pos)) * scoreSize(n, m);
};
exports.scorePattern = scorePattern = function(count, len, sameCase, start, end) {
var bonus, sz;
sz = count;
bonus = 6;
if (sameCase === count) {
bonus += 2;
}
if (start) {
bonus += 3;
}
if (end) {
bonus += 1;
}
if (count === len) {
if (start) {
if (sameCase === len) {
sz += 2;
} else {
sz += 1;
}
}
if (end) {
bonus += 1;
}
}
return sameCase + sz * (sz + bonus);
};
exports.scoreCharacter = scoreCharacter = function(i, j, start, acro_score, csc_score) {
var posBonus;
posBonus = scorePosition(i);
if (start) {
return posBonus + wm * ((acro_score > csc_score ? acro_score : csc_score) + 10);
}
return posBonus + wm * csc_score;
};
exports.scoreConsecutives = scoreConsecutives = function(subject, subject_lw, query, query_lw, i, j, start) {
var k, m, mi, n, nj, sameCase, startPos, sz;
m = subject.length;
n = query.length;
mi = m - i;
nj = n - j;
k = mi < nj ? mi : nj;
startPos = i;
sameCase = 0;
sz = 0;
if (query[j] === subject[i]) {
sameCase++;
}
while (++sz < k && query_lw[++j] === subject_lw[++i]) {
if (query[j] === subject[i]) {
sameCase++;
}
}
if (sz === 1) {
return 1 + 2 * sameCase;
}
return scorePattern(sz, n, sameCase, start, isWordEnd(i, subject, subject_lw, m));
};
exports.scoreExactMatch = scoreExactMatch = function(subject, subject_lw, query, query_lw, pos, n, m) {
var end, i, pos2, sameCase, start;
start = isWordStart(pos, subject, subject_lw);
if (!start) {
pos2 = subject_lw.indexOf(query_lw, pos + 1);
if (pos2 > -1) {
start = isWordStart(pos2, subject, subject_lw);
if (start) {
pos = pos2;
}
}
}
i = -1;
sameCase = 0;
while (++i < n) {
if (query[pos + i] === subject[i]) {
sameCase++;
}
}
end = isWordEnd(pos + n - 1, subject, subject_lw, m);
return scoreExact(n, m, scorePattern(n, n, sameCase, start, end), pos);
};
AcronymResult = (function() {
function AcronymResult(score, pos, count) {
this.score = score;
this.pos = pos;
this.count = count;
}
return AcronymResult;
})();
emptyAcronymResult = new AcronymResult(0, 0.1, 0);
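// Scan the subject for query characters that land on word starts
// (acronym hits); return the pattern score, the average hit position and
// the hit count, or the neutral result when fewer than two characters
// hit.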
exports.scoreAcronyms = scoreAcronyms = function(subject, subject_lw, query, query_lw) {
var count, i, j, m, n, pos, qj_lw, sameCase, score;
m = subject.length;
n = query.length;
if (!(m > 1 && n > 1)) {
return emptyAcronymResult;
}
count = 0;
pos = 0;
sameCase = 0;
i = -1;
j = -1;
while (++j < n) {
qj_lw = query_lw[j];
while (++i < m) {
if (qj_lw === subject_lw[i] && isWordStart(i, subject, subject_lw)) {
if (query[j] === subject[i]) {
sameCase++;
}
pos += i;
count++;
break;
}
}
if (i === m) {
break;
}
}
if (count < 2) {
return emptyAcronymResult;
}
score = scorePattern(count, n, sameCase, true, false);
return new AcronymResult(score, pos / count, count);
};
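// Re-score only the trailing path segments selected by prepQuery.depth
// and mix that basename score with the full-path score, with weights
// that depend on directory depth and basename length.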
basenameScore = function(subject, subject_lw, prepQuery, fullPathScore) {
var alpha, basePathScore, basePos, depth, end;
if (fullPathScore === 0) {
return 0;
}
end = subject.length - 1;
while (subject[end] === PathSeparator) {
end--;
}
basePos = subject.lastIndexOf(PathSeparator, end);
if (basePos === -1) {
return fullPathScore;
}
depth = prepQuery.depth;
while (depth-- > 0) {
basePos = subject.lastIndexOf(PathSeparator, basePos - 1);
if (basePos === -1) {
return fullPathScore;
}
}
basePos++;
end++;
basePathScore = doScore(subject.slice(basePos, end), subject_lw.slice(basePos, end), prepQuery);
alpha = 0.5 * tau_depth / (tau_depth + countDir(subject, end + 1));
return alpha * basePathScore + (1 - alpha) * fullPathScore * scoreSize(0, file_coeff * (end - basePos));
};
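// Count directory separators in path[0..end), ignoring leading
// separators and treating each run of consecutive separators as one.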
exports.countDir = countDir = function(path, end) {
var count, i;
if (end < 1) {
return 0;
}
count = 0;
i = -1;
while (++i < end && path[i] === PathSeparator) {
continue;
}
while (++i < end) {
if (path[i] === PathSeparator) {
count++;
while (++i < end && path[i] === PathSeparator) {
continue;
}
}
}
return count;
};
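// Upper-case each character but keep only the first resulting code unit,
// so one-to-many case mappings (e.g. 'ß' -> 'SS') stay one character
// long and indices keep lining up.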
truncatedUpperCase = function(str) {
var char, upper, _i, _len;
upper = "";
for (_i = 0, _len = str.length; _i < _len; _i++) {
char = str[_i];
upper += char.toUpperCase()[0];
}
return upper;
};
}).call(this);
},{"path":7}],7:[function(require,module,exports){
(function (process){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// resolves '.' and '..' elements in a path array with directory names;
// the array must contain no slashes, empty elements, or device names
// (c:\), and therefore no leading or trailing slashes (it does not
// distinguish relative and absolute paths)
function normalizeArray(parts, allowAboveRoot) {
// if the path tries to go above the root, `up` ends up > 0
var up = 0;
for (var i = parts.length - 1; i >= 0; i--) {
var last = parts[i];
if (last === '.') {
parts.splice(i, 1);
} else if (last === '..') {
parts.splice(i, 1);
up++;
} else if (up) {
parts.splice(i, 1);
up--;
}
}
// if the path is allowed to go above the root, restore leading ..s
if (allowAboveRoot) {
for (; up--; up) {
parts.unshift('..');
}
}
return parts;
}
// Split a filename into [root, dir, basename, ext], unix version
// 'root' is just a slash, or nothing.
var splitPathRe =
/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;
var splitPath = function(filename) {
return splitPathRe.exec(filename).slice(1);
};
// path.resolve([from ...], to)
// posix version
exports.resolve = function() {
var resolvedPath = '',
resolvedAbsolute = false;
for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {
var path = (i >= 0) ? arguments[i] : process.cwd();
// Skip empty and invalid entries
if (typeof path !== 'string') {
throw new TypeError('Arguments to path.resolve must be strings');
} else if (!path) {
continue;
}
resolvedPath = path + '/' + resolvedPath;
resolvedAbsolute = path.charAt(0) === '/';
}
// At this point the path should be resolved to a full absolute path, but
// handle relative paths to be safe (might happen when process.cwd() fails)
// Normalize the path
resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) {
return !!p;
}), !resolvedAbsolute).join('/');
return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.';
};
// path.normalize(path)
// posix version
exports.normalize = function(path) {
var isAbsolute = exports.isAbsolute(path),
trailingSlash = substr(path, -1) === '/';
// Normalize the path
path = normalizeArray(filter(path.split('/'), function(p) {
return !!p;
}), !isAbsolute).join('/');
if (!path && !isAbsolute) {
path = '.';
}
if (path && trailingSlash) {
path += '/';
}
return (isAbsolute ? '/' : '') + path;
};
// posix version
exports.isAbsolute = function(path) {
return path.charAt(0) === '/';
};
// posix version
exports.join = function() {
var paths = Array.prototype.slice.call(arguments, 0);
return exports.normalize(filter(paths, function(p, index) {
if (typeof p !== 'string') {
throw new TypeError('Arguments to path.join must be strings');
}
return p;
}).join('/'));
};
// path.relative(from, to)
// posix version
exports.relative = function(from, to) {
from = exports.resolve(from).substr(1);
to = exports.resolve(to).substr(1);
function trim(arr) {
var start = 0;
for (; start < arr.length; start++) {
if (arr[start] !== '') break;
}
var end = arr.length - 1;
for (; end >= 0; end--) {
if (arr[end] !== '') break;
}
if (start > end) return [];
return arr.slice(start, end - start + 1);
}
var fromParts = trim(from.split('/'));
var toParts = trim(to.split('/'));
var length = Math.min(fromParts.length, toParts.length);
var samePartsLength = length;
for (var i = 0; i < length; i++) {
if (fromParts[i] !== toParts[i]) {
samePartsLength = i;
break;
}
}
var outputParts = [];
for (var i = samePartsLength; i < fromParts.length; i++) {
outputParts.push('..');
}
outputParts = outputParts.concat(toParts.slice(samePartsLength));
return outputParts.join('/');
};
exports.sep = '/';
exports.delimiter = ':';
exports.dirname = function(path) {
var result = splitPath(path),
root = result[0],
dir = result[1];
if (!root && !dir) {
// No dirname whatsoever
return '.';
}
if (dir) {
// It has a dirname, strip trailing slash
dir = dir.substr(0, dir.length - 1);
}
return root + dir;
};
exports.basename = function(path, ext) {
var f = splitPath(path)[2];
// TODO: make this comparison case-insensitive on windows?
if (ext && f.substr(-1 * ext.length) === ext) {
f = f.substr(0, f.length - ext.length);
}
return f;
};
exports.extname = function(path) {
return splitPath(path)[3];
};
function filter (xs, f) {
if (xs.filter) return xs.filter(f);
var res = [];
for (var i = 0; i < xs.length; i++) {
if (f(xs[i], i, xs)) res.push(xs[i]);
}
return res;
}
// String.prototype.substr - a negative start index doesn't work in IE8
var substr = 'ab'.substr(-1) === 'b'
? function (str, start, len) { return str.substr(start, len) }
: function (str, start, len) {
if (start < 0) start = str.length + start;
return str.substr(start, len);
}
;
}).call(this,require('_process'))
},{"_process":8}],8:[function(require,module,exports){
// shim for using process in browser
var process = module.exports = {};
var queue = [];
var draining = false;
var currentQueue;
var queueIndex = -1;
function cleanUpNextTick() {
draining = false;
if (currentQueue.length) {
queue = currentQueue.concat(queue);
} else {
queueIndex = -1;
}
if (queue.length) {
drainQueue();
}
}
function drainQueue() {
if (draining) {
return;
}
var timeout = setTimeout(cleanUpNextTick);
draining = true;
var len = queue.length;
while(len) {
currentQueue = queue;
queue = [];
while (++queueIndex < len) {
if (currentQueue) {
currentQueue[queueIndex].run();
}
}
queueIndex = -1;
len = queue.length;
}
currentQueue = null;
draining = false;
clearTimeout(timeout);
}
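// Minimal nextTick shim for the browser: push the callback (with any
// extra arguments) onto a queue and, if the queue was previously empty,
// schedule drainQueue via setTimeout(fn, 0).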
process.nextTick = function (fun) {
var args = new Array(arguments.length - 1);
if (arguments.length > 1) {
for (var i = 1; i < arguments.length; i++) {
args[i - 1] = arguments[i];
}
}
queue.push(new Item(fun, args));
if (queue.length === 1 && !draining) {
setTimeout(drainQueue, 0);
}
};
// v8 likes predictable objects
function Item(fun, array) {
this.fun = fun;
this.array = array;
}
Item.prototype.run = function () {
this.fun.apply(null, this.array);
};
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.version = ''; // empty string to avoid regexp issues
process.versions = {};
function noop() {}
process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;
process.binding = function (name) {
throw new Error('process.binding is not supported');
};
process.cwd = function () { return '/' };
process.chdir = function (dir) {
throw new Error('process.chdir is not supported');
};
process.umask = function() { return 0; };
},{}]},{},[1]);
......@@ -2675,6 +2675,10 @@ function-bind@^1.1.1, function-bind@~1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
fuzzaldrin-plus@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/fuzzaldrin-plus/-/fuzzaldrin-plus-0.5.0.tgz#ef5f26f0c2fc7e9e9a16ea149a802d6cb4804b1e"
gauge@~2.7.3:
version "2.7.4"
resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
......