
Commit 985a1853 by Michael Kozono

Merge branch '10-2-stable-patch-5' into '10-2-stable'

Prepare 10.2.5 release

See merge request gitlab-org/gitlab-ce!15925
parents a7bbf4c1 ce916f47
......@@ -581,7 +581,7 @@ codequality:
script:
- cp .rubocop.yml .rubocop.yml.bak
- grep -v "rubocop-gitlab-security" .rubocop.yml.bak > .rubocop.yml
- docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > raw_codeclimate.json
- docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate analyze -f json > raw_codeclimate.json
- cat raw_codeclimate.json | docker run -i stedolan/jq -c 'map({check_name,fingerprint,location})' > codeclimate.json
- mv .rubocop.yml.bak .rubocop.yml
artifacts:
......
......@@ -171,7 +171,7 @@ gem 're2', '~> 1.1.1'
gem 'version_sorter', '~> 2.1.0'
# Cache
gem 'redis-rails', '~> 5.0.1'
gem 'redis-rails', '~> 5.0.2'
# Redis
gem 'redis', '~> 3.2'
......@@ -281,7 +281,7 @@ group :metrics do
gem 'influxdb', '~> 0.2', require: false
# Prometheus
gem 'prometheus-client-mmap', '~>0.7.0.beta18'
gem 'prometheus-client-mmap', '~> 0.7.0.beta43'
gem 'raindrops', '~> 0.18'
end
......
......@@ -484,7 +484,6 @@ GEM
mini_mime (0.1.4)
mini_portile2 (2.3.0)
minitest (5.7.0)
mmap2 (2.2.7)
mousetrap-rails (1.4.6)
multi_json (1.12.2)
multi_xml (0.6.0)
......@@ -619,8 +618,7 @@ GEM
parser
unparser
procto (0.0.3)
prometheus-client-mmap (0.7.0.beta18)
mmap2 (~> 2.2, >= 2.2.7)
prometheus-client-mmap (0.7.0.beta43)
pry (0.10.4)
coderay (~> 1.1.0)
method_source (~> 0.8.1)
......@@ -695,24 +693,24 @@ GEM
recursive-open-struct (1.0.0)
redcarpet (3.4.0)
redis (3.3.3)
redis-actionpack (5.0.1)
redis-actionpack (5.0.2)
actionpack (>= 4.0, < 6)
redis-rack (>= 1, < 3)
redis-store (>= 1.1.0, < 1.4.0)
redis-activesupport (5.0.1)
redis-store (>= 1.1.0, < 2)
redis-activesupport (5.0.4)
activesupport (>= 3, < 6)
redis-store (~> 1.2.0)
redis-store (>= 1.3, < 2)
redis-namespace (1.5.2)
redis (~> 3.0, >= 3.0.4)
redis-rack (1.6.0)
rack (~> 1.5)
redis-store (~> 1.2.0)
redis-rails (5.0.1)
redis-actionpack (~> 5.0.0)
redis-activesupport (~> 5.0.0)
redis-store (~> 1.2.0)
redis-store (1.2.0)
redis (>= 2.2)
redis-rack (2.0.3)
rack (>= 1.5, < 3)
redis-store (>= 1.2, < 2)
redis-rails (5.0.2)
redis-actionpack (>= 5.0, < 6)
redis-activesupport (>= 5.0, < 6)
redis-store (>= 1.2, < 2)
redis-store (1.4.1)
redis (>= 2.2, < 5)
representable (3.0.4)
declarative (< 0.1.0)
declarative-option (< 0.2.0)
......@@ -1101,7 +1099,7 @@ DEPENDENCIES
peek-sidekiq (~> 1.0.3)
pg (~> 0.18.2)
premailer-rails (~> 1.9.7)
prometheus-client-mmap (~> 0.7.0.beta18)
prometheus-client-mmap (~> 0.7.0.beta43)
pry-byebug (~> 3.4.1)
pry-rails (~> 0.3.4)
rack-attack (~> 4.4.1)
......@@ -1122,7 +1120,7 @@ DEPENDENCIES
redcarpet (~> 3.4)
redis (~> 3.2)
redis-namespace (~> 1.5.2)
redis-rails (~> 5.0.1)
redis-rails (~> 5.0.2)
request_store (~> 1.3)
responders (~> 2.0)
rouge (~> 2.0)
......@@ -1185,4 +1183,4 @@ DEPENDENCIES
wikicloth (= 0.8.1)
BUNDLED WITH
1.15.4
1.16.0
......@@ -5,7 +5,7 @@ import '../vue_shared/vue_resource_interceptor';
document.addEventListener('DOMContentLoaded', () => {
const initialDataEl = document.getElementById('js-issuable-app-initial-data');
const initialData = JSON.parse(initialDataEl.innerHTML.replace(/&quot;/g, '"'));
const props = JSON.parse(initialDataEl.innerHTML.replace(/&quot;/g, '"'));
$('.issuable-edit').on('click', (e) => {
e.preventDefault();
......@@ -18,32 +18,9 @@ document.addEventListener('DOMContentLoaded', () => {
components: {
issuableApp,
},
data() {
return {
...initialData,
};
},
render(createElement) {
return createElement('issuable-app', {
props: {
canUpdate: this.canUpdate,
canDestroy: this.canDestroy,
endpoint: this.endpoint,
issuableRef: this.issuableRef,
initialTitleHtml: this.initialTitleHtml,
initialTitleText: this.initialTitleText,
initialDescriptionHtml: this.initialDescriptionHtml,
initialDescriptionText: this.initialDescriptionText,
issuableTemplates: this.issuableTemplates,
markdownPreviewPath: this.markdownPreviewPath,
markdownDocsPath: this.markdownDocsPath,
projectPath: this.projectPath,
projectNamespace: this.projectNamespace,
updatedAt: this.updatedAt,
updatedByName: this.updatedByName,
updatedByPath: this.updatedByPath,
initialTaskStatus: this.initialTaskStatus,
},
props,
});
},
});
......
......@@ -128,7 +128,7 @@ import IssuablesHelper from './helpers/issuables_helper';
};
MergeRequest.prototype.hideCloseButton = function() {
const el = document.querySelector('.merge-request .issuable-actions');
const el = document.querySelector('.merge-request .js-issuable-actions');
const closeDropdownItem = el.querySelector('li.close-item');
if (closeDropdownItem) {
closeDropdownItem.classList.add('hidden');
......
......@@ -243,6 +243,16 @@
}
}
&.dot-highlight::after {
content: '';
background-color: $blue-500;
width: $gl-padding * 0.5;
height: $gl-padding * 0.5;
display: inline-block;
border-radius: 50%;
margin-left: 3px;
}
svg {
height: 15px;
width: 15px;
......
......@@ -437,6 +437,7 @@
border-width: 1px;
width: 17px;
height: 17px;
top: 0;
}
&:hover,
......
......@@ -13,6 +13,41 @@
.author_link {
white-space: nowrap;
}
@media (max-width: $screen-xs-max) {
display: block;
}
}
.detail-page-header-body {
position: relative;
line-height: 35px;
display: flex;
flex-grow: 1;
@media (min-width: $screen-sm-min) {
padding-left: 0;
padding-right: 0;
}
}
.detail-page-header-actions {
@include new-style-dropdown;
align-self: center;
flex-shrink: 0;
flex: 0 0 auto;
@media (max-width: $screen-xs-max) {
width: 100%;
margin-top: 10px;
> .issue-btn-group {
> .btn {
width: 100%;
}
}
}
}
.detail-page-description {
......
......@@ -622,50 +622,16 @@
margin-top: 0;
height: auto;
align-self: center;
@media (max-width: $screen-xs-max) {
position: absolute;
top: 0;
left: 0;
}
}
.issuable-header {
position: relative;
padding-left: 45px;
padding-right: 45px;
line-height: 35px;
display: flex;
flex-grow: 1;
@media (min-width: $screen-sm-min) {
float: left;
padding-left: 0;
padding-right: 0;
}
}
.issuable-actions {
@include new-style-dropdown;
align-self: center;
flex-shrink: 0;
flex: 0 0 auto;
@media (min-width: $screen-sm-min) {
float: right;
}
}
.issuable-gutter-toggle {
@media (max-width: $screen-sm-max) {
position: absolute;
top: 0;
right: 0;
margin-left: $btn-side-margin;
}
}
.issuable-meta {
flex: 1;
display: inline-block;
font-size: 14px;
line-height: 24px;
......
......@@ -134,26 +134,11 @@ ul.related-merge-requests > li {
}
@media (max-width: $screen-xs-max) {
.detail-page-header,
.issuable-header {
display: block;
.detail-page-header {
.issuable-meta {
line-height: 18px;
}
}
.issuable-actions {
margin-top: 10px;
.issue-btn-group {
width: 100%;
.btn {
width: 100%;
}
}
}
}
.issue-form {
......
......@@ -303,6 +303,14 @@ class Group < Namespace
"#{parent.full_path}/#{path_was}"
end
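# Prefer an in-memory lookup when the group_members association is
# already loaded (e.g. preloaded by the API), avoiding an extra query.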
def group_member(user)
if group_members.loaded?
group_members.find { |gm| gm.user_id == user.id }
else
group_members.find_by(user_id: user)
end
end
private
def update_two_factor_requirement
......
......@@ -567,8 +567,7 @@ class Project < ActiveRecord::Base
if forked?
RepositoryForkWorker.perform_async(id,
forked_from_project.repository_storage_path,
forked_from_project.full_path,
self.namespace.full_path)
forked_from_project.disk_path)
else
RepositoryImportWorker.perform_async(self.id)
end
......@@ -1125,8 +1124,12 @@ class Project < ActiveRecord::Base
end
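# Prefer an in-memory lookup when the project_members association is
# already loaded, avoiding an extra query per project.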
def project_member(user)
if project_members.loaded?
project_members.find { |member| member.user_id == user.id }
else
project_members.find_by(user_id: user)
end
end
def default_branch
@default_branch ||= repository.root_ref if repository.exists?
......
......@@ -10,7 +10,9 @@ class ProtectedBranch < ActiveRecord::Base
def self.protected?(project, ref_name)
return true if project.empty_repo? && default_branch_protected?
self.matching(ref_name, protected_refs: project.protected_branches).present?
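# Load only the branch names; wildcard matching does not need full records.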
refs = project.protected_branches.select(:name)
self.matching(ref_name, protected_refs: refs).present?
end
def self.default_branch_protected?
......
......@@ -5,6 +5,8 @@ class ProtectedTag < ActiveRecord::Base
protected_ref_access_levels :create
def self.protected?(project, ref_name)
self.matching(ref_name, protected_refs: project.protected_tags).present?
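# Load only the tag names; wildcard matching does not need full records.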
refs = project.protected_tags.select(:name)
self.matching(ref_name, protected_refs: refs).present?
end
end
......@@ -978,8 +978,12 @@ class User < ActiveRecord::Base
end
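# Prefer an in-memory lookup when notification_settings is already
# loaded, avoiding an N+1 query when presenting many projects.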
def notification_settings_for(source)
if notification_settings.loaded?
notification_settings.find { |notification| notification.source == source }
else
notification_settings.find_or_initialize_by(source: source)
end
end
# Lazy load global notification setting
# Initializes User setting with Participating level if setting not persisted
......
......@@ -12,8 +12,12 @@ class BaseCountService
Rails.cache.fetch(cache_key, raw: raw?) { uncached_count }.to_i
end
def refresh_cache
Rails.cache.write(cache_key, uncached_count, raw: raw?)
def count_stored?
Rails.cache.read(cache_key).present?
end
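# The optional block lets callers supply a precomputed value (as the
# batch count services do) instead of running uncached_count.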
def refresh_cache(&block)
Rails.cache.write(cache_key, block_given? ? yield : uncached_count, raw: raw?)
end
def uncached_count
......
......@@ -20,7 +20,7 @@ class MetricsService
end
def metrics_text
"#{health_metrics_text}#{prometheus_metrics_text}"
prometheus_metrics_text.concat(health_metrics_text)
end
private
......
# Service class for getting and caching the number of elements of several projects
# Warning: do not use this service with a really large set of projects
# because the service uses maps to retrieve the project ids.
module Projects
class BatchCountService
def initialize(projects)
@projects = projects
end
def refresh_cache
@projects.each do |project|
service = count_service.new(project)
unless service.count_stored?
service.refresh_cache { global_count[project.id].to_i }
end
end
end
def project_ids
@projects.map(&:id)
end
def global_count(project)
raise NotImplementedError, 'global_count must be implemented and return a hash indexed by the project id'
end
def count_service
raise NotImplementedError, 'count_service must be implemented and return a Projects::CountService object'
end
end
end
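# Illustrative usage sketch, not part of this change. Assuming a
# hypothetical Projects::StarsCountService implementing `.query`, a batch
# subclass only needs to define `global_count` and `count_service`:
#
#   class BatchStarsCountService < Projects::BatchCountService
#     def global_count
#       @global_count ||= count_service.query(project_ids).group(:project_id).count
#     end
#
#     def count_service
#       ::Projects::StarsCountService
#     end
#   end
#
#   # One grouped query warms the cached counter of every project:
#   BatchStarsCountService.new(projects).refresh_cache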
# Service class for getting and caching the number of forks of several projects
# Warning: do not use this service with a really large set of projects
# because the service uses maps to retrieve the project ids.
module Projects
class BatchForksCountService < Projects::BatchCountService
def global_count
@global_count ||= begin
count_service.query(project_ids)
.group(:forked_from_project_id)
.count
end
end
def count_service
::Projects::ForksCountService
end
end
end
# Service class for getting and caching the number of issues of several projects
# Warning: do not use this service with a really large set of projects
# because the service uses maps to retrieve the project ids.
module Projects
class BatchOpenIssuesCountService < Projects::BatchCountService
def global_count
@global_count ||= begin
count_service.query(project_ids).group(:project_id).count
end
end
def count_service
::Projects::OpenIssuesCountService
end
end
end
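# Usage sketch (illustrative): warm the open-issues counter of a set of
# projects with a single grouped query instead of one COUNT per project:
#
#   Projects::BatchOpenIssuesCountService.new(projects).refresh_cache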
......@@ -11,6 +11,10 @@ module Projects
@project = project
end
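# Delegates to the class-level query so the single-project and batch
# services share one query definition.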
def relation_for_count
self.class.query(@project.id)
end
def cache_key_name
raise(
NotImplementedError,
......@@ -21,5 +25,12 @@ module Projects
def cache_key
['projects', 'count_service', VERSION, @project.id, cache_key_name]
end
def self.query(project_ids)
raise(
NotImplementedError,
'"query" must be implemented and return an ActiveRecord::Relation'
)
end
end
end
module Projects
# Service class for getting and caching the number of forks of a project.
class ForksCountService < Projects::CountService
def relation_for_count
@project.forks
end
def cache_key_name
'forks_count'
end
def self.query(project_ids)
# We can't directly change ForkedProjectLink to ForkNetworkMember here
# Nowadays, when a call using v3 to projects/:id/fork is made,
# the relationship to ForkNetworkMember is not updated
ForkedProjectLink.where(forked_from_project: project_ids)
end
end
end
......@@ -2,14 +2,14 @@ module Projects
# Service class for counting and caching the number of open issues of a
# project.
class OpenIssuesCountService < Projects::CountService
def relation_for_count
# We don't include confidential issues in this number since this would
# expose the number of confidential issues to non project members.
@project.issues.opened.public_only
end
def cache_key_name
'open_issues_count'
end
def self.query(project_ids)
# We don't include confidential issues in this number since this would
# expose the number of confidential issues to non project members.
Issue.opened.public_only.where(project: project_ids)
end
end
end
......@@ -15,8 +15,8 @@
#prometheus-graphs{ data: { "settings-path": edit_project_service_path(@project, 'prometheus'),
"documentation-path": help_page_path('administration/monitoring/prometheus/index.md'),
"empty-getting-started-svg-path": image_path('illustrations/monitoring/getting_started'),
"empty-loading-svg-path": image_path('illustrations/monitoring/loading'),
"empty-unable-to-connect-svg-path": image_path('illustrations/monitoring/unable_to_connect'),
"empty-getting-started-svg-path": image_path('illustrations/monitoring/getting_started.svg'),
"empty-loading-svg-path": image_path('illustrations/monitoring/loading.svg'),
"empty-unable-to-connect-svg-path": image_path('illustrations/monitoring/unable_to_connect.svg'),
"additional-metrics": additional_metrics_project_environment_path(@project, @environment, format: :json),
"has-metrics": "#{@environment.has_metrics?}", deployment_endpoint: project_environment_deployments_path(@project, @environment, format: :json) } }
- avatar = namespace_icon(namespace, 100)
- can_create_project = current_user.can?(:create_projects, namespace)
- if forked_project = namespace.find_fork_of(@project)
.bordered-box.fork-thumbnail.text-center.prepend-left-default.append-right-default.prepend-top-default.append-bottom-default.forked
= link_to project_path(forked_project) do
- if /no_((\w*)_)*avatar/.match(avatar)
= project_identicon(namespace, class: "avatar s100 identicon")
- else
.avatar-container.s100
= image_tag(avatar, class: "avatar s100")
%h5.prepend-top-default
= namespace.human_name
- else
.bordered-box.fork-thumbnail.text-center.prepend-left-default.append-right-default.prepend-top-default.append-bottom-default{ class: ("disabled" unless can_create_project) }
= link_to project_forks_path(@project, namespace_key: namespace.id),
method: "POST",
class: ("disabled has-tooltip" unless can_create_project),
title: (_('You have reached your project limit') unless can_create_project) do
- if /no_((\w*)_)*avatar/.match(avatar)
= project_identicon(namespace, class: "avatar s100 identicon")
- else
.avatar-container.s100
= image_tag(avatar, class: "avatar s100")
%h5.prepend-top-default
= namespace.human_name
......@@ -14,22 +14,7 @@
%h5.prepend-top-0.append-bottom-0.prepend-left-default.append-right-default
Click to fork the project
- @namespaces.each do |namespace|
- avatar = namespace_icon(namespace, 100)
- can_create_project = current_user.can?(:create_projects, namespace)
- forked_project = namespace.find_fork_of(@project)
- fork_path = forked_project ? project_path(forked_project) : project_forks_path(@project, namespace_key: namespace.id)
.bordered-box.fork-thumbnail.text-center.prepend-left-default.append-right-default.prepend-top-default.append-bottom-default{ class: [("disabled" unless can_create_project), ("forked" if forked_project)] }
= link_to fork_path,
method: "POST",
class: [("js-fork-thumbnail" unless forked_project), ("disabled has-tooltip" unless can_create_project)],
title: (_('You have reached your project limit') unless can_create_project) do
- if /no_((\w*)_)*avatar/.match(avatar)
= project_identicon(namespace, class: "avatar s100 identicon")
- else
.avatar-container.s100
= image_tag(avatar, class: "avatar s100")
%h5.prepend-top-default
= namespace.human_name
= render 'fork_button', namespace: namespace
- else
%strong
No available namespaces to fork the project.
......
......@@ -12,8 +12,8 @@
- can_update_issue = can?(current_user, :update_issue, @issue)
- can_report_spam = @issue.submittable_as_spam_by?(current_user)
.clearfix.detail-page-header
.issuable-header
.detail-page-header
.detail-page-header-body
.issuable-status-box.status-box.status-box-closed{ class: issue_button_visibility(@issue, false) }
= icon('check', class: "hidden-sm hidden-md hidden-lg")
%span.hidden-xs
......@@ -22,9 +22,6 @@
= icon('circle-o', class: "hidden-sm hidden-md hidden-lg")
%span.hidden-xs Open
%a.btn.btn-default.pull-right.visible-xs-block.gutter-toggle.issuable-gutter-toggle.js-sidebar-toggle{ href: "#" }
= icon('angle-double-left')
.issuable-meta
- if @issue.confidential
= icon('eye-slash', class: 'issuable-warning-icon')
......@@ -32,7 +29,10 @@
= icon('lock', class: 'issuable-warning-icon')
= issuable_meta(@issue, @project, "Issue")
.issuable-actions.js-issuable-actions
%a.btn.btn-default.pull-right.visible-xs-block.gutter-toggle.issuable-gutter-toggle.js-sidebar-toggle{ href: "#" }
= icon('angle-double-left')
.detail-page-header-actions.js-issuable-actions
.clearfix.issue-btn-group.dropdown
%button.btn.btn-default.pull-left.hidden-md.hidden-lg{ type: "button", data: { toggle: "dropdown" } }
Options
......@@ -74,10 +74,10 @@
= edited_time_ago_with_tooltip(@issue, placement: 'bottom', html_class: 'issue-edited-ago js-issue-edited-ago')
#merge-requests{ data: { url: referenced_merge_requests_project_issue_url(@project, @issue) } }
#merge-requests{ data: { url: referenced_merge_requests_project_issue_path(@project, @issue) } }
// This element is filled in using JavaScript.
#related-branches{ data: { url: related_branches_project_issue_url(@project, @issue) } }
#related-branches{ data: { url: related_branches_project_issue_path(@project, @issue) } }
// This element is filled in using JavaScript.
.content-block.emoji-block
......
......@@ -4,22 +4,22 @@
.alert.alert-danger
%p The source project of this merge request has been removed.
.clearfix.detail-page-header
.issuable-header
.detail-page-header
.detail-page-header-body
.issuable-status-box.status-box{ class: status_box_class(@merge_request) }
= icon(@merge_request.state_icon_name, class: "hidden-sm hidden-md hidden-lg")
%span.hidden-xs
= @merge_request.state_human_name
%a.btn.btn-default.pull-right.visible-xs-block.gutter-toggle.issuable-gutter-toggle.js-sidebar-toggle{ href: "#" }
= icon('angle-double-left')
.issuable-meta
- if @merge_request.discussion_locked?
= icon('lock', class: 'issuable-warning-icon')
= issuable_meta(@merge_request, @project, "Merge request")
.issuable-actions.js-issuable-actions
%a.btn.btn-default.pull-right.visible-xs-block.gutter-toggle.issuable-gutter-toggle.js-sidebar-toggle{ href: "#" }
= icon('angle-double-left')
.detail-page-header-actions.js-issuable-actions
.clearfix.issue-btn-group.dropdown
%button.btn.btn-default.pull-left.hidden-md.hidden-lg{ type: "button", data: { toggle: "dropdown" } }
Options
......
.detail-page-header.clearfix
.detail-page-header
.detail-page-header-body
.snippet-box.has-tooltip.inline.append-right-5{ title: snippet_visibility_level_description(@snippet.visibility_level, @snippet), data: { container: "body" } }
%span.sr-only
= visibility_level_label(@snippet.visibility_level)
......@@ -8,7 +9,7 @@
= time_ago_with_tooltip(@snippet.created_at, placement: 'bottom', html_class: 'snippet_updated_ago')
by #{link_to_member(@project, @snippet.author, size: 24, author_class: "author item-title", avatar_class: "hidden-xs")}
.snippet-actions
.detail-page-header-actions
- if @snippet.project_id?
= render "projects/snippets/actions"
- else
......
......@@ -8,18 +8,18 @@ class RepositoryForkWorker
sidekiq_options status_expiration: StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION
def perform(project_id, forked_from_repository_storage_path, source_path, target_path)
def perform(project_id, forked_from_repository_storage_path, source_disk_path)
project = Project.find(project_id)
return unless start_fork(project)
Gitlab::Metrics.add_event(:fork_repository,
source_path: source_path,
target_path: target_path)
source_path: source_disk_path,
target_path: project.disk_path)
result = gitlab_shell.fork_repository(forked_from_repository_storage_path, source_path,
project.repository_storage_path, target_path)
raise ForkError, "Unable to fork project #{project_id} for repository #{source_path} -> #{target_path}" unless result
result = gitlab_shell.fork_repository(forked_from_repository_storage_path, source_disk_path,
project.repository_storage_path, project.disk_path)
raise ForkError, "Unable to fork project #{project_id} for repository #{source_disk_path} -> #{project.disk_path}" unless result
project.repository.after_import
raise ForkError, "Project #{project_id} had an invalid repository after fork" unless project.valid_repo?
......
---
title: Fix broken illustration images for monitoring page empty states
merge_request: 15889
author:
type: fixed
---
title: Fix related branches/Merge requests failing to load when the hostname setting
is changed
merge_request:
author:
type: fixed
---
title: Fix the fork project functionality for projects with hashed storage
merge_request: 15671
author:
type: fixed
---
title: Fix updateEndpoint undefined error for issue_show app root
merge_request: 15698
author:
type: fixed
---
title: Keep track of all circuitbreaker keys in a set
merge_request: 15613
author:
type: performance
---
title: Correctly link to a forked project from the new fork page.
merge_request: 15653
author:
type: fixed
---
title: Create a fork network for forks with a deleted source
merge_request: 15595
author:
type: fixed
---
title: Only load branch names for protected branch checks
merge_request:
author:
type: performance
---
title: Fix gitlab:import:repos Rake task moving repositories into the wrong location
merge_request:
author:
type: fixed
---
title: Optimize API /groups/:id/projects by preloading associations
merge_request:
author:
type: performance
......@@ -11,15 +11,7 @@ Prometheus::Client.configure do |config|
config.multiprocess_files_dir ||= Rails.root.join('tmp/prometheus_multiproc_dir')
end
config.pid_provider = -> do
wid = Prometheus::Client::Support::Unicorn.worker_id
wid = Process.pid if wid.nil?
if wid.nil?
"process_pid_#{Process.pid}"
else
"worker_id_#{wid}"
end
end
config.pid_provider = Prometheus::Client::Support::Unicorn.method(:worker_pid_provider)
end
Sidekiq.configure_server do |config|
......
class RescheduleForkNetworkCreation < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'PopulateForkNetworksRange'.freeze
BATCH_SIZE = 100
DELAY_INTERVAL = 15.seconds
disable_ddl_transaction!
class ForkedProjectLink < ActiveRecord::Base
include EachBatch
self.table_name = 'forked_project_links'
end
def up
say 'Populating the `fork_networks` based on existing `forked_project_links`'
queue_background_migration_jobs_by_range_at_intervals(ForkedProjectLink, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
def down
# nothing
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20171124132536) do
ActiveRecord::Schema.define(version: 20171124150326) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......
......@@ -80,16 +80,37 @@ module API
expose :group_access, as: :group_access_level
end
class BasicProjectDetails < Grape::Entity
expose :id, :description, :default_branch, :tag_list
expose :ssh_url_to_repo, :http_url_to_repo, :web_url
class ProjectIdentity < Grape::Entity
expose :id, :description
expose :name, :name_with_namespace
expose :path, :path_with_namespace
expose :created_at
end
class BasicProjectDetails < ProjectIdentity
include ::API::ProjectsRelationBuilder
expose :default_branch
# Avoids an N+1 query: https://github.com/mbleigh/acts-as-taggable-on/issues/91#issuecomment-168273770
expose :tag_list do |project|
# project.tags.order(:name).pluck(:name) is the most suitable option
# to avoid loading all the ActiveRecord objects but, if we use it here,
# it overrides the preloaded associations and makes a query
# (fixed in https://github.com/rails/rails/pull/25976).
project.tags.map(&:name).sort
end
expose :ssh_url_to_repo, :http_url_to_repo, :web_url
expose :avatar_url do |project, options|
project.avatar_url(only_path: false)
end
expose :star_count, :forks_count
expose :created_at, :last_activity_at
expose :last_activity_at
def self.preload_relation(projects_relation, options = {})
projects_relation.preload(:project_feature, :route)
.preload(namespace: [:route, :owner],
tags: :taggings)
end
end
class Project < BasicProjectDetails
......@@ -141,7 +162,7 @@ module API
expose :shared_runners_enabled
expose :lfs_enabled?, as: :lfs_enabled
expose :creator_id
expose :namespace, using: 'API::Entities::Namespace'
expose :namespace, using: 'API::Entities::NamespaceBasic'
expose :forked_from_project, using: Entities::BasicProjectDetails, if: lambda { |project, options| project.forked? }
expose :import_status
expose :import_error, if: lambda { |_project, options| options[:user_can_admin_project] }
......@@ -151,7 +172,7 @@ module API
expose :public_builds, as: :public_jobs
expose :ci_config_path
expose :shared_with_groups do |project, options|
SharedGroup.represent(project.project_group_links.all, options)
SharedGroup.represent(project.project_group_links, options)
end
expose :only_allow_merge_if_pipeline_succeeds
expose :request_access_enabled
......@@ -159,6 +180,18 @@ module API
expose :printing_merge_request_link_enabled
expose :statistics, using: 'API::Entities::ProjectStatistics', if: :statistics
def self.preload_relation(projects_relation, options = {})
super(projects_relation).preload(:group)
.preload(project_group_links: :group,
fork_network: :root_project,
forked_project_link: :forked_from_project,
forked_from_project: [:route, :forks, namespace: :route, tags: :taggings])
end
def self.forks_counting_projects(projects_relation)
projects_relation + projects_relation.map(&:forked_from_project).compact
end
end
class ProjectStatistics < Grape::Entity
......@@ -622,9 +655,11 @@ module API
expose :created_at
end
class Namespace < Grape::Entity
class NamespaceBasic < Grape::Entity
expose :id, :name, :path, :kind, :full_path, :parent_id
end
class Namespace < NamespaceBasic
expose :members_count_with_descendants, if: -> (namespace, opts) { expose_members_count_with_descendants?(namespace, opts) } do |namespace, _|
namespace.users_with_descendants.count
end
......@@ -684,7 +719,7 @@ module API
if options.key?(:project_members)
(options[:project_members] || []).find { |member| member.source_id == project.id }
else
project.project_members.find_by(user_id: options[:current_user].id)
project.project_member(options[:current_user])
end
end
......@@ -693,11 +728,25 @@ module API
if options.key?(:group_members)
(options[:group_members] || []).find { |member| member.source_id == project.namespace_id }
else
project.group.group_members.find_by(user_id: options[:current_user].id)
project.group.group_member(options[:current_user])
end
end
end
end
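# Preload member associations unless the caller already supplied them
# through the options, avoiding per-project member lookups.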
def self.preload_relation(projects_relation, options = {})
relation = super(projects_relation, options)
unless options.key?(:group_members)
relation = relation.preload(group: [group_members: [:source, user: [notification_settings: :source]]])
end
unless options.key?(:project_members)
relation = relation.preload(project_members: [:source, user: [notification_settings: :source]])
end
relation
end
end
class LabelBasic < Grape::Entity
......@@ -833,17 +882,24 @@ module API
expose :id, :sha, :ref, :status
end
class Job < Grape::Entity
class JobBasic < Grape::Entity
expose :id, :status, :stage, :name, :ref, :tag, :coverage
expose :created_at, :started_at, :finished_at
expose :duration
expose :user, with: User
expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
expose :commit, with: Commit
expose :runner, with: Runner
expose :pipeline, with: PipelineBasic
end
class Job < JobBasic
expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
expose :runner, with: Runner
end
class JobBasicWithProject < JobBasic
expose :project, with: ProjectIdentity
end
class Trigger < Grape::Entity
expose :id
expose :token, :description
......
......@@ -25,24 +25,7 @@ module API
optional :statistics, type: Boolean, default: false, desc: 'Include project statistics'
end
def present_groups(groups, options = {})
options = options.reverse_merge(
with: Entities::Group,
current_user: current_user
)
groups = groups.with_statistics if options[:statistics]
present paginate(groups), options
end
end
resource :groups do
include CustomAttributesEndpoints
desc 'Get a groups list' do
success Entities::Group
end
params do
params :group_list_params do
use :statistics_params
optional :skip_groups, type: Array[Integer], desc: 'Array of group ids to exclude from list'
optional :all_available, type: Boolean, desc: 'Show all groups that you have access to'
......@@ -52,19 +35,54 @@ module API
optional :sort, type: String, values: %w[asc desc], default: 'asc', desc: 'Sort by asc (ascending) or desc (descending)'
use :pagination
end
get do
def find_groups(params)
find_params = {
all_available: params[:all_available],
owned: params[:owned],
custom_attributes: params[:custom_attributes]
custom_attributes: params[:custom_attributes],
owned: params[:owned]
}
find_params[:parent] = find_group!(params[:id]) if params[:id]
groups = GroupsFinder.new(current_user, find_params).execute
groups = groups.search(params[:search]) if params[:search].present?
groups = groups.where.not(id: params[:skip_groups]) if params[:skip_groups].present?
groups = groups.reorder(params[:order_by] => params[:sort])
present_groups groups, statistics: params[:statistics] && current_user.admin?
groups
end
def find_group_projects(params)
group = find_group!(params[:id])
projects = GroupProjectsFinder.new(group: group, current_user: current_user, params: project_finder_params).execute
projects = reorder_projects(projects)
paginate(projects)
end
def present_groups(params, groups)
options = {
with: Entities::Group,
current_user: current_user,
statistics: params[:statistics] && current_user.admin?
}
groups = groups.with_statistics if options[:statistics]
present paginate(groups), options
end
end
resource :groups do
include CustomAttributesEndpoints
desc 'Get a groups list' do
success Entities::Group
end
params do
use :group_list_params
end
get do
groups = find_groups(params)
present_groups params, groups
end
desc 'Create a group. Available only for users who can create groups.' do
......@@ -159,11 +177,10 @@ module API
use :pagination
end
get ":id/projects" do
group = find_group!(params[:id])
projects = GroupProjectsFinder.new(group: group, current_user: current_user, params: project_finder_params).execute
projects = reorder_projects(projects)
projects = find_group_projects(params)
entity = params[:simple] ? Entities::BasicProjectDetails : Entities::Project
present paginate(projects), with: entity, current_user: current_user
present entity.prepare_relation(projects), with: entity, current_user: current_user
end
desc 'Transfer a project to the group namespace. Available only for admin.' do
......
......@@ -79,11 +79,11 @@ module API
projects = projects.with_statistics if params[:statistics]
projects = projects.with_issues_enabled if params[:with_issues_enabled]
projects = projects.with_merge_requests_enabled if params[:with_merge_requests_enabled]
projects = paginate(projects)
if current_user
projects = projects.includes(:route, :taggings, namespace: :route)
project_members = current_user.project_members
group_members = current_user.group_members
project_members = current_user.project_members.preload(:source, user: [notification_settings: :source])
group_members = current_user.group_members.preload(:source, user: [notification_settings: :source])
end
options = options.reverse_merge(
......@@ -95,7 +95,7 @@ module API
)
options[:with] = Entities::BasicProjectDetails if params[:simple]
present paginate(projects), options
present options[:with].prepare_relation(projects, options), options
end
end
......
module API
module ProjectsRelationBuilder
extend ActiveSupport::Concern
module ClassMethods
def prepare_relation(projects_relation, options = {})
projects_relation = preload_relation(projects_relation, options)
execute_batch_counting(projects_relation)
projects_relation
end
def preload_relation(projects_relation, options = {})
projects_relation
end
def forks_counting_projects(projects_relation)
projects_relation
end
def batch_forks_counting(projects_relation)
::Projects::BatchForksCountService.new(forks_counting_projects(projects_relation)).refresh_cache
end
def batch_open_issues_counting(projects_relation)
::Projects::BatchOpenIssuesCountService.new(projects_relation).refresh_cache
end
def execute_batch_counting(projects_relation)
batch_forks_counting(projects_relation)
batch_open_issues_counting(projects_relation)
end
end
end
end
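# Usage sketch (illustrative): an entity including this concern preloads
# its associations and warms the batch counters before rendering:
#
#   Entities::BasicProjectDetails.prepare_relation(projects)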
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration is going to create all `fork_networks` and
# the `fork_network_members` for the roots of fork networks based on the
# existing `forked_project_links`.
#
# When the source of a fork is deleted, we will create the fork with the
# target project as the root. This way, when there are forks of the target
# project, they will be joined into the same fork network.
#
# When the `fork_networks` and memberships for the root projects are created
# the `CreateForkNetworkMembershipsRange` migration is scheduled. This
# migration will create the memberships for all remaining forks-of-forks
class PopulateForkNetworksRange
def perform(start_id, end_id)
log("Creating fork networks for forked project links: #{start_id} - #{end_id}")
create_fork_networks_for_existing_projects(start_id, end_id)
create_fork_networks_for_missing_projects(start_id, end_id)
create_fork_networks_memberships_for_root_projects(start_id, end_id)
delay = BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY # rubocop:disable Metrics/LineLength
BackgroundMigrationWorker.perform_in(
delay, "CreateForkNetworkMembershipsRange", [start_id, end_id]
)
end
def create_fork_networks_for_existing_projects(start_id, end_id)
log("Creating fork networks: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
INSERT INTO fork_networks (root_project_id)
SELECT DISTINCT forked_project_links.forked_from_project_id
FROM forked_project_links
-- Exclude the forks that are not the first level fork of a project
WHERE NOT EXISTS (
SELECT true
FROM forked_project_links inner_links
WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
)
/* Exclude the ones that are already created, in case the fork network
was already created for another fork of the project.
*/
AND NOT EXISTS (
SELECT true
FROM fork_networks
WHERE forked_project_links.forked_from_project_id = fork_networks.root_project_id
)
-- Only create a fork network for a root project that still exists
AND EXISTS (
SELECT true
FROM projects
......@@ -27,7 +57,45 @@ module Gitlab
)
AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
INSERT_NETWORKS
end
def create_fork_networks_for_missing_projects(start_id, end_id)
log("Creating fork networks with missing root: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
INSERT INTO fork_networks (root_project_id)
SELECT DISTINCT forked_project_links.forked_to_project_id
FROM forked_project_links
-- Exclude forks that are not the root forks
WHERE NOT EXISTS (
SELECT true
FROM forked_project_links inner_links
WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
)
/* Exclude the ones that are already created, in case this migration is
re-run
*/
AND NOT EXISTS (
SELECT true
FROM fork_networks
WHERE forked_project_links.forked_to_project_id = fork_networks.root_project_id
)
/* Exclude the links for which the source project still exists; those
are processed in the previous step of this migration
*/
AND NOT EXISTS (
SELECT true
FROM projects
WHERE projects.id = forked_project_links.forked_from_project_id
)
AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
INSERT_NETWORKS
end
def create_fork_networks_memberships_for_root_projects(start_id, end_id)
log("Creating memberships for root projects: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~INSERT_ROOT
......@@ -36,8 +104,12 @@ module Gitlab
FROM fork_networks
/* Joining both on forked_from- and forked_to- so we could create the
memberships for forks for which the source was deleted
*/
INNER JOIN forked_project_links
ON forked_project_links.forked_from_project_id = fork_networks.root_project_id
OR forked_project_links.forked_to_project_id = fork_networks.root_project_id
WHERE NOT EXISTS (
SELECT true
......@@ -46,9 +118,6 @@ module Gitlab
)
AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
INSERT_ROOT
delay = BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
BackgroundMigrationWorker.perform_in(delay, "CreateForkNetworkMembershipsRange", [start_id, end_id])
end
def log(message)
......
......@@ -55,6 +55,7 @@ module Gitlab
name: project_name,
path: project_name,
skip_disk_validation: true,
import_type: 'gitlab_project',
namespace_id: group&.id).execute
if project.persisted? && mv_repo(project)
......
......@@ -7,6 +7,8 @@ module Gitlab
@root_path = root_path
@repo_path = repo_path
@root_path << '/' unless root_path.ends_with?('/')
# Split path into 'all/the/namespaces' and 'project_name'
@group_path, _, @project_name = repo_relative_path.rpartition('/')
end
......
......@@ -15,6 +15,7 @@ module Gitlab
Failing = Class.new(Inaccessible)
REDIS_KEY_PREFIX = 'storage_accessible:'.freeze
REDIS_KNOWN_KEYS = "#{REDIS_KEY_PREFIX}known_keys_set".freeze
def self.redis
Gitlab::Redis::SharedState
......
......@@ -13,10 +13,8 @@ module Gitlab
delegate :last_failure, :failure_count, to: :failure_info
def self.reset_all!
pattern = "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}*"
Gitlab::Git::Storage.redis.with do |redis|
all_storage_keys = redis.scan_each(match: pattern).to_a
all_storage_keys = redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
redis.del(*all_storage_keys) unless all_storage_keys.empty?
end
......@@ -135,23 +133,29 @@ module Gitlab
redis.hset(cache_key, :last_failure, last_failure.to_i)
redis.hincrby(cache_key, :failure_count, 1)
redis.expire(cache_key, failure_reset_time)
maintain_known_keys(redis)
end
end
end
def track_storage_accessible
return if no_failures?
@failure_info = FailureInfo.new(nil, 0)
Gitlab::Git::Storage.redis.with do |redis|
redis.pipelined do
redis.hset(cache_key, :last_failure, nil)
redis.hset(cache_key, :failure_count, 0)
maintain_known_keys(redis)
end
end
end
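# Tracks every circuitbreaker hash key in a sorted set scored by expiry
# time, so keys can be listed without a Redis SCAN; stale entries are
# trimmed on each write.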
def maintain_known_keys(redis)
expire_time = Time.now.to_i + failure_reset_time
redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, expire_time, cache_key)
redis.zremrangebyscore(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, '-inf', Time.now.to_i)
end
def get_failure_info
last_failure, failure_count = Gitlab::Git::Storage.redis.with do |redis|
redis.hmget(cache_key, :last_failure, :failure_count)
......
......@@ -4,8 +4,8 @@ module Gitlab
class Health
attr_reader :storage_name, :info
def self.pattern_for_storage(storage_name)
"#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage_name}:*"
def self.prefix_for_storage(storage_name)
"#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage_name}:"
end
def self.for_all_storages
......@@ -25,26 +25,15 @@ module Gitlab
private_class_method def self.all_keys_for_storages(storage_names, redis)
keys_per_storage = {}
all_keys = redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
redis.pipelined do
storage_names.each do |storage_name|
pattern = pattern_for_storage(storage_name)
matched_keys = redis.scan_each(match: pattern)
prefix = prefix_for_storage(storage_name)
keys_per_storage[storage_name] = matched_keys
end
keys_per_storage[storage_name] = all_keys.select { |key| key.starts_with?(prefix) }
end
# We need to make sure each lazy-loaded `Enumerator` for matched keys
# is loaded into an array.
#
# Otherwise it would be loaded in the second `Redis#pipelined` block
# within `.load_for_keys`. In this pipelined call, the active
# Redis-client changes again, so the values would not be available
# until the end of that pipelined-block.
keys_per_storage.each do |storage_name, key_future|
keys_per_storage[storage_name] = key_future.to_a
end
keys_per_storage
end
private_class_method def self.load_for_keys(keys_per_storage, redis)
......
......@@ -32,7 +32,7 @@ module Gitlab
def init_metrics
metrics = {}
metrics[:sampler_duration] = Metrics.histogram(with_prefix(:sampler_duration, :seconds), 'Sampler time', {})
metrics[:sampler_duration] = Metrics.histogram(with_prefix(:sampler_duration, :seconds), 'Sampler time', { worker: nil })
metrics[:total_time] = Metrics.gauge(with_prefix(:gc, :time_total), 'Total GC time', labels, :livesum)
GC.stat.keys.each do |key|
metrics[key] = Metrics.gauge(with_prefix(:gc, key), to_doc_string(key), labels, :livesum)
......@@ -99,9 +99,9 @@ module Gitlab
worker_no = ::Prometheus::Client::Support::Unicorn.worker_id
if worker_no
{ unicorn: worker_no }
{ worker: worker_no }
else
{ unicorn: 'master' }
{ worker: 'master' }
end
end
end
......
......@@ -144,20 +144,27 @@ module Gitlab
storage, "#{path}.git", "#{new_path}.git"])
end
# Fork repository to new namespace
# Fork repository to new path
# forked_from_storage - forked-from project's storage path
# path - project path with namespace
# forked_from_disk_path - project disk path
# forked_to_storage - forked-to project's storage path
# fork_namespace - namespace for forked project
# forked_to_disk_path - forked project disk path
#
# Ex.
# fork_repository("/path/to/forked_from/storage", "gitlab/gitlab-ci", "/path/to/forked_to/storage", "randx")
# fork_repository("/path/to/forked_from/storage", "gitlab/gitlab-ci", "/path/to/forked_to/storage", "new-namespace/gitlab-ci")
#
# Gitaly note: JV: not easy to migrate because this involves two Gitaly servers, not one.
def fork_repository(forked_from_storage, path, forked_to_storage, fork_namespace)
gitlab_shell_fast_execute([gitlab_shell_projects_path, 'fork-project',
forked_from_storage, "#{path}.git", forked_to_storage,
fork_namespace])
def fork_repository(forked_from_storage, forked_from_disk_path, forked_to_storage, forked_to_disk_path)
gitlab_shell_fast_execute(
[
gitlab_shell_projects_path,
'fork-repository',
forked_from_storage,
"#{forked_from_disk_path}.git",
forked_to_storage,
"#{forked_to_disk_path}.git"
]
)
end
# Remove repository from file system
......
require 'spec_helper'
describe 'Project fork' do
include ProjectForksHelper
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
......@@ -24,8 +26,9 @@ describe 'Project fork' do
end
context 'master in group' do
let(:group) { create(:group) }
before do
group = create(:group)
group.add_master(user)
end
......@@ -53,5 +56,17 @@ describe 'Project fork' do
expect(page).to have_css('.fork-thumbnail', count: 2)
expect(page).to have_css('.fork-thumbnail.disabled')
end
it 'links to the fork if the project was already forked within that namespace' do
forked_project = fork_project(project, user, namespace: group, repository: true)
visit new_project_fork_path(project)
expect(page).to have_css('div.forked', text: group.full_name)
click_link group.full_name
expect(current_path).to eq(project_path(forked_project))
end
end
end
......@@ -32,7 +32,7 @@ feature 'issuable templates', :js do
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
page.within('.content .issuable-actions') do
page.within('.js-issuable-actions') do
click_on 'Edit'
end
fill_in :'issuable-title', with: 'test issue title'
......@@ -77,7 +77,7 @@ feature 'issuable templates', :js do
message: 'added issue template',
branch_name: 'master')
visit project_issue_path project, issue
page.within('.content .issuable-actions') do
page.within('.js-issuable-actions') do
click_on 'Edit'
end
fill_in :'issuable-title', with: 'test issue title'
......
......@@ -63,7 +63,7 @@ import IssuablesHelper from '~/helpers/issuables_helper';
describe('merge request of another user', () => {
beforeEach(() => {
loadFixtures('merge_requests/merge_request_with_task_list.html.raw');
this.el = document.querySelector('.merge-request .issuable-actions');
this.el = document.querySelector('.js-issuable-actions');
const merge = new MergeRequest();
merge.hideCloseButton();
});
......@@ -83,7 +83,7 @@ import IssuablesHelper from '~/helpers/issuables_helper';
describe('merge request of current_user', () => {
beforeEach(() => {
loadFixtures('merge_requests/merge_request_of_current_user.html.raw');
this.el = document.querySelector('.merge-request .issuable-actions');
this.el = document.querySelector('.js-issuable-actions');
const merge = new MergeRequest();
merge.hideCloseButton();
});
......
......@@ -62,12 +62,15 @@ describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, sch
expect(base2_membership).not_to be_nil
end
it 'skips links that had their source project deleted' do
forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: create(:project).id)
it 'creates a fork network for the fork of which the source was deleted' do
fork = create(:project)
forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)
migration.perform(5, 8)
expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
end
it 'schedules a job for inserting memberships for forks-of-forks' do
......
......@@ -132,6 +132,23 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.git'))
end
it 'moves an existing project to the correct path' do
# This is a quick way to get a valid repository instead of copying an
# existing one. Since it's not persisted, the importer will try to
# create the project.
project = build(:project, :repository)
original_commit_count = project.repository.commit_count
bare_repo = Gitlab::BareRepositoryImport::Repository.new(project.repository_storage_path, project.repository.path)
gitlab_importer = described_class.new(admin, bare_repo)
expect(gitlab_importer).to receive(:create_project).and_call_original
new_project = gitlab_importer.create_project_if_needed
expect(new_project.repository.commit_count).to eq(original_commit_count)
end
end
context 'with Wiki' do
......
......@@ -46,6 +46,13 @@ describe ::Gitlab::BareRepositoryImport::Repository do
describe '#project_full_path' do
it 'returns the project full path' do
expect(project_repo_path.repo_path).to eq('/full/path/to/repo.git')
expect(project_repo_path.project_full_path).to eq('to/repo')
end
it 'returns the full path when the root path has no trailing slash' do
repo_path = described_class.new('/full/path', '/full/path/to/repo.git')
expect(repo_path.project_full_path).to eq('to/repo')
end
end
end
......@@ -27,6 +27,7 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
def set_in_redis(name, value)
Gitlab::Git::Storage.redis.with do |redis|
redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key)
redis.hmset(cache_key, name, value)
end.first
end
......@@ -181,6 +182,24 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(circuit_breaker.last_failure).to be_nil
end
it 'maintains known storage keys' do
Timecop.freeze do
# Insert an old key to expire
old_entry = Time.now.to_i - 3.days.to_i
Gitlab::Git::Storage.redis.with do |redis|
redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, old_entry, 'to_be_removed')
end
circuit_breaker.perform { '' }
known_keys = Gitlab::Git::Storage.redis.with do |redis|
redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
end
expect(known_keys).to contain_exactly(cache_key)
end
end
it 'only performs the accessibility check once' do
expect(Gitlab::Git::Storage::ForkedStorageCheck)
.to receive(:storage_available?).once.and_call_original
......
......@@ -6,6 +6,7 @@ describe Gitlab::Git::Storage::Health, clean_gitlab_redis_shared_state: true, br
def set_in_redis(cache_key, value)
Gitlab::Git::Storage.redis.with do |redis|
redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, cache_key)
redis.hmset(cache_key, :failure_count, value)
end.first
end
......
......@@ -200,18 +200,18 @@ describe Gitlab::Shell do
describe '#fork_repository' do
it 'returns true when the command succeeds' do
expect(Gitlab::Popen).to receive(:popen)
.with([projects_path, 'fork-project', 'current/storage', 'project/path.git', 'new/storage', 'new-namespace'],
.with([projects_path, 'fork-repository', 'current/storage', 'project/path.git', 'new/storage', 'fork/path.git'],
nil, popen_vars).and_return([nil, 0])
expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'new-namespace')).to be true
expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'fork/path')).to be true
end
it 'return false when the command fails' do
expect(Gitlab::Popen).to receive(:popen)
.with([projects_path, 'fork-project', 'current/storage', 'project/path.git', 'new/storage', 'new-namespace'],
.with([projects_path, 'fork-repository', 'current/storage', 'project/path.git', 'new/storage', 'fork/path.git'],
nil, popen_vars).and_return(["error", 1])
expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'new-namespace')).to be false
expect(gitlab_shell.fork_repository('current/storage', 'project/path', 'new/storage', 'fork/path')).to be false
end
end
......
......@@ -314,7 +314,6 @@ describe Project do
it { is_expected.to delegate_method(:empty_repo?).to(:repository) }
it { is_expected.to delegate_method(:members).to(:team).with_prefix(true) }
it { is_expected.to delegate_method(:count).to(:forks).with_prefix(true) }
it { is_expected.to delegate_method(:name).to(:owner).with_prefix(true).with_arguments(allow_nil: true) }
end
......@@ -1716,8 +1715,7 @@ describe Project do
expect(RepositoryForkWorker).to receive(:perform_async).with(
project.id,
forked_from_project.repository_storage_path,
forked_from_project.disk_path,
project.namespace.full_path).and_return(import_jid)
forked_from_project.disk_path).and_return(import_jid)
expect(project.add_import_job).to eq(import_jid)
end
......@@ -2459,7 +2457,7 @@ describe Project do
it 'returns the number of forks' do
project = build(:project)
allow(project.forks).to receive(:count).and_return(1)
expect_any_instance_of(Projects::ForksCountService).to receive(:count).and_return(1)
expect(project.forks_count).to eq(1)
end
......
......@@ -463,6 +463,20 @@ describe API::Groups do
expect(response).to have_gitlab_http_status(404)
end
it 'avoids N+1 queries' do
get api("/groups/#{group1.id}/projects", admin)
control_count = ActiveRecord::QueryRecorder.new do
get api("/groups/#{group1.id}/projects", admin)
end.count
create(:project, namespace: group1)
expect do
get api("/groups/#{group1.id}/projects", admin)
end.not_to exceed_query_limit(control_count)
end
end
context 'when using group path in URL' do
......
......@@ -4,9 +4,17 @@ describe Projects::CountService do
let(:project) { build(:project, id: 1) }
let(:service) { described_class.new(project) }
describe '#relation_for_count' do
describe '.query' do
it 'raises NotImplementedError' do
expect { service.relation_for_count }.to raise_error(NotImplementedError)
expect { described_class.query(project.id) }.to raise_error(NotImplementedError)
end
end
describe '#relation_for_count' do
it 'calls the class method query with the project id' do
expect(described_class).to receive(:query).with(project.id)
service.relation_for_count
end
end
......
require 'spec_helper'
describe RepositoryForkWorker do
let(:project) { create(:project, :repository, :import_scheduled) }
let(:fork_project) { create(:project, :repository, forked_from_project: project) }
let(:project) { create(:project, :repository) }
let(:fork_project) { create(:project, :repository, :import_scheduled, forked_from_project: project) }
let(:shell) { Gitlab::Shell.new }
subject { described_class.new }
......@@ -12,50 +12,39 @@ describe RepositoryForkWorker do
end
describe "#perform" do
def perform!
subject.perform(fork_project.id, '/test/path', project.disk_path)
end
def expect_fork_repository
expect(shell).to receive(:fork_repository).with(
'/test/path',
project.disk_path,
fork_project.repository_storage_path,
fork_project.disk_path
)
end
describe 'when a worker was reset without cleanup' do
let(:jid) { '12345678' }
let(:started_project) { create(:project, :repository, :import_started) }
it 'creates a new repository from a fork' do
allow(subject).to receive(:jid).and_return(jid)
expect(shell).to receive(:fork_repository).with(
'/test/path',
project.full_path,
project.repository_storage_path,
fork_project.namespace.full_path
).and_return(true)
expect_fork_repository.and_return(true)
subject.perform(
project.id,
'/test/path',
project.full_path,
fork_project.namespace.full_path)
perform!
end
end
it "creates a new repository from a fork" do
expect(shell).to receive(:fork_repository).with(
'/test/path',
project.full_path,
project.repository_storage_path,
fork_project.namespace.full_path
).and_return(true)
expect_fork_repository.and_return(true)
subject.perform(
project.id,
'/test/path',
project.full_path,
fork_project.namespace.full_path)
perform!
end
it 'flushes various caches' do
expect(shell).to receive(:fork_repository).with(
'/test/path',
project.full_path,
project.repository_storage_path,
fork_project.namespace.full_path
).and_return(true)
expect_fork_repository.and_return(true)
expect_any_instance_of(Repository).to receive(:expire_emptiness_caches)
.and_call_original
......@@ -63,32 +52,22 @@ describe RepositoryForkWorker do
expect_any_instance_of(Repository).to receive(:expire_exists_cache)
.and_call_original
subject.perform(project.id, '/test/path', project.full_path,
fork_project.namespace.full_path)
perform!
end
it "handles bad fork" do
source_path = project.full_path
target_path = fork_project.namespace.full_path
error_message = "Unable to fork project #{project.id} for repository #{source_path} -> #{target_path}"
error_message = "Unable to fork project #{fork_project.id} for repository #{project.full_path} -> #{fork_project.full_path}"
expect(shell).to receive(:fork_repository).and_return(false)
expect_fork_repository.and_return(false)
expect do
subject.perform(project.id, '/test/path', source_path, target_path)
end.to raise_error(RepositoryForkWorker::ForkError, error_message)
expect { perform! }.to raise_error(RepositoryForkWorker::ForkError, error_message)
end
it 'handles unexpected error' do
source_path = project.full_path
target_path = fork_project.namespace.full_path
allow_any_instance_of(Gitlab::Shell).to receive(:fork_repository).and_raise(RuntimeError)
expect_fork_repository.and_raise(RuntimeError)
expect do
subject.perform(project.id, '/test/path', source_path, target_path)
end.to raise_error(RepositoryForkWorker::ForkError)
expect(project.reload.import_status).to eq('failed')
expect { perform! }.to raise_error(RepositoryForkWorker::ForkError)
expect(fork_project.reload.import_status).to eq('failed')
end
end
end