
Commit 0f953ac4 by Filipa Lacerda

Merge branch '10-8-stable-prepare-rc8' into '10-8-stable'

Prepare 10.8 RC8 release

See merge request gitlab-org/gitlab-ce!18883
parents d66ae2d5 abd24d1c
......@@ -110,8 +110,8 @@ export default {
Welcome to the GitLab IDE
</h4>
<p>
You can select a file in the left sidebar to begin
editing and use the right sidebar to commit your changes.
Select a file from the left sidebar to begin editing.
Afterwards, you'll be able to commit your changes.
</p>
</div>
</div>
......
......@@ -140,7 +140,7 @@ export default {
this.file.staged && this.file.key.indexOf('unstaged-') === 0 ? head : null,
);
if (this.viewer === viewerTypes.mr) {
if (this.viewer === viewerTypes.mr && this.file.mrChange) {
this.editor.attachMergeRequestModel(this.model);
} else {
this.editor.attachModel(this.model);
......
......@@ -44,6 +44,7 @@ export const dataStructure = () => ({
size: 0,
parentPath: null,
lastOpenedAt: 0,
mrChange: null,
});
export const decorateData = entity => {
......
......@@ -17,6 +17,7 @@
display: flex;
align-items: center;
justify-content: space-between;
line-height: $line-height-base;
.title {
display: flex;
......@@ -33,10 +34,14 @@
.navbar-collapse {
padding-right: 0;
.navbar-nav {
margin: 0;
}
}
.nav li a {
color: $theme-gray-700;
.nav li {
float: none;
}
}
......
......@@ -3,6 +3,10 @@ module Users
include InternalRedirect
skip_before_action :enforce_terms!
skip_before_action :check_password_expiration
skip_before_action :check_two_factor_requirement
skip_before_action :require_email
before_action :terms
layout 'terms'
......
......@@ -107,7 +107,13 @@ module Ci
end
def assign_to(project, current_user = nil)
if shared?
self.is_shared = false if shared?
self.runner_type = :project_type
elsif group_type?
raise ArgumentError, 'Transitioning a group runner to a project runner is not supported'
end
self.save
project.runner_projects.create(runner_id: self.id)
end
......
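For quick reviewer context, here is a minimal, self-contained sketch of the behaviour this hunk introduces (plain Ruby with illustrative names, not the actual Ci::Runner model): a shared runner is converted into a project runner on assignment, while assigning a group runner now raises.

# Toy stand-in for the updated Ci::Runner#assign_to logic.
class ToyRunner
  attr_reader :runner_type, :projects

  def initialize(runner_type)
    @runner_type = runner_type # :instance_type, :group_type or :project_type
    @projects = []
  end

  def shared?
    runner_type == :instance_type
  end

  def group_type?
    runner_type == :group_type
  end

  def assign_to(project)
    if shared?
      @runner_type = :project_type
    elsif group_type?
      raise ArgumentError, 'Transitioning a group runner to a project runner is not supported'
    end

    @projects << project
  end
end

runner = ToyRunner.new(:instance_type)
runner.assign_to('some/project')
runner.runner_type # => :project_type

begin
  ToyRunner.new(:group_type).assign_to('some/project')
rescue ArgumentError => e
  e.message # => "Transitioning a group runner to a project runner is not supported"
end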
......@@ -22,7 +22,8 @@ module ShaAttribute
column = columns.find { |c| c.name == name.to_s }
unless column
raise ArgumentError.new("sha_attribute #{name.inspect} is invalid since the column doesn't exist")
warn "WARNING: sha_attribute #{name.inspect} is invalid since the column doesn't exist - you may need to run database migrations"
return
end
unless column.type == :binary
......
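A rough sketch of the pattern this ShaAttribute change adopts (an illustrative stand-in, not the real concern): when the backing column is missing, typically because migrations have not run yet, the macro now degrades to a warning instead of raising and aborting class load.

# Toy stand-in: `columns` is just an array of column names here,
# rather than ActiveRecord column objects.
class ToyModel
  def self.columns
    %w[id verification_checksum]
  end

  def self.sha_attribute(name)
    unless columns.include?(name.to_s)
      warn "WARNING: sha_attribute #{name.inspect} is invalid since the column doesn't exist " \
           "- you may need to run database migrations"
      return
    end

    # The real concern registers a binary SHA attribute type here;
    # this sketch only records that the attribute was set up.
    (@sha_attributes ||= []) << name
  end
end

ToyModel.sha_attribute(:verification_checksum) # registered quietly
ToyModel.sha_attribute(:missing_column)        # prints a warning, no exception raised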
......@@ -20,10 +20,10 @@
= brand_header_logo
- logo_text = brand_header_logo_type
- if logo_text.present?
%span.logo-text.hidden-xs.prepend-left-8
%span.logo-text.prepend-left-8
= logo_text
- if header_link?(:user_dropdown)
.navbar-collapse.collapse
.navbar-collapse
%ul.nav.navbar-nav
%li.header-user.dropdown
= link_to current_user, class: user_dropdown_class, data: { toggle: "dropdown" } do
......
......@@ -9,85 +9,6 @@ module ObjectStorage
SanityCheckError = Class.new(StandardError)
class Upload < ActiveRecord::Base
# Upper limit for foreground checksum processing
CHECKSUM_THRESHOLD = 100.megabytes
belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
validates :size, presence: true
validates :path, presence: true
validates :model, presence: true
validates :uploader, presence: true
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
def self.hexdigest(path)
Digest::SHA256.file(path).hexdigest
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
end
def calculate_checksum!
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
end
def exist?
File.exist?(absolute_path)
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def checksummable?
checksum.nil? && local? && exist?
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
def schedule_checksum
UploadChecksumWorker.perform_async(id)
end
def relative_path?
!path.start_with?('/')
end
def identifier
File.basename(path)
end
def uploader_class
Object.const_get(uploader)
end
end
class MigrationResult
attr_reader :upload
attr_accessor :error
......
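Context for the Upload model being moved in this hunk: it hashes small files inline before save and defers larger ones to a background worker. A minimal illustration of that size cutoff and of the digest call it relies on, using only the Ruby standard library (the worker hand-off is represented by a plain puts):

require 'digest'
require 'tempfile'

CHECKSUM_THRESHOLD = 100 * 1024 * 1024 # 100 MB, mirroring the constant above

def foreground_checksummable?(size)
  size <= CHECKSUM_THRESHOLD
end

Tempfile.create('upload-sample') do |file|
  file.write('hello world')
  file.flush

  size = File.size(file.path)

  if foreground_checksummable?(size)
    # Small files: hash synchronously, as Upload#calculate_checksum! does in before_save.
    puts Digest::SHA256.file(file.path).hexdigest
  else
    # Large files would instead be handed to a background job (UploadChecksumWorker above).
    puts 'schedule background checksum'
  end
end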
---
title: 46210 Display logo and user dropdown on mobile for terms page and fix styling
merge_request:
author:
type: fixed
---
title: Fixes database inconsistencies between Community and Enterprise Edition on
import state
merge_request: 18811
author:
type: fixed
---
title: Fix finding wiki pages when they have invalidly-encoded content
merge_request: 18856
author:
type: fixed
---
title: Fix outdated Web IDE welcome copy
merge_request: 18861
author:
type: fixed
......@@ -7,6 +7,20 @@ module Gollum
end
require "gollum-lib"
module Gollum
class Page
def text_data(encoding = nil)
data = if raw_data.respond_to?(:encoding)
raw_data.force_encoding(encoding || Encoding::UTF_8)
else
raw_data
end
Gitlab::EncodingHelper.encode!(data)
end
end
end
Rails.application.configure do
config.after_initialize do
Gollum::Page.per_page = Kaminari.config.default_per_page
......
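As a rough illustration of what the Gollum::Page#text_data patch above achieves (this sketch approximates Gitlab::EncodingHelper.encode! with plain Ruby string APIs; it is not the actual helper): raw bytes are tagged as UTF-8 and undecodable bytes are dropped, so wiki pages with invalidly-encoded content stay findable.

# Approximation only: the real patch delegates to Gitlab::EncodingHelper.encode!,
# which also attempts charset detection before falling back to scrubbing.
def text_data(raw_data, encoding = nil)
  data = if raw_data.respond_to?(:encoding)
           raw_data.dup.force_encoding(encoding || Encoding::UTF_8)
         else
           raw_data
         end

  return data unless data.respond_to?(:valid_encoding?) && !data.valid_encoding?

  # Drop invalid bytes; the ProjectWiki spec later in this commit expects "f\xFCr" to come back as "fr".
  data.scrub('')
end

puts text_data("f\xFCr".b) # => "fr"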
class AddNotNullConstraintToProjectMirrorDataForeignKey < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
class ProjectImportState < ActiveRecord::Base
include EachBatch
self.table_name = 'project_mirror_data'
end
def up
ProjectImportState.where(project_id: nil).delete_all
change_column_null :project_mirror_data, :project_id, false
end
def down
change_column_null :project_mirror_data, :project_id, true
end
end
class AddUniqueConstraintToProjectMirrorDataProjectIdIndex < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index(:project_mirror_data,
:project_id,
unique: true,
name: 'index_project_mirror_data_on_project_id_unique')
remove_concurrent_index_by_name(:project_mirror_data, 'index_project_mirror_data_on_project_id')
rename_index(:project_mirror_data,
'index_project_mirror_data_on_project_id_unique',
'index_project_mirror_data_on_project_id')
end
def down
rename_index(:project_mirror_data,
'index_project_mirror_data_on_project_id',
'index_project_mirror_data_on_project_id_old')
add_concurrent_index(:project_mirror_data, :project_id)
remove_concurrent_index_by_name(:project_mirror_data,
'index_project_mirror_data_on_project_id_old')
end
end
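For context, the second migration above swaps the non-unique project_id index for a unique one without downtime: build the unique index under a temporary name, drop the old index, then rename. add_concurrent_index and remove_concurrent_index_by_name are GitLab migration helpers; a plain ActiveRecord sketch of the same shape (illustrative only, not the shipped migration) looks like this:

# Sketch of the index-swap pattern using stock Rails migration methods (PostgreSQL).
class SwapProjectMirrorDataIndex < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
    add_index :project_mirror_data, :project_id,
              unique: true,
              name: 'index_project_mirror_data_on_project_id_unique',
              algorithm: :concurrently
    remove_index :project_mirror_data, name: 'index_project_mirror_data_on_project_id'
    rename_index :project_mirror_data,
                 'index_project_mirror_data_on_project_id_unique',
                 'index_project_mirror_data_on_project_id'
  end

  def down
    rename_index :project_mirror_data,
                 'index_project_mirror_data_on_project_id',
                 'index_project_mirror_data_on_project_id_old'
    add_index :project_mirror_data, :project_id,
              name: 'index_project_mirror_data_on_project_id',
              algorithm: :concurrently
    remove_index :project_mirror_data, name: 'index_project_mirror_data_on_project_id_old'
  end
end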
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180508055821) do
ActiveRecord::Schema.define(version: 20180508102840) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -1529,14 +1529,14 @@ ActiveRecord::Schema.define(version: 20180508055821) do
add_index "project_import_data", ["project_id"], name: "index_project_import_data_on_project_id", using: :btree
create_table "project_mirror_data", force: :cascade do |t|
t.integer "project_id"
t.integer "project_id", null: false
t.string "status"
t.string "jid"
t.text "last_error"
end
add_index "project_mirror_data", ["jid"], name: "index_project_mirror_data_on_jid", using: :btree
add_index "project_mirror_data", ["project_id"], name: "index_project_mirror_data_on_project_id", using: :btree
add_index "project_mirror_data", ["project_id"], name: "index_project_mirror_data_on_project_id", unique: true, using: :btree
add_index "project_mirror_data", ["status"], name: "index_project_mirror_data_on_status", using: :btree
create_table "project_statistics", force: :cascade do |t|
......
......@@ -437,5 +437,107 @@ feature 'Login' do
expect(current_path).to eq(root_path)
end
context 'when 2FA is required for the user' do
before do
group = create(:group, require_two_factor_authentication: true)
group.add_developer(user)
end
context 'when the user did not enable 2FA' do
it 'asks to set 2FA before asking to accept the terms' do
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(profile_two_factor_auth_path)
fill_in 'pin_code', with: user.reload.current_otp
click_button 'Register with two-factor app'
click_link 'Proceed'
expect(current_path).to eq(profile_account_path)
end
end
context 'when the user already enabled 2FA' do
before do
user.update!(otp_required_for_login: true,
otp_secret: User.generate_otp_secret(32))
end
it 'asks the user to accept the terms' do
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
fill_in 'user_otp_attempt', with: user.reload.current_otp
click_button 'Verify code'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(root_path)
end
end
end
context "when the user's password is expired" do
before do
user.update!(password_expires_at: Time.parse('2018-05-08 11:29:46 UTC'))
end
it 'asks the user to accept the terms before setting a new password' do
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(new_profile_password_path)
fill_in 'user_current_password', with: '12345678'
fill_in 'user_password', with: 'new password'
fill_in 'user_password_confirmation', with: 'new password'
click_button 'Set new password'
expect(page).to have_content('Password successfully changed')
end
end
context 'when the user does not have an email configured' do
let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml', email: 'temp-email-for-oauth-user@gitlab.localhost') }
before do
stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config])
end
it 'asks the user to accept the terms before setting an email' do
gitlab_sign_in_via('saml', user, 'my-uid')
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(profile_path)
fill_in 'Email', with: 'hello@world.com'
click_button 'Update profile settings'
expect(page).to have_content('Profile was successfully updated')
end
end
end
end
......@@ -24,7 +24,7 @@ describe('RepoEditor', () => {
f.active = true;
f.tempFile = true;
vm.$store.state.openFiles.push(f);
vm.$store.state.entries[f.path] = f;
Vue.set(vm.$store.state.entries, f.path, f);
vm.monaco = true;
vm.$mount();
......@@ -215,6 +215,30 @@ describe('RepoEditor', () => {
expect(vm.editor.attachModel).toHaveBeenCalledWith(vm.model);
});
it('attaches model to merge request editor', () => {
vm.$store.state.viewer = 'mrdiff';
vm.file.mrChange = true;
spyOn(vm.editor, 'attachMergeRequestModel');
Editor.editorInstance.modelManager.dispose();
vm.setupEditor();
expect(vm.editor.attachMergeRequestModel).toHaveBeenCalledWith(vm.model);
});
it('does not attach model to merge request editor when not a MR change', () => {
vm.$store.state.viewer = 'mrdiff';
vm.file.mrChange = false;
spyOn(vm.editor, 'attachMergeRequestModel');
Editor.editorInstance.modelManager.dispose();
vm.setupEditor();
expect(vm.editor.attachMergeRequestModel).not.toHaveBeenCalledWith(vm.model);
});
it('adds callback methods', () => {
spyOn(vm.editor, 'onPositionChange').and.callThrough();
......
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180508100222_add_not_null_constraint_to_project_mirror_data_foreign_key.rb')
describe AddNotNullConstraintToProjectMirrorDataForeignKey, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
before do
import_state.create!(id: 1, project_id: nil, status: :started)
end
it 'removes every import state without an associated project_id' do
expect do
subject.up
end.to change { import_state.count }.from(1).to(0)
end
end
......@@ -198,16 +198,30 @@ describe Ci::Runner do
end
describe '#assign_to' do
let!(:project) { FactoryBot.create :project }
let!(:shared_runner) { FactoryBot.create(:ci_runner, :shared) }
let!(:project) { FactoryBot.create(:project) }
before do
shared_runner.assign_to(project)
subject { runner.assign_to(project) }
context 'with shared runner' do
let!(:runner) { FactoryBot.create(:ci_runner, :shared) }
it 'transitions shared runner to project runner and assigns project' do
subject
expect(runner).to be_specific
expect(runner).to be_project_type
expect(runner.projects).to eq([project])
expect(runner.only_for?(project)).to be_truthy
end
end
context 'with group runner' do
let!(:runner) { FactoryBot.create(:ci_runner, runner_type: :group_type) }
it { expect(shared_runner).to be_specific }
it { expect(shared_runner.projects).to eq([project]) }
it { expect(shared_runner.only_for?(project)).to be_truthy }
it 'raises an error' do
expect { subject }
.to raise_error(ArgumentError, 'Transitioning a group runner to a project runner is not supported')
end
end
end
describe '.online' do
......
......@@ -36,24 +36,26 @@ describe ShaAttribute do
end
context 'when the table does not exist' do
it 'allows the attribute to be added' do
it 'allows the attribute to be added and issues a warning' do
allow(model).to receive(:table_exists?).and_return(false)
expect(model).not_to receive(:columns)
expect(model).to receive(:attribute)
expect(model).to receive(:warn)
model.sha_attribute(:name)
end
end
context 'when the column does not exist' do
it 'raises ArgumentError' do
it 'allows the attribute to be added and issues a warning' do
allow(model).to receive(:table_exists?).and_return(true)
expect(model).to receive(:columns)
expect(model).not_to receive(:attribute)
expect(model).to receive(:attribute)
expect(model).to receive(:warn)
expect { model.sha_attribute(:no_name) }.to raise_error(ArgumentError)
model.sha_attribute(:no_name)
end
end
......
......@@ -159,6 +159,17 @@ describe ProjectWiki do
expect(page.title).to eq("autre pagé")
end
end
context 'pages with invalidly-encoded content' do
before do
create_page("encoding is fun", "f\xFCr".b)
end
it "can find the page" do
page = subject.find_page("encoding is fun")
expect(page.content).to eq("fr")
end
end
end
context 'when Gitaly wiki_find_page is enabled' do
......
......@@ -7,16 +7,11 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
end
end
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:uploads) { Upload.all }
let(:model_class) { Project }
let(:mounted_as) { :avatar }
let(:uploads) { Upload.all }
let(:to_store) { ObjectStorage::Store::REMOTE }
before do
stub_uploads_object_storage(AvatarUploader)
end
shared_examples "uploads migration worker" do
describe '.enqueue!' do
def enqueue!
described_class.enqueue!(uploads, Project, mounted_as, to_store)
......@@ -50,14 +45,18 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
end
end
before do
stub_const("WrongModel", Class.new)
end
context 'uploader types mismatch' do
let!(:outlier) { create(:upload, uploader: 'FileUploader') }
let!(:outlier) { create(:upload, uploader: 'GitlabUploader') }
include_examples 'raises a SanityCheckError'
end
context 'model types mismatch' do
let!(:outlier) { create(:upload, model_type: 'Potato') }
let!(:outlier) { create(:upload, model_type: 'WrongModel') }
include_examples 'raises a SanityCheckError'
end
......@@ -101,19 +100,45 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
it 'migrates files' do
perform
aggregate_failures do
projects.each do |project|
expect(project.reload.avatar.upload.local?).to be_falsey
end
end
expect(Upload.where(store: ObjectStorage::Store::LOCAL).count).to eq(0)
end
context 'migration is unsuccessful' do
before do
allow_any_instance_of(ObjectStorage::Concern).to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.")
allow_any_instance_of(ObjectStorage::Concern)
.to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.")
end
it_behaves_like 'outputs correctly', failures: 10
end
end
end
context "for AvatarUploader" do
let!(:projects) { create_list(:project, 10, :with_avatar) }
let(:mounted_as) { :avatar }
before do
stub_uploads_object_storage(AvatarUploader)
end
it_behaves_like "uploads migration worker"
end
context "for FileUploader" do
let!(:projects) { create_list(:project, 10) }
let(:secret) { SecureRandom.hex }
let(:mounted_as) { nil }
before do
stub_uploads_object_storage(FileUploader)
projects.map do |project|
uploader = FileUploader.new(project)
uploader.store!(fixture_file_upload('spec/fixtures/doc_sample.txt'))
end
end
it_behaves_like "uploads migration worker"
end
end
......@@ -12,8 +12,10 @@
# AUTO_DEVOPS_DOMAIN must also be set as a variable at the group or project
# level, or manually added below.
#
# If you want to deploy to staging first, or enable canary deploys,
# uncomment the relevant jobs in the pipeline below.
# Continuous deployment to production is enabled by default.
# If you want to deploy to staging first, or enable incremental rollouts,
# set STAGING_ENABLED or INCREMENTAL_ROLLOUT_ENABLED environment variables.
# If you want to use canary deployments, uncomment the canary job.
#
# If Auto DevOps fails to detect the proper buildpack, or if you want to
# specify a custom buildpack, set a project variable `BUILDPACK_URL` to the
......@@ -88,14 +90,6 @@ codequality:
artifacts:
paths: [codeclimate.json]
license_management:
image: registry.gitlab.com/gitlab-org/security-products/license-management:latest
allow_failure: true
script:
- license_management
artifacts:
paths: [gl-license-report.json]
performance:
stage: performance
image: docker:stable
......@@ -223,8 +217,8 @@ stop_review:
# Staging deploys are disabled by default since
# continuous deployment to production is enabled by default
# If you prefer to automatically deploy to staging and
# only manually promote to production, enable this job by removing the dot (.),
# and uncomment the `when: manual` line in the `production` job.
# only manually promote to production, enable this job by setting
# STAGING_ENABLED.
staging:
stage: staging
......@@ -245,13 +239,9 @@ staging:
kubernetes: active
variables:
- $STAGING_ENABLED
except:
variables:
- $INCREMENTAL_ROLLOUT_ENABLED
# Canaries are disabled by default, but if you want them,
# and know what the downsides are, enable this job by removing the dot (.),
# and uncomment the `when: manual` line in the `production` job.
# and know what the downsides are, enable this job by removing the dot (.).
.canary:
stage: canary
......@@ -272,11 +262,6 @@ staging:
- master
kubernetes: active
# This job continuously deploys to production on every push to `master`.
# To make this a manual process, either because you're enabling `staging`
# or `canary` deploys, or you simply want more control over when you deploy
# to production, uncomment the `when: manual` line in the `production` job.
.production: &production_template
stage: production
script:
......@@ -310,6 +295,7 @@ production:
production_manual:
<<: *production_template
when: manual
allow_failure: false
only:
refs:
- master
......@@ -345,6 +331,7 @@ rollout 10%:
<<: *rollout_template
variables:
ROLLOUT_PERCENTAGE: 10
when: manual
only:
refs:
- master
......@@ -379,6 +366,7 @@ rollout 50%:
rollout 100%:
<<: *production_template
when: manual
allow_failure: false
only:
refs:
- master
......@@ -428,14 +416,6 @@ rollout 100%:
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
}
function license_management() {
if echo $GITLAB_FEATURES |grep license_management > /dev/null ; then
/run.sh .
else
echo "License management is not available in your subscription"
fi
}
function sast() {
case "$CI_SERVER_VERSION" in
*-ee)
......@@ -562,12 +542,14 @@ rollout 100%:
replicas=$(get_replicas "$track" "$percentage")
if [[ -n "$(helm ls -q "^$name$")" ]]; then
helm upgrade --reuse-values \
--wait \
--set replicaCount="$replicas" \
--namespace="$KUBE_NAMESPACE" \
"$name" \
chart/
fi
}
function install_dependencies() {
......