
Commit 0d458b96 by Micaël Bergeron

remove geo specific code

parent 0e732fa4
class Projects::JobsController < Projects::ApplicationController
prepend EE::Projects::JobsController
before_action :build, except: [:index, :cancel_all]
before_action :authorize_read_build!,
@@ -119,11 +117,17 @@ class Projects::JobsController < Projects::ApplicationController
end
def raw
build.trace.read do |stream|
if stream.file?
send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
else
render_404
if trace_artifact_file
send_upload(trace_artifact_file,
send_params: raw_send_params,
redirect_params: raw_redirect_params)
else
build.trace.read do |stream|
if stream.file?
send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
else
render_404
end
end
end
end
@@ -138,9 +142,21 @@ class Projects::JobsController < Projects::ApplicationController
return access_denied! unless can?(current_user, :erase_build, build)
end
def raw_send_params
{ type: 'text/plain; charset=utf-8', disposition: 'inline' }
end
def raw_redirect_params
{ query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
end
def trace_artifact_file
@trace_artifact_file ||= build.job_artifacts_trace&.file
end
def build
@build ||= project.builds.find(params[:id])
.present(current_user: current_user)
.present(current_user: current_user)
end
def build_path(build)
......
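For context, a hedged sketch of a controller spec exercising the new raw behaviour: serve the archived trace artifact when one exists, otherwise fall back to streaming the live trace. The factory traits (:trace_artifact, :trace_live) and the request parameters are assumptions, not taken from this commit.

# Illustrative spec sketch; traits and params are assumptions, not part of this diff.
require 'spec_helper'

describe Projects::JobsController do
  let(:project) { create(:project, :public) }
  let(:pipeline) { create(:ci_pipeline, project: project) }

  describe 'GET #raw' do
    it 'serves the archived trace artifact when one exists' do
      job = create(:ci_build, :trace_artifact, pipeline: pipeline)

      get :raw, namespace_id: project.namespace, project_id: project, id: job.id

      expect(response).to have_gitlab_http_status(200)
      expect(response.headers['Content-Type']).to eq('text/plain; charset=utf-8')
    end

    it 'falls back to streaming the live trace otherwise' do
      job = create(:ci_build, :trace_live, pipeline: pipeline)

      get :raw, namespace_id: project.namespace, project_id: project, id: job.id

      expect(response).to have_gitlab_http_status(200)
    end
  end
end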
@@ -15,7 +15,7 @@ class LfsObject < ActiveRecord::Base
after_save if: :file_changed?, on: [:create, :update] do
run_after_commit do
file.schedule_migration_to_object_storage
file.schedule_background_upload
end
end
......
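The hook rename above (schedule_migration_to_object_storage to schedule_background_upload) points at the shared object-storage concern rather than a Geo-specific path. A purely illustrative sketch of what such a hook might do; the module, worker name and guard methods are assumed names, not the concern's actual code:

# Illustrative only: module, worker name and guard methods are assumptions.
module ObjectStorage
  module BackgroundUpload
    def schedule_background_upload
      return unless file_storage?                          # only files still on local disk need uploading
      return unless self.class.object_store_enabled?       # object storage configured at all
      return unless self.class.background_upload_enabled?  # background mode turned on

      BackgroundUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
    end
  end
end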
class JobArtifactUploader < GitlabUploader
prepend EE::JobArtifactUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
......
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180213131630) do
ActiveRecord::Schema.define(version: 20180216121030) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......
require 'spec_helper'
describe Gitlab::Geo::FileTransfer do
let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
subject { described_class.new(:file, upload) }
describe '#execute' do
context 'user avatar' do
it 'sets an absolute path' do
expect(subject.file_type).to eq(:file)
expect(subject.file_id).to eq(upload.id)
expect(subject.filename).to eq(upload.absolute_path)
expect(Pathname.new(subject.filename).absolute?).to be_truthy
expect(subject.request_data).to eq({ id: upload.model_id,
type: 'User',
checksum: upload.checksum })
end
end
end
end
require 'spec_helper'
describe Geo::FileDownloadService do
include ::EE::GeoHelpers
set(:primary) { create(:geo_node, :primary) }
set(:secondary) { create(:geo_node) }
before do
stub_current_geo_node(secondary)
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
end
describe '#execute' do
context 'user avatar' do
let(:user) { create(:user, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
let(:upload) { Upload.find_by(model: user, uploader: 'AvatarUploader') }
subject(:execute!) { described_class.new(:avatar, upload.id).execute }
it 'downloads a user avatar' do
stub_transfer(Gitlab::Geo::FileTransfer, 100)
expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::FileTransfer, -1)
expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
expect(Geo::FileRegistry.last.retry_count).to eq(1)
expect(Geo::FileRegistry.last.retry_at).to be_present
end
it 'registers when the download fails with some other error' do
stub_transfer(Gitlab::Geo::FileTransfer, nil)
expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
end
end
context 'group avatar' do
let(:group) { create(:group, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
let(:upload) { Upload.find_by(model: group, uploader: 'AvatarUploader') }
subject(:execute!) { described_class.new(:avatar, upload.id).execute }
it 'downloads a group avatar' do
stub_transfer(Gitlab::Geo::FileTransfer, 100)
expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::FileTransfer, -1)
expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
end
end
context 'project avatar' do
let(:project) { create(:project, avatar: fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png')) }
let(:upload) { Upload.find_by(model: project, uploader: 'AvatarUploader') }
subject(:execute!) { described_class.new(:avatar, upload.id).execute }
it 'downloads a project avatar' do
stub_transfer(Gitlab::Geo::FileTransfer, 100)
expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::FileTransfer, -1)
expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
end
end
context 'with an attachment' do
let(:note) { create(:note, :with_attachment) }
let(:upload) { Upload.find_by(model: note, uploader: 'AttachmentUploader') }
subject(:execute!) { described_class.new(:attachment, upload.id).execute }
it 'downloads the attachment' do
stub_transfer(Gitlab::Geo::FileTransfer, 100)
expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::FileTransfer, -1)
expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
end
end
context 'with a snippet' do
let(:upload) { create(:upload, :personal_snippet_upload) }
subject(:execute!) { described_class.new(:personal_file, upload.id).execute }
it 'downloads the file' do
stub_transfer(Gitlab::Geo::FileTransfer, 100)
expect { execute! }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::FileTransfer, -1)
expect { execute! }.to change { Geo::FileRegistry.failed.count }.by(1)
end
end
context 'with file upload' do
let(:project) { create(:project) }
let(:upload) { Upload.find_by(model: project, uploader: 'FileUploader') }
subject { described_class.new(:file, upload.id) }
before do
FileUploader.new(project).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
end
it 'downloads the file' do
stub_transfer(Gitlab::Geo::FileTransfer, 100)
expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::FileTransfer, -1)
expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
end
end
context 'with namespace file upload' do
let(:group) { create(:group) }
let(:upload) { Upload.find_by(model: group, uploader: 'NamespaceFileUploader') }
subject { described_class.new(:file, upload.id) }
before do
NamespaceFileUploader.new(group).store!(fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png'))
end
it 'downloads the file' do
stub_transfer(Gitlab::Geo::FileTransfer, 100)
expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::FileTransfer, -1)
expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
end
end
context 'LFS object' do
let(:lfs_object) { create(:lfs_object) }
subject { described_class.new(:lfs, lfs_object.id) }
it 'downloads an LFS object' do
stub_transfer(Gitlab::Geo::LfsTransfer, 100)
expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::LfsTransfer, -1)
expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
end
it 'logs a message' do
stub_transfer(Gitlab::Geo::LfsTransfer, 100)
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
subject.execute
end
end
context 'job artifacts' do
let(:job_artifact) { create(:ci_job_artifact) }
subject { described_class.new(:job_artifact, job_artifact.id) }
it 'downloads a job artifact' do
stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
expect { subject.execute }.to change { Geo::FileRegistry.synced.count }.by(1)
end
it 'registers when the download fails' do
stub_transfer(Gitlab::Geo::JobArtifactTransfer, -1)
expect { subject.execute }.to change { Geo::FileRegistry.failed.count }.by(1)
end
it 'logs a message' do
stub_transfer(Gitlab::Geo::JobArtifactTransfer, 100)
expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s, success: true, bytes_downloaded: 100)).and_call_original
subject.execute
end
end
context 'bad object type' do
it 'raises an error' do
expect { described_class.new(:bad, 1).execute }.to raise_error(NameError)
end
end
def stub_transfer(kls, result)
# Replace the transfer class with a double whose download_from_primary
# returns `result`: bytes downloaded on success, -1 on failure, nil for other errors.
instance = double("(instance of #{kls})", download_from_primary: result)
allow(kls).to receive(:new).and_return(instance)
end
end
end
require 'spec_helper'
# Disable transactions via :delete method because a foreign table
# can't see changes inside a transaction of a different connection.
describe Geo::FilesExpireService, :geo, :delete do
let(:project) { create(:project) }
let!(:old_full_path) { project.full_path }
subject { described_class.new(project, old_full_path) }
describe '#execute' do
let(:file_uploader) { build(:file_uploader, project: project) }
let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let!(:file_registry) { create(:geo_file_registry, file_id: upload.id) }
before do
project.update(path: "#{project.path}_renamed")
end
context 'when in Geo secondary node' do
before do
allow(Gitlab::Geo).to receive(:secondary?) { true }
end
it 'removes the file from disk' do
file_path = File.join(subject.base_dir, upload.path)
expect(File.exist?(file_path)).to be_truthy
Sidekiq::Testing.inline! { subject.execute }
expect(File.exist?(file_path)).to be_falsey
end
it 'removes the file_registry associated with the upload' do
expect(file_registry.success).to be_truthy
subject.execute
expect { file_registry.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'when not in Geo secondary node' do
it 'is a no-op' do
expect(subject).not_to receive(:schedule_file_removal)
expect(subject).not_to receive(:mark_for_resync!)
subject.execute
end
end
end
end
require 'spec_helper'
def base_path(storage)
File.join(FileUploader.root, storage.disk_path)
end
describe Geo::HashedStorageAttachmentsMigrationService do
let!(:project) { create(:project) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
subject(:service) do
described_class.new(project.id,
old_attachments_path: legacy_storage.disk_path,
new_attachments_path: hashed_storage.disk_path)
end
describe '#execute' do
context 'when it succeeds' do
it 'moves attachments to hashed storage layout' do
expect(File.file?(old_path)).to be_truthy
expect(File.file?(new_path)).to be_falsey
expect(File.exist?(base_path(legacy_storage))).to be_truthy
expect(File.exist?(base_path(hashed_storage))).to be_falsey
expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original
service.execute
expect(File.exist?(base_path(hashed_storage))).to be_truthy
expect(File.exist?(base_path(legacy_storage))).to be_falsey
expect(File.file?(old_path)).to be_falsey
expect(File.file?(new_path)).to be_truthy
end
end
context 'when original folder does not exist anymore' do
before do
FileUtils.rm_rf(base_path(legacy_storage))
end
it 'skips moving the folders and goes to the next' do
expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
service.execute
expect(File.exist?(base_path(hashed_storage))).to be_falsey
expect(File.file?(new_path)).to be_falsey
end
end
context 'when target folder already exists' do
before do
FileUtils.mkdir_p(base_path(hashed_storage))
end
it 'raises AttachmentMigrationError' do
expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
expect { service.execute }.to raise_error(::Geo::AttachmentMigrationError)
end
end
end
describe '#async_execute' do
it 'starts the worker' do
expect(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async)
service.async_execute
end
it 'returns job id' do
allow(Geo::HashedStorageAttachmentsMigrationWorker).to receive(:perform_async).and_return('foo')
expect(service.async_execute).to eq('foo')
end
end
end
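For reference, a hedged sketch of the #async_execute contract the last two examples exercise; the argument list is an assumption, not the service's actual signature:

# Illustrative sketch: enqueue the worker and return Sidekiq's job id.
def async_execute
  Geo::HashedStorageAttachmentsMigrationWorker.perform_async(
    project_id,
    old_attachments_path,
    new_attachments_path
  )
end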
FactoryBot.define do
factory :geo_event_log, class: Geo::EventLog do
trait :created_event do
repository_created_event factory: :geo_repository_created_event
end
trait :updated_event do
repository_updated_event factory: :geo_repository_updated_event
end
trait :deleted_event do
repository_deleted_event factory: :geo_repository_deleted_event
end
trait :renamed_event do
repository_renamed_event factory: :geo_repository_renamed_event
end
trait :hashed_storage_migration_event do
hashed_storage_migrated_event factory: :geo_hashed_storage_migrated_event
end
trait :hashed_storage_attachments_event do
hashed_storage_attachments_event factory: :geo_hashed_storage_attachments_event
end
trait :lfs_object_deleted_event do
lfs_object_deleted_event factory: :geo_lfs_object_deleted_event
end
trait :job_artifact_deleted_event do
job_artifact_deleted_event factory: :geo_job_artifact_deleted_event
end
end
factory :geo_repository_created_event, class: Geo::RepositoryCreatedEvent do
project
repository_storage_name { project.repository_storage }
repository_storage_path { project.repository_storage_path }
add_attribute(:repo_path) { project.disk_path }
project_name { project.name }
wiki_path { project.wiki.disk_path }
end
factory :geo_repository_updated_event, class: Geo::RepositoryUpdatedEvent do
project
source 0
branches_affected 0
tags_affected 0
end
factory :geo_repository_deleted_event, class: Geo::RepositoryDeletedEvent do
project
repository_storage_name { project.repository_storage }
repository_storage_path { project.repository_storage_path }
deleted_path { project.path_with_namespace }
deleted_project_name { project.name }
end
factory :geo_repositories_changed_event, class: Geo::RepositoriesChangedEvent do
geo_node
end
factory :geo_repository_renamed_event, class: Geo::RepositoryRenamedEvent do
project { create(:project, :repository) }
repository_storage_name { project.repository_storage }
repository_storage_path { project.repository_storage_path }
old_path_with_namespace { project.path_with_namespace }
new_path_with_namespace { project.path_with_namespace + '_new' }
old_wiki_path_with_namespace { project.wiki.path_with_namespace }
new_wiki_path_with_namespace { project.wiki.path_with_namespace + '_new' }
old_path { project.path }
new_path { project.path + '_new' }
end
factory :geo_hashed_storage_migrated_event, class: Geo::HashedStorageMigratedEvent do
project { create(:project, :repository) }
repository_storage_name { project.repository_storage }
repository_storage_path { project.repository_storage_path }
old_disk_path { project.path_with_namespace }
new_disk_path { project.path_with_namespace + '_new' }
old_wiki_disk_path { project.wiki.path_with_namespace }
new_wiki_disk_path { project.wiki.path_with_namespace + '_new' }
new_storage_version { Project::HASHED_STORAGE_FEATURES[:repository] }
end
factory :geo_hashed_storage_attachments_event, class: Geo::HashedStorageAttachmentsEvent do
project { create(:project, :repository) }
old_attachments_path { Storage::LegacyProject.new(project).disk_path }
new_attachments_path { Storage::HashedProject.new(project).disk_path }
end
factory :geo_lfs_object_deleted_event, class: Geo::LfsObjectDeletedEvent do
lfs_object { create(:lfs_object, :with_file) }
after(:build, :stub) do |event, _|
local_store_path = Pathname.new(LfsObjectUploader.root)
relative_path = Pathname.new(event.lfs_object.file.path).relative_path_from(local_store_path)
event.oid = event.lfs_object.oid
event.file_path = relative_path
end
end
factory :geo_job_artifact_deleted_event, class: Geo::JobArtifactDeletedEvent do
job_artifact { create(:ci_job_artifact, :archive) }
after(:build, :stub) do |event, _|
local_store_path = Pathname.new(JobArtifactUploader.root)
relative_path = Pathname.new(event.job_artifact.file.path).relative_path_from(local_store_path)
event.file_path = relative_path
end
end
end
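A small usage sketch for the factories above; the surrounding spec is illustrative only, not part of this commit:

# Illustrative usage of the Geo event log factories defined above.
require 'spec_helper'

describe Geo::EventLog do
  it 'associates a job artifact deletion with the log entry' do
    event_log = create(:geo_event_log, :job_artifact_deleted_event)
    deletion = event_log.job_artifact_deleted_event

    expect(deletion.job_artifact).to be_a(Ci::JobArtifact)
    expect(deletion.file_path).to be_present
  end
end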
@@ -3,7 +3,6 @@ FactoryBot.define do
model { build(:project) }
size 100.kilobytes
uploader "AvatarUploader"
store ObjectStorage::Store::LOCAL
mount_point :avatar
secret nil
store ObjectStorage::Store::LOCAL
......
@@ -18,7 +18,7 @@ describe Ci::JobArtifact do
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
describe '#schedule_migration_to_object_storage' do
describe '#schedule_background_upload' do
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
......
shared_examples "matches the method pattern" do |method|
let(:target) { subject }
let(:args) { nil }
let(:pattern) { patterns[method] }
it do
return skip "No pattern provided, skipping." unless pattern
expect(target.method(method).call(*args)).to match(pattern)
end
end
shared_examples "builds correct paths" do |**patterns|
let(:patterns) { patterns }
before do
allow(subject).to receive(:filename).and_return('<filename>')
end
describe "#store_dir" do
it_behaves_like "matches the method pattern", :store_dir
end
describe "#cache_dir" do
it_behaves_like "matches the method pattern", :cache_dir
end
describe "#work_dir" do
it_behaves_like "matches the method pattern", :work_dir
end
describe "#upload_path" do
it_behaves_like "matches the method pattern", :upload_path
end
describe ".absolute_path" do
it_behaves_like "matches the method pattern", :absolute_path do
let(:target) { subject.class }
let(:args) { [upload] }
end
end
describe ".base_dir" do
it_behaves_like "matches the method pattern", :base_dir do
let(:target) { subject.class }
end
end
end
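These shared examples are meant to be mixed into individual uploader specs. A hedged usage sketch follows; the path patterns are placeholders, not the real JobArtifactUploader layout:

# Illustrative only: the regexes are placeholders for the actual path patterns.
describe JobArtifactUploader do
  let(:job_artifact) { create(:ci_job_artifact) }

  subject { described_class.new(job_artifact, :file) }

  it_behaves_like "builds correct paths",
                  store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z],
                  cache_dir: %r[artifacts/tmp/cache],
                  work_dir:  %r[artifacts/tmp/work]
end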
@@ -20,7 +20,7 @@ describe ObjectStorageUploadWorker do
stub_lfs_object_storage(background_upload: true)
end
it 'uploads object to storage' do
expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
......