Commit 120f9aba authored by Kamil Trzcinski, committed by James Edwards-Jones

Add GitLab Pages

- Pages are created when build artifacts for the `pages` job are uploaded
- Pages serve the content under: http://group.pages.domain.com/project
- Pages can also serve a group page via a special project named after the host: group.pages.domain.com (see the URL sketch after this list)
- Users can provide their own 403 and 404 error pages by creating 403.html and 404.html in the group page project
- Pages can be explicitly removed from a project by clicking Remove Pages in Project Settings
- The size of pages is limited by the "max pages size" application setting, which caps the size of the unpacked archive (default: 100MB)
- The public/ directory is extracted from the artifacts and its content is served as static pages
- The asynchronous Pages worker uses `dd` to limit the unpacked tar size
- Pages needs to be explicitly enabled, and the domain needs to be specified in gitlab.yml
- Pages are part of backups
- Pages report the deployment status using the Commit Status API
- Pages use a new Sidekiq queue: pages
- Pages use a separate Nginx config, which needs to be explicitly added
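
The two URL schemes above can be sketched as follows; the helper below is illustrative only (it is not part of this commit) and assumes a configured Pages host of pages.domain.com:

# Illustrative sketch, not the commit's code: derives a project's Pages URL
# under the assumption that the Pages host is "pages.domain.com".
def pages_url_for(group_path, project_path, pages_host = 'pages.domain.com')
  group_host = "#{group_path}.#{pages_host}"

  if project_path == group_host
    # A project named after the host serves the group page at the root
    "http://#{group_host}"
  else
    # Other projects are served under http://group.pages.domain.com/project
    "http://#{group_host}/#{project_path}"
  end
end

pages_url_for('group', 'project')                # => "http://group.pages.domain.com/project"
pages_url_for('group', 'group.pages.domain.com') # => "http://group.pages.domain.com"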
parent 65118f2b3924
require 'spec_helper'

describe UpdatePagesService, services: true do
  let(:build) { create(:ci_build) }
  let(:data) { Gitlab::BuildDataBuilder.build(build) }
  let(:service) { UpdatePagesService.new(data) }

  context 'execute asynchronously for pages job' do
    before { build.name = 'pages' }

    context 'on success' do
      before { build.success }

      it 'should execute worker' do
        expect(PagesWorker).to receive(:perform_async)
        service.execute
      end
    end

    %w(pending running failed canceled).each do |status|
      context "on #{status}" do
        before { build.status = status }

        it 'should not execute worker' do
          expect(PagesWorker).to_not receive(:perform_async)
          service.execute
        end
      end
    end
  end

  context 'for other jobs' do
    before do
      build.name = 'other job'
      build.success
    end

    it 'should not execute worker' do
      expect(PagesWorker).to_not receive(:perform_async)
      service.execute
    end
  end
end
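
A minimal implementation that would satisfy the spec above could look roughly like this; the hash keys are an assumption based on the build payload produced by Gitlab::BuildDataBuilder, and the commit's actual service may differ:

# Sketch only: enqueue the Pages worker for successful `pages` builds.
class UpdatePagesService
  def initialize(data)
    @data = data
  end

  def execute
    # Only the job named `pages` produces a Pages deployment...
    return unless @data[:build_name] == 'pages'
    # ...and only once that build has succeeded
    return unless @data[:build_status] == 'success'

    PagesWorker.perform_async(@data[:build_id])
  end
end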
@@ -28,7 +28,7 @@ def run_rake_task(task_name)
end
def reenable_backup_sub_tasks
-%w{db repo uploads builds artifacts lfs registry}.each do |subtask|
+%w{db repo uploads builds artifacts pages lfs registry}.each do |subtask|
Rake::Task["gitlab:backup:#{subtask}:create"].reenable
end
end
@@ -71,6 +71,7 @@ def reenable_backup_sub_tasks
expect(Rake::Task['gitlab:backup:builds:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:uploads:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:artifacts:restore']).to receive(:invoke)
+expect(Rake::Task['gitlab:backup:pages:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:lfs:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:registry:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
@@ -202,7 +203,7 @@ def restore_backup
it 'sets correct permissions on the tar contents' do
tar_contents, exit_status = Gitlab::Popen.popen(
-%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz lfs.tar.gz registry.tar.gz}
+%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
)
expect(exit_status).to eq(0)
expect(tar_contents).to match('db/')
@@ -210,14 +211,15 @@ def restore_backup
expect(tar_contents).to match('repositories/')
expect(tar_contents).to match('builds.tar.gz')
expect(tar_contents).to match('artifacts.tar.gz')
+expect(tar_contents).to match('pages.tar.gz')
expect(tar_contents).to match('lfs.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
-expect(tar_contents).not_to match(/^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|artifacts.tar.gz|registry.tar.gz)\/$/)
+expect(tar_contents).not_to match(/^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)\/$/)
end
it 'deletes temp directories' do
temp_dirs = Dir.glob(
-File.join(Gitlab.config.backup.path, '{db,repositories,uploads,builds,artifacts,lfs,registry}')
+File.join(Gitlab.config.backup.path, '{db,repositories,uploads,builds,artifacts,pages,lfs,registry}')
)
expect(temp_dirs).to be_empty
@@ -304,7 +306,7 @@ def tars_glob
it "does not contain skipped item" do
tar_contents, _exit_status = Gitlab::Popen.popen(
-%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz lfs.tar.gz registry.tar.gz}
+%W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
)
expect(tar_contents).to match('db/')
@@ -312,6 +314,7 @@ def tars_glob
expect(tar_contents).to match('builds.tar.gz')
expect(tar_contents).to match('artifacts.tar.gz')
expect(tar_contents).to match('lfs.tar.gz')
+expect(tar_contents).to match('pages.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).not_to match('repositories/')
end
@@ -327,6 +330,7 @@ def tars_glob
expect(Rake::Task['gitlab:backup:uploads:restore']).not_to receive :invoke
expect(Rake::Task['gitlab:backup:builds:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:artifacts:restore']).to receive :invoke
+expect(Rake::Task['gitlab:backup:pages:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:lfs:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:registry:restore']).to receive :invoke
expect(Rake::Task['gitlab:shell:setup']).to receive :invoke
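
The rake spec changes above expect a gitlab:backup:pages:create / gitlab:backup:pages:restore task pair and a pages.tar.gz archive inside the backup tar. A hypothetical wiring, modelled on the existing per-component backup tasks (the Backup::Pages class and its dump/restore methods are assumptions, not the commit's actual code):

namespace :gitlab do
  namespace :backup do
    namespace :pages do
      # Archive shared/pages into pages.tar.gz inside the backup tar
      task create: :environment do
        Backup::Pages.new.dump
      end

      # Unpack pages.tar.gz from the backup back into shared/pages
      task restore: :environment do
        Backup::Pages.new.restore
      end
    end
  end
end
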
require "spec_helper"
describe PagesWorker do
let(:project) { create :project }
let(:commit) { create :ci_commit, project: project, sha: project.commit('HEAD').sha }
let(:build) { create :ci_build, commit: commit, ref: 'HEAD' }
let(:worker) { PagesWorker.new }
let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages.tar.gz', 'application/octet-stream') }
let(:empty_file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages_empty.tar.gz', 'application/octet-stream') }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'application/octet-stream') }
before do
project.remove_pages
end
context 'for valid file' do
before { build.update_attributes(artifacts_file: file) }
it 'succeeds' do
expect(project.pages_url).to be_nil
expect(worker.perform(build.id)).to be_truthy
expect(project.pages_url).to_not be_nil
end
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(worker.perform(build.id)).to_not be_truthy
end
it 'removes pages after destroy' do
expect(project.pages_url).to be_nil
expect(worker.perform(build.id)).to be_truthy
expect(project.pages_url).to_not be_nil
project.destroy
expect(Dir.exist?(project.public_pages_path)).to be_falsey
end
end
it 'fails if no artifacts' do
expect(worker.perform(build.id)).to_not be_truthy
end
it 'fails for empty file fails' do
build.update_attributes(artifacts_file: empty_file)
expect(worker.perform(build.id)).to_not be_truthy
end
it 'fails for invalid archive' do
build.update_attributes(artifacts_file: invalid_file)
expect(worker.perform(build.id)).to_not be_truthy
end
it 'fails if sha on branch is not latest' do
commit.update_attributes(sha: 'old_sha')
build.update_attributes(artifacts_file: file)
expect(worker.perform(build.id)).to_not be_truthy
end
end
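
Combined with the commit message, the behaviour this spec exercises can be sketched as below; apart from the perform signature and the Sidekiq queue name, everything here is illustrative (artifacts_present?, latest_sha? and deploy_public_dir are hypothetical placeholders, not the commit's methods):

# Sketch of the deployment flow exercised by the spec; not the actual worker.
class PagesWorker
  include Sidekiq::Worker
  sidekiq_options queue: :pages

  def perform(build_id)
    build = Ci::Build.find_by(id: build_id)
    return false unless build

    # The real worker fails when the build has no artifacts or when its
    # sha is no longer the latest commit on the build's ref
    return false unless artifacts_present?(build) && latest_sha?(build)

    # Extract public/ from the artifacts archive into the project's pages
    # directory; `dd` caps the unpacked size at the max_pages_size setting
    deploy_public_dir(build)
  end
end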