Commit 80f4638a authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 9c3f02fe4fe6
......@@ -40,7 +40,10 @@ Attach the screenshot and HTML snapshot of the page from the job's artifacts:
/due in 2 weeks
<!-- Base labels. -->
/label ~Quality ~QA ~bug ~S1
/label ~Quality ~QA ~test
<!-- Test failure type label, please use just one.-->
/label ~"failure::broken-test" ~"failure::flaky-test" ~"failure::stale-test" ~"failure::test-environment" ~"failure::investigating"
<!--
Choose the stage that appears in the test path, e.g. ~"devops::create" for
......
import Vue from 'vue';
import PdfViewer from './pdf_viewer.vue';

export default () => {
const el = document.getElementById('js-pdf-viewer');

return new Vue({
el,
render(createElement) {
return createElement(PdfViewer, {
props: {
pdf: el.dataset.endpoint,
},
});
},
});
};
<script>
import PdfLab from '../../pdf/index.vue';
import { GlLoadingIcon } from '@gitlab/ui';
export default {
components: {
PdfLab,
GlLoadingIcon,
},
props: {
pdf: {
type: String,
required: true,
},
},
data() {
return {
error: false,
loadError: false,
loading: true,
};
},
methods: {
onLoad() {
this.loading = false;
},
onError(error) {
this.loading = false;
this.loadError = true;
this.error = error;
},
},
};
</script>
<template>
<div class="js-pdf-viewer container-fluid md prepend-top-default append-bottom-default">
<div v-if="loading && !error" class="text-center loading">
<gl-loading-icon class="mt-5" size="lg" />
</div>
<pdf-lab v-if="!loadError" :pdf="pdf" @pdflabload="onLoad" @pdflaberror="onError" />
<p v-if="error" class="text-center">
<span v-if="loadError" ref="loadError">
{{ __('An error occurred while loading the file. Please try again later.') }}
</span>
<span v-else>{{ __('An error occurred while decoding the file.') }}</span>
</p>
</div>
</template>
......@@ -8,6 +8,7 @@ import axios from './axios_utils';
import { getLocationHash } from './url_utility';
import { convertToCamelCase, convertToSnakeCase } from './text_utility';
import { isObject } from './type_utility';
import { isFunction } from 'lodash';
export const getPagePath = (index = 0) => {
const page = $('body').attr('data-page') || '';
......@@ -667,30 +668,34 @@ export const spriteIcon = (icon, className = '') => {
};
/**
* @callback ConversionFunction
* @param {string} prop
*/
/**
* This function takes a conversion function as the first parameter
* and applies it to each property name in the provided object.
*
* This method also supports additional params in the `options` object.
*
* @param {ConversionFunction} conversionFunction - Function to apply to each property name of the object.
* @param {Object} obj - Object to be converted.
* @param {Object} options - Object containing additional options.
* @param {boolean} options.deep - Flag to enable deep (recursive) conversion of nested objects.
* @param {string[]} options.dropKeys - List of properties to discard while building the new object.
* @param {string[]} options.ignoreKeyNames - List of properties to leave intact while building the new object.
*/
export const convertObjectProps = (conversionFunction, obj = {}, options = {}) => {
if (!isFunction(conversionFunction) || obj === null) {
return {};
}
const { deep = false, dropKeys = [], ignoreKeyNames = [] } = options;
const isObjParameterArray = Array.isArray(obj);
const initialValue = isObjParameterArray ? [] : {};
return Object.keys(obj).reduce((acc, prop) => {
const val = obj[prop];
// Drop properties from new object if
......@@ -702,34 +707,54 @@ export const convertObjectPropsToCamelCase = (obj = {}, options = {}) => {
// Skip converting properties in new object
// if there are any mentioned in options
if (ignoreKeyNames.indexOf(prop) > -1) {
acc[prop] = val;
return acc;
}
if (deep && (isObject(val) || Array.isArray(val))) {
if (isObjParameterArray) {
acc[prop] = convertObjectProps(conversionFunction, val, options);
} else {
acc[conversionFunction(prop)] = convertObjectProps(conversionFunction, val, options);
}
} else {
acc[conversionFunction(prop)] = val;
}
return acc;
}, initialValue);
};
/**
* This method takes in an object with snake_case property names
* and returns a new object with camelCase property names.
*
* The reasoning for this method is to ensure consistent property
* naming conventions across JS code.
*
* This method also supports additional params in the `options` object.
*
* @param {Object} obj - Object to be converted.
* @param {Object} options - Object containing additional options.
* @param {boolean} options.deep - Flag to enable deep (recursive) conversion of nested objects.
* @param {string[]} options.dropKeys - List of properties to discard while building the new object.
* @param {string[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building the new object.
*/
export const convertObjectPropsToCamelCase = (obj = {}, options = {}) =>
convertObjectProps(convertToCamelCase, obj, options);
/**
* Converts all the object keys to snake case.
*
* This method also supports additional params in the `options` object.
*
* @param {Object} obj - Object to be converted.
* @param {Object} options - Object containing additional options.
* @param {boolean} options.deep - Flag to enable deep (recursive) conversion of nested objects.
* @param {string[]} options.dropKeys - List of properties to discard while building the new object.
* @param {string[]} options.ignoreKeyNames - List of properties to leave intact (not converted) while building the new object.
*/
export const convertObjectPropsToSnakeCase = (obj = {}, options = {}) =>
convertObjectProps(convertToSnakeCase, obj, options);
export const imagePath = imgUrl =>
`${gon.asset_host || ''}${gon.relative_url_root || ''}/assets/${imgUrl}`;
......
......@@ -7,7 +7,7 @@ class CiCdController < Groups::ApplicationController
before_action :authorize_admin_group!
before_action :authorize_update_max_artifacts_size!, only: [:update]
before_action do
push_frontend_feature_flag(:new_variables_ui, @group)
push_frontend_feature_flag(:new_variables_ui, @group, default_enabled: true)
end
before_action :define_variables, only: [:show, :create_deploy_token]
......
......@@ -21,7 +21,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
before_action only: [:show] do
push_frontend_feature_flag(:diffs_batch_load, @project, default_enabled: true)
push_frontend_feature_flag(:deploy_from_footer, @project, default_enabled: true)
push_frontend_feature_flag(:single_mr_diff_view, @project)
push_frontend_feature_flag(:single_mr_diff_view, @project, default_enabled: true)
push_frontend_feature_flag(:suggest_pipeline) if experiment_enabled?(:suggest_pipeline)
end
......
......@@ -6,7 +6,7 @@ class CiCdController < Projects::ApplicationController
before_action :authorize_admin_pipeline!
before_action :define_variables
before_action do
push_frontend_feature_flag(:new_variables_ui, @project)
push_frontend_feature_flag(:new_variables_ui, @project, default_enabled: true)
end
def show
......
......@@ -28,7 +28,8 @@ class JobArtifact < ApplicationRecord
license_scanning: 'gl-license-scanning-report.json',
performance: 'performance.json',
metrics: 'metrics.txt',
lsif: 'lsif.json'
lsif: 'lsif.json',
dotenv: '.env'
}.freeze
INTERNAL_TYPES = {
......@@ -43,6 +44,7 @@ class JobArtifact < ApplicationRecord
metrics_referee: :gzip,
network_referee: :gzip,
lsif: :gzip,
dotenv: :gzip,
# All these file formats use `raw` as we need to store them uncompressed
# for Frontend to fetch the files and do analysis
......@@ -118,7 +120,8 @@ class JobArtifact < ApplicationRecord
metrics: 12, ## EE-specific
metrics_referee: 13, ## runner referees
network_referee: 14, ## runner referees
lsif: 15 # LSIF data for code navigation
lsif: 15, # LSIF data for code navigation
dotenv: 16
}
enum file_format: {
......
......@@ -4,11 +4,14 @@ module Ci
class JobVariable < ApplicationRecord
extend Gitlab::Ci::Model
include NewHasVariable
include BulkInsertSafe
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
alias_attribute :secret_value, :value
validates :key, uniqueness: { scope: :job_id }
validates :key, uniqueness: { scope: :job_id }, unless: :dotenv_source?
enum source: { internal: 0, dotenv: 1 }, _suffix: true
end
end
......@@ -67,14 +67,14 @@ class DiffFileEntity < DiffFileBaseEntity
private
def parallel_diff_view?(options, diff_file)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)
# If we're not rendering inline, we must be rendering parallel
!inline_diff_view?(options, diff_file)
end
def inline_diff_view?(options, diff_file)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project)
return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)
# If nothing is present, inline will be the default.
options.fetch(:diff_view, :inline).to_sym == :inline
......
......@@ -10,10 +10,24 @@ class CreateJobArtifactsService < ::BaseService
].freeze
def execute(job, artifacts_file, params, metadata_file: nil)
return success if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)
artifact, artifact_metadata = build_artifact(job, artifacts_file, params, metadata_file)
result = parse_artifact(job, artifact)
return result unless result[:status] == :success
persist_artifact(job, artifact, artifact_metadata)
end
private
def build_artifact(job, artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact = Ci::JobArtifact.new(
job_id: job.id,
project: job.project,
file: artifacts_file,
file_type: params['artifact_type'],
......@@ -21,34 +35,51 @@ def execute(job, artifacts_file, params, metadata_file: nil)
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
project: job.project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
file_sha256: metadata_file.sha256,
expire_in: expire_in)
end
[artifact, artifact_metadata]
end
def parse_artifact(job, artifact)
unless Feature.enabled?(:ci_synchronous_artifact_parsing, job.project, default_enabled: true)
return success
end
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(job, artifact)
else success
end
end
def persist_artifact(job, artifact, artifact_metadata)
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
end
success
rescue ActiveRecord::RecordNotUnique => error
track_exception(error, job, params)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
track_exception(error, job, params)
error(error.message, :service_unavailable)
rescue => error
error(error.message, :bad_request)
end
def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
......@@ -63,5 +94,9 @@ def track_exception(error, job, params)
uploading_type: params['artifact_type']
)
end
def parse_dotenv_artifact(job, artifact)
Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
end
end
end
# frozen_string_literal: true
module Ci
class ParseDotenvArtifactService < ::BaseService
MAX_ACCEPTABLE_DOTENV_SIZE = 5.kilobytes
MAX_ACCEPTABLE_VARIABLES_COUNT = 10
SizeLimitError = Class.new(StandardError)
ParserError = Class.new(StandardError)
def execute(artifact)
validate!(artifact)
variables = parse!(artifact)
Ci::JobVariable.bulk_insert!(variables)
success
rescue SizeLimitError, ParserError, ActiveRecord::RecordInvalid => error
Gitlab::ErrorTracking.track_exception(error, job_id: artifact.job_id)
error(error.message, :bad_request)
end
private
def validate!(artifact)
unless artifact&.dotenv?
raise ArgumentError, 'Artifact is not dotenv file type'
end
unless artifact.file.size < MAX_ACCEPTABLE_DOTENV_SIZE
raise SizeLimitError,
"Dotenv Artifact Too Big. Maximum Allowable Size: #{MAX_ACCEPTABLE_DOTENV_SIZE}"
end
end
def parse!(artifact)
variables = []
artifact.each_blob do |blob|
blob.each_line do |line|
key, value = scan_line!(line)
variables << Ci::JobVariable.new(job_id: artifact.job_id,
source: :dotenv, key: key, value: value)
end
end
if variables.size > MAX_ACCEPTABLE_VARIABLES_COUNT
raise SizeLimitError,
"Dotenv files cannot have more than #{MAX_ACCEPTABLE_VARIABLES_COUNT} variables"
end
variables
end
def scan_line!(line)
result = line.scan(/^(.*)=(.*)$/).last
raise ParserError, 'Invalid Format' if result.nil?
result.each(&:strip!)
end
end
end
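# A usage sketch of the service above, with made-up sample data (the report contents
# and variable names are illustrative, not taken from a real job):
#
#   # deploy.env produced by a job:
#   #   DATABASE_URL=postgres://example/db
#   #   DYNAMIC_ENVIRONMENT_URL=https://94dd65b.amazonaws.com/qa-lambda-1234567
#
#   Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
#   # Builds one Ci::JobVariable per line with `source: :dotenv` (e.g. key 'DATABASE_URL',
#   # value 'postgres://example/db') and bulk-inserts them for the artifact's job.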
......@@ -5,7 +5,7 @@
- link_start = '<a href="%{url}">'.html_safe % { url: help_page_path('ci/variables/README', anchor: 'protected-variables') }
= s_('Environment variables are configured by your administrator to be %{link_start}protected%{link_end} by default').html_safe % { link_start: link_start, link_end: '</a>'.html_safe }
- if Feature.enabled?(:new_variables_ui, @project || @group)
- if Feature.enabled?(:new_variables_ui, @project || @group, default_enabled: true)
- is_group = !@group.nil?
#js-ci-project-variables{ data: { endpoint: save_endpoint, project_id: @project&.id || '', group: is_group.to_s, maskable_regex: ci_variable_maskable_regex} }
......
---
title: Support DotEnv Variables through report type artifact
merge_request: 26247
author:
type: added
---
title: Diffs load each view style separately, on demand
merge_request: 24821
author:
type: performance
---
title: Update UI for project and group settings CI variables
merge_request: 26901
author:
type: added
# frozen_string_literal: true
class AddRuntimeCreatedToCiJobVariables < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
DEFAULT_SOURCE = 0 # Equivalent to Ci::JobVariable.internal_source
def up
add_column_with_default(:ci_job_variables, :source, :integer, limit: 2, default: DEFAULT_SOURCE, allow_null: false)
end
def down
remove_column(:ci_job_variables, :source)
end
end
......@@ -780,6 +780,7 @@
t.string "encrypted_value_iv"
t.bigint "job_id", null: false
t.integer "variable_type", limit: 2, default: 1, null: false
t.integer "source", limit: 2, default: 0, null: false
t.index ["job_id"], name: "index_ci_job_variables_on_job_id"
t.index ["key", "job_id"], name: "index_ci_job_variables_on_key_and_job_id", unique: true
end
......
......@@ -156,6 +156,70 @@ Starting with GitLab 9.3, the environment URL is exposed to the Runner via
- `.gitlab-ci.yml`.
- The external URL from the environment if not defined in `.gitlab-ci.yml`.
#### Set dynamic environment URLs after a job finishes
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/17066) in GitLab 12.9.
In a job script, you can specify a static [environment URL](#using-the-environment-url).
However, there may be times when you want a dynamic URL. For example,
if you deploy a Review App to an external hosting
service that generates a random URL per deployment, like `https://94dd65b.amazonaws.com/qa-lambda-1234567`,
you don't know the URL before the deployment script finishes.
If you want to use the environment URL in GitLab, you would have to update it manually.
To address this problem, you can configure a deployment job to report back a set of
variables, including the URL that was dynamically generated by the external service.
GitLab supports the [dotenv](https://github.com/bkeepers/dotenv) file format,
and expands the `environment:url` value with variables defined in the dotenv file.
To use this feature, specify the
[`artifacts:reports:dotenv`](yaml/README.md#artifactsreportsdotenv) keyword in `.gitlab-ci.yml`.
##### Example of setting dynamic environment URLs
The following example shows a Review App that creates a new environment
per merge request. The `review` job is triggered by every push, and
creates or updates an environment named `review/your-branch-name`.
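A minimal version of such a job might look like the following sketch. The deployment
command and the `deploy.env` report file name are illustrative placeholders:

```yaml
review:
  stage: deploy
  script:
    # The hosting service prints the generated URL; capture it in a dotenv report.
    - DYNAMIC_ENVIRONMENT_URL=$(./deploy-review-app.sh)
    - echo "DYNAMIC_ENVIRONMENT_URL=$DYNAMIC_ENVIRONMENT_URL" >> deploy.env
  artifacts:
    reports:
      dotenv: deploy.env
  environment:
    name: review/$CI_COMMIT_REF_SLUG
    url: $DYNAMIC_ENVIRONMENT_URL
```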