
Commit d6bb7907 authored by Shinya Maeda

Merge branch 'master' into live-trace-v2

@@ -72,3 +72,4 @@ eslint-report.html
/locale/**/*.time_stamp
/.rspec
/plugins/*
/.gitlab_pages_secret
@@ -110,7 +110,7 @@ stages:
# Jobs that only need to pull cache
.dedicated-no-docs-pull-cache-job: &dedicated-no-docs-pull-cache-job
<<: *dedicated-runner
<<: *except-docs-and-qa
<<: *except-docs
<<: *pull-cache
dependencies:
- setup-test-env
@@ -122,6 +122,10 @@ stages:
variables:
SETUP_DB: "false"
.dedicated-no-docs-and-no-qa-pull-cache-job: &dedicated-no-docs-and-no-qa-pull-cache-job
<<: *dedicated-no-docs-pull-cache-job
<<: *except-docs-and-qa
.rake-exec: &rake-exec
<<: *dedicated-no-docs-no-db-pull-cache-job
script:
@@ -222,7 +226,7 @@ stages:
- master@gitlab/gitlab-ee
.gitlab-setup: &gitlab-setup
<<: *dedicated-no-docs-pull-cache-job
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
<<: *use-pg
variables:
CREATE_DB_USER: "true"
@@ -262,12 +266,12 @@ stages:
# DB migration, rollback, and seed jobs
.db-migrate-reset: &db-migrate-reset
<<: *dedicated-no-docs-pull-cache-job
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
script:
- bundle exec rake db:migrate:reset
.migration-paths: &migration-paths
<<: *dedicated-no-docs-pull-cache-job
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
variables:
CREATE_DB_USER: "true"
script:
@@ -647,7 +651,7 @@ migration:path-mysql:
<<: *use-mysql
.db-rollback: &db-rollback
<<: *dedicated-no-docs-pull-cache-job
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
script:
- bundle exec rake db:migrate VERSION=20170523121229
- bundle exec rake db:migrate
@@ -670,7 +674,7 @@ gitlab:setup-mysql:
# Frontend-related jobs
gitlab:assets:compile:
<<: *dedicated-no-docs-no-db-pull-cache-job
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
dependencies: []
variables:
NODE_ENV: "production"
@@ -691,7 +695,7 @@ gitlab:assets:compile:
- webpack-report/
karma:
<<: *dedicated-no-docs-pull-cache-job
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
<<: *use-pg
dependencies:
- compile-assets
@@ -815,7 +819,7 @@ coverage:
- coverage/assets/
lint:javascript:report:
<<: *dedicated-no-docs-no-db-pull-cache-job
<<: *dedicated-no-docs-and-no-qa-pull-cache-job
stage: post-test
dependencies:
- compile-assets
......
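The `.gitlab-ci.yml` hunks above move the `*except-docs-and-qa` rule out of `.dedicated-no-docs-pull-cache-job` and reintroduce it through a new `.dedicated-no-docs-and-no-qa-pull-cache-job` template, which the DB, gitlab-setup, assets, karma, and lint jobs now extend. A rough Ruby model of how those layered templates combine (placeholder values only; each `<<:` is treated here as a last-key-wins hash merge, an approximation rather than GitLab's actual YAML processing):

```ruby
# Illustrative only: model each CI template as a Ruby hash and each `<<:`
# as a merge. The values are placeholders, not the real anchor bodies.
except_docs        = { 'except' => ['docs-branches'] }
except_docs_and_qa = { 'except' => ['docs-branches', 'qa-branches'] }
pull_cache         = { 'cache'  => { 'policy' => 'pull' } }

# .dedicated-no-docs-pull-cache-job now layers only the docs exclusion...
no_docs_pull_cache = pull_cache.merge(except_docs)

# ...while the new template re-adds the QA exclusion on top of it.
no_docs_and_no_qa_pull_cache = no_docs_pull_cache.merge(except_docs_and_qa)

puts no_docs_and_no_qa_pull_cache.inspect
# => {"cache"=>{"policy"=>"pull"}, "except"=>["docs-branches", "qa-branches"]}
```

The jobs switched to the new template pick up the broader docs-and-QA exclusion, while `.dedicated-no-docs-pull-cache-job` itself is left excluding docs builds only.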
class ListLabel {
export default class ListLabel {
constructor(obj) {
this.id = obj.id;
this.title = obj.title;
......
@@ -14,6 +14,7 @@ def execute
items = by_scope(items)
items = by_status(items)
items = by_ref(items)
items = by_sha(items)
items = by_name(items)
items = by_username(items)
items = by_yaml_errors(items)
@@ -69,6 +70,14 @@ def by_ref(items)
end
end
def by_sha(items)
if params[:sha].present?
items.where(sha: params[:sha])
else
items
end
end
def by_name(items)
if params[:name].present?
items.joins(:user).where(users: { name: params[:name] })
......
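The finder change adds a `by_sha` step to the filter chain, mirroring `by_ref` and `by_name`. A self-contained sketch of the same chain-of-filters pattern using plain Ruby stand-ins (the real finder narrows an ActiveRecord relation and receives `params[:sha]` from the pipelines list API, per the changelog entry further down):

```ruby
# Plain-Ruby stand-in for the finder's filter chain; arrays and hashes
# replace the ActiveRecord relation used in the real code.
class TinyPipelinesFinder
  def initialize(pipelines, params = {})
    @pipelines = pipelines
    @params    = params
  end

  def execute
    items = @pipelines
    items = by_ref(items)
    items = by_sha(items)
    items
  end

  private

  def by_ref(items)
    @params[:ref] ? items.select { |p| p[:ref] == @params[:ref] } : items
  end

  # The new step: narrow by commit SHA only when the param is present.
  def by_sha(items)
    @params[:sha] ? items.select { |p| p[:sha] == @params[:sha] } : items
  end
end

pipelines = [
  { id: 1, ref: 'master', sha: 'aaa111' },
  { id: 2, ref: 'master', sha: 'bbb222' }
]
p TinyPipelinesFinder.new(pipelines, sha: 'bbb222').execute
# => [{:id=>2, :ref=>"master", :sha=>"bbb222"}]
```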
@@ -159,7 +159,7 @@ def change_time_estimate(noteable, project, author)
body = if noteable.time_estimate == 0
"removed time estimate"
else
"changed time estimate to #{parsed_time},"
"changed time estimate to #{parsed_time}"
end
create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
......
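With the trailing comma gone, the time-tracking system note reads cleanly. A minimal illustration, with example values standing in for `noteable.time_estimate` and `parsed_time`:

```ruby
# Example values only; in the real service these come from the noteable.
time_estimate = 12_600          # seconds
parsed_time   = '3h 30m'

body =
  if time_estimate == 0
    'removed time estimate'
  else
    "changed time estimate to #{parsed_time}"   # previously ended in a stray comma
  end

puts body   # => changed time estimate to 3h 30m
```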
@@ -17,14 +17,14 @@
.ci-variable-row-body
%input.js-ci-variable-input-id{ type: "hidden", name: id_input_name, value: id }
%input.js-ci-variable-input-destroy{ type: "hidden", name: destroy_input_name }
%input.js-ci-variable-input-key.ci-variable-body-item.form-control{ type: "text",
%input.js-ci-variable-input-key.ci-variable-body-item.qa-ci-variable-input-key.form-control{ type: "text",
name: key_input_name,
value: key,
placeholder: s_('CiVariables|Input variable key') }
.ci-variable-body-item
.form-control.js-secret-value-placeholder{ class: ('hide' unless id) }
.form-control.js-secret-value-placeholder.qa-ci-variable-input-value{ class: ('hide' unless id) }
= '*' * 20
%textarea.js-ci-variable-input-value.js-secret-value.form-control{ class: ('hide' if id),
%textarea.js-ci-variable-input-value.js-secret-value.qa-ci-variable-input-value.form-control{ class: ('hide' if id),
rows: 1,
name: value_input_name,
placeholder: s_('CiVariables|Input variable value') }
......
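The `qa-` classes added above are selector hooks for GitLab's end-to-end QA suite rather than styling. A hedged Capybara-style sketch of how a page object might drive the variable form through those hooks (the class and method names here are illustrative, not GitLab's actual QA framework code):

```ruby
require 'capybara/dsl'

# Illustrative page object; only the `.qa-*` CSS hooks come from the diff.
class CiVariableForm
  include Capybara::DSL

  def add_variable(key, value)
    find('input.qa-ci-variable-input-key').set(key)
    find('textarea.qa-ci-variable-input-value').set(value)
  end
end
```

Keeping test selectors separate from styling classes such as `form-control` means the QA suite stays stable when the UI classes change.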
- content_for :merge_access_levels do
.merge_access_levels-container
= dropdown_tag('Select',
options: { toggle_class: 'js-allowed-to-merge wide',
dropdown_class: 'dropdown-menu-selectable capitalize-header',
options: { toggle_class: 'js-allowed-to-merge qa-allowed-to-merge-select wide',
dropdown_class: 'dropdown-menu-selectable qa-allowed-to-merge-dropdown capitalize-header',
data: { field_name: 'protected_branch[merge_access_levels_attributes][0][access_level]', input_id: 'merge_access_levels_attributes' }})
- content_for :push_access_levels do
.push_access_levels-container
......
%td
= hidden_field_tag "allowed_to_merge_#{protected_branch.id}", protected_branch.merge_access_levels.first.access_level
= dropdown_tag( (protected_branch.merge_access_levels.first.humanize || 'Select') ,
options: { toggle_class: 'js-allowed-to-merge', dropdown_class: 'dropdown-menu-selectable js-allowed-to-merge-container capitalize-header',
options: { toggle_class: 'js-allowed-to-merge qa-allowed-to-merge', dropdown_class: 'dropdown-menu-selectable js-allowed-to-merge-container capitalize-header',
data: { field_name: "allowed_to_merge_#{protected_branch.id}", access_level_id: protected_branch.merge_access_levels.first.id }})
%td
= hidden_field_tag "allowed_to_push_#{protected_branch.id}", protected_branch.push_access_levels.first.access_level
......
@@ -36,5 +36,6 @@
= form.text_field :domain, class: 'form-control', placeholder: 'domain.com'
.help-block
= s_('CICD|You need to specify a domain if you want to use Auto Review Apps and Auto Deploy stages.')
= link_to icon('question-circle'), help_page_path('topics/autodevops/index.md', anchor: 'auto-devops-base-domain'), target: '_blank'
= f.submit 'Save changes', class: "btn btn-success prepend-top-15"
---
title: Add sha filter to pipelines list API
merge_request: 18125
author:
type: changed
---
title: Repository#exists? is always executed through Gitaly
merge_request:
author:
type: performance
@@ -184,18 +184,18 @@ production: &base
# base_dir: uploads/-/system
object_store:
enabled: false
# remote_directory: uploads # Bucket name
remote_directory: uploads # Bucket name
# direct_upload: false # Use Object Storage directly for uploads instead of background uploads if enabled (Default: false)
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: us-east-1
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: us-east-1
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
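For orientation, the `connection:` block above is what GitLab hands to its object storage client; roughly the same settings establish a fog-aws connection directly. A hedged sketch with placeholder credentials (this is the shape of the connection, not GitLab's internal code):

```ruby
require 'fog/aws'

# Placeholder credentials; `endpoint`/`path_style` would point this at an
# S3-compatible store instead of AWS itself.
storage = Fog::Storage.new(
  provider:              'AWS',
  aws_access_key_id:     'AWS_ACCESS_KEY_ID',
  aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
  region:                'us-east-1'
)

bucket = storage.directories.get('uploads')   # the `remote_directory` above
```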
## GitLab Pages
pages:
@@ -212,6 +212,8 @@
artifacts_server: true
# external_http: ["1.1.1.1:80", "[2001::1]:80"] # If defined, enables custom domain support in GitLab Pages
# external_https: ["1.1.1.1:443", "[2001::1]:443"] # If defined, enables custom domain and certificate support in GitLab Pages
admin:
address: unix:/home/git/gitlab/tmp/sockets/private/pages-admin.socket # TCP connections are supported too (e.g. tcp://host:port)
## Mattermost
## For enabling Add to Mattermost button
......
@@ -215,6 +215,9 @@
Settings.pages['external_https'] ||= false unless Settings.pages['external_https'].present?
Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pages['artifacts_server'].nil?
Settings.pages['admin'] ||= Settingslogic.new({})
Settings.pages.admin['certificate'] ||= ''
#
# Git LFS
#
......
Gitlab::PagesClient.read_or_create_token
Gitlab::PagesClient.load_certificate
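The initializer lines above give the new `pages.admin` section safe defaults before `Gitlab::PagesClient` loads its token and certificate. A self-contained sketch of that defaulting pattern, with plain hashes standing in for Settingslogic:

```ruby
# Operator config without an `admin` section configured.
settings = { 'pages' => { 'enabled' => true } }

settings['pages']['admin'] ||= {}                 # make the section exist
settings['pages']['admin']['certificate'] ||= ''  # and give it a safe default

p settings['pages']['admin']   # => {"certificate"=>""}
```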
@@ -323,7 +323,7 @@ The prerequisites for a HA Redis setup are the following:
# machines to connect to it.
redis['port'] = 6379
# The same password for Redeis authentication you set up for the master node.
# The same password for Redis authentication you set up for the master node.
redis['password'] = 'redis-password-goes-here'
# The IP of the master Redis node.
......
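Once the password is set on every node, a quick client-side check confirms a node accepts it. A hedged sketch using the `redis` gem (the host is a placeholder):

```ruby
require 'redis'

# Placeholder host; use the IP of the node you are checking.
redis = Redis.new(host: '10.0.0.1', port: 6379, password: 'redis-password-goes-here')
puts redis.ping   # => "PONG" when the port and password above are correct
```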
@@ -107,7 +107,7 @@ For source installations the following settings are nested under `artifacts:` an
| Setting | Description | Default |
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Artfacts will be stored| |
| `remote_directory` | The bucket name where Artifacts will be stored| |
| `direct_upload` | Set to true to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. Currently only `Google` provider is supported | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
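For Omnibus installations the same switches live in `/etc/gitlab/gitlab.rb`. A hedged sketch; the `gitlab_rails['artifacts_object_store_*']` key names are my assumption of the Omnibus equivalents, so check the Omnibus documentation for your version before copying:

```ruby
# Boolean values mirror the defaults in the table; the bucket name is an example.
gitlab_rails['artifacts_object_store_enabled']           = false
gitlab_rails['artifacts_object_store_remote_directory']  = 'artifacts'
gitlab_rails['artifacts_object_store_direct_upload']     = false
gitlab_rails['artifacts_object_store_background_upload'] = true
gitlab_rails['artifacts_object_store_proxy_download']    = false
```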
@@ -148,7 +148,7 @@ _The artifacts are stored by default in
```
NOTE: For GitLab 9.4+, if you are using AWS IAM profiles, be sure to omit the
AWS access key and secret acces key/value pairs. For example:
AWS access key and secret access key/value pairs. For example:
```ruby
gitlab_rails['artifacts_object_store_connection'] = {
......
@@ -46,7 +46,7 @@ In this experimental phase, only a few metrics are available:
| redis_ping_latency_seconds | Gauge | 9.4 | Round trip time of the redis ping |
| user_session_logins_total | Counter | 9.4 | Counter of how many users have logged in |
| filesystem_circuitbreaker_latency_seconds | Gauge | 9.5 | Time spent validating if a storage is accessible |
| filesystem_circuitbreaker | Gauge | 9.5 | Wether or not the circuit for a certain shard is broken or not |
| filesystem_circuitbreaker | Gauge | 9.5 | Whether or not the circuit for a certain shard is broken or not |
| circuitbreaker_storage_check_duration_seconds | Histogram | 10.3 | Time a single storage probe took |
## Metrics shared directory
......
@@ -31,7 +31,7 @@ GitLab Shell provides a way to authorize SSH users via a fast, indexed lookup
to the GitLab database. GitLab Shell uses the fingerprint of the SSH key to
check whether the user is authorized to access GitLab.
Add the following to your `sshd_config` file. This is usuaully located at
Add the following to your `sshd_config` file. This is usually located at
`/etc/ssh/sshd_config`, but it will be `/assets/sshd_config` if you're using
Omnibus Docker:
......
@@ -104,7 +104,7 @@ _The uploads are stored by default in
```
>**Note:**
If you are using AWS IAM profiles, be sure to omit the AWS access key and secret acces key/value pairs.
If you are using AWS IAM profiles, be sure to omit the AWS access key and secret access key/value pairs.
```ruby
gitlab_rails['uploads_object_store_connection'] = {
......
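For reference, a connection hash without static credentials typically leans on fog-aws's `use_iam_profile` option. A hedged sketch with a placeholder region (illustrative, not the elided example above):

```ruby
# Placeholder region; with an instance profile no access key pair is needed.
gitlab_rails['uploads_object_store_connection'] = {
  'provider'        => 'AWS',
  'region'          => 'us-east-1',
  'use_iam_profile' => true
}
```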