Commit 15771b9d authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 42fa0e85998d
Please view this file on the master branch, on stable branches it's out of date.
## 12.9.3 (2020-04-14)
### Security (1 change)
- Fix filename bypass when uploading NuGet packages.
## 12.9.2 (2020-03-31)
### Fixed (4 changes)
@@ -152,6 +159,13 @@ Please view this file on the master branch, on stable branches it's out of date.
- Allow users to be marked as service users. !202680
## 12.8.9 (2020-04-14)
### Security (1 change)
- Fix filename bypass when uploading NuGet packages.
## 12.8.7 (2020-03-16)
### Fixed (1 change)
@@ -305,6 +319,13 @@ Please view this file on the master branch, on stable branches it's out of date.
- Prepare DB structure for GMA forking changes. !22002
## 12.7.9 (2020-04-14)
### Security (1 change)
- Fix filename bypass when uploading NuGet packages.
## 12.7.5
### Fixed (1 change)
......
@@ -163,7 +163,7 @@ gem 'diffy', '~> 3.3'
gem 'diff_match_patch', '~> 0.1.0'
# Application server
gem 'rack', '~> 2.0.7'
gem 'rack', '~> 2.0.9'
group :unicorn do
gem 'unicorn', '~> 5.4.1'
......
@@ -173,7 +173,7 @@ GEM
concord (0.1.5)
adamantium (~> 0.2.0)
equalizer (~> 0.0.9)
concurrent-ruby (1.1.5)
concurrent-ruby (1.1.6)
connection_pool (2.2.2)
contracts (0.11.0)
cork (0.3.0)
@@ -788,7 +788,7 @@ GEM
public_suffix (4.0.3)
pyu-ruby-sasl (0.0.3.3)
raabro (1.1.6)
rack (2.0.7)
rack (2.0.9)
rack-accept (0.4.5)
rack (>= 0.4)
rack-attack (6.2.0)
@@ -859,17 +859,17 @@ GEM
json
recursive-open-struct (1.1.0)
redis (4.1.3)
redis-actionpack (5.1.0)
actionpack (>= 4.0, < 7)
redis-rack (>= 1, < 3)
redis-actionpack (5.2.0)
actionpack (>= 5, < 7)
redis-rack (>= 2.1.0, < 3)
redis-store (>= 1.1.0, < 2)
redis-activesupport (5.2.0)
activesupport (>= 3, < 7)
redis-store (>= 1.3, < 2)
redis-namespace (1.6.0)
redis (>= 3.0.4)
redis-rack (2.0.6)
rack (>= 1.5, < 3)
redis-rack (2.1.2)
rack (>= 2.0.8, < 3)
redis-store (>= 1.2, < 2)
redis-rails (5.0.2)
redis-actionpack (>= 5.0, < 6)
@@ -1331,7 +1331,7 @@ DEPENDENCIES
prometheus-client-mmap (~> 0.10.0)
pry-byebug (~> 3.5.1)
pry-rails (~> 0.3.9)
rack (~> 2.0.7)
rack (~> 2.0.9)
rack-attack (~> 6.2.0)
rack-cors (~> 1.0.6)
rack-oauth2 (~> 1.9.3)
......
@@ -6,31 +6,32 @@ class ActiveSession
SESSION_BATCH_SIZE = 200
ALLOWED_NUMBER_OF_ACTIVE_SESSIONS = 100
attr_writer :session_id
attr_accessor :created_at, :updated_at,
:ip_address, :browser, :os,
:device_name, :device_type,
:is_impersonated
:is_impersonated, :session_id
def current?(session)
return false if session_id.nil? || session.id.nil?
session_id == session.id
# Rack v2.0.8+ added private_id, which uses the hash of the
# public_id to avoid timing attacks.
session_id.private_id == session.id.private_id
end
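For reference, a minimal sketch (not part of this commit; assumes Rack 2.0.8 or later is available) of the `Rack::Session::SessionId` behaviour the comparison above relies on; the raw ID value is hypothetical:

```ruby
require 'rack/session/abstract/id'

# Hypothetical session ID value, for illustration only.
raw_id = '05c6782c4d8d2d8a7c0e2b1f'

a = Rack::Session::SessionId.new(raw_id)
b = Rack::Session::SessionId.new(raw_id)

a.public_id  # => "05c6782c4d8d2d8a7c0e2b1f" (the value the client-side cookie carries)
a.private_id # => a digest derived from the public ID (e.g. "2::<sha256 of public_id>")

# Comparing private IDs, as `current?` does above, still identifies the same
# session while avoiding a direct comparison of the raw public value.
a.private_id == b.private_id # => true
```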
def human_device_type
device_type&.titleize
end
# This is not the same as Rack::Session::SessionId#public_id, but we
# need to preserve this for backwards compatibility.
def public_id
encrypted_id = Gitlab::CryptoHelper.aes256_gcm_encrypt(session_id)
CGI.escape(encrypted_id)
Gitlab::CryptoHelper.aes256_gcm_encrypt(session_id.public_id)
end
def self.set(user, request)
Gitlab::Redis::SharedState.with do |redis|
session_id = request.session.id
session_id = request.session.id.public_id
client = DeviceDetector.new(request.user_agent)
timestamp = Time.current
@@ -63,32 +64,35 @@ def self.set(user, request)
def self.list(user)
Gitlab::Redis::SharedState.with do |redis|
cleaned_up_lookup_entries(redis, user).map do |entry|
# rubocop:disable Security/MarshalLoad
Marshal.load(entry)
# rubocop:enable Security/MarshalLoad
cleaned_up_lookup_entries(redis, user).map do |raw_session|
load_raw_session(raw_session)
end
end
end
def self.destroy(user, session_id)
return unless session_id
Gitlab::Redis::SharedState.with do |redis|
destroy_sessions(redis, user, [session_id])
end
end
def self.destroy_with_public_id(user, public_id)
session_id = decrypt_public_id(public_id)
destroy(user, session_id) unless session_id.nil?
decrypted_id = decrypt_public_id(public_id)
return if decrypted_id.nil?
session_id = Rack::Session::SessionId.new(decrypted_id)
destroy(user, session_id)
end
def self.destroy_sessions(redis, user, session_ids)
key_names = session_ids.map {|session_id| key_name(user.id, session_id) }
session_names = session_ids.map {|session_id| "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id}" }
key_names = session_ids.map { |session_id| key_name(user.id, session_id.public_id) }
redis.srem(lookup_key_name(user.id), session_ids)
redis.srem(lookup_key_name(user.id), session_ids.map(&:public_id))
redis.del(key_names)
redis.del(session_names)
redis.del(rack_session_keys(session_ids))
end
def self.cleanup(user)
@@ -110,28 +114,65 @@ def self.list_sessions(user)
sessions_from_ids(session_ids_for_user(user.id))
end
# Lists the relevant session IDs for the user.
#
# Returns an array of Rack::Session::SessionId objects
def self.session_ids_for_user(user_id)
Gitlab::Redis::SharedState.with do |redis|
redis.smembers(lookup_key_name(user_id))
session_ids = redis.smembers(lookup_key_name(user_id))
session_ids.map { |id| Rack::Session::SessionId.new(id) }
end
end
# Lists the ActiveSession objects for the given session IDs.
#
# session_ids - An array of Rack::Session::SessionId objects
#
# Returns an array of ActiveSession objects
def self.sessions_from_ids(session_ids)
return [] if session_ids.empty?
Gitlab::Redis::SharedState.with do |redis|
session_keys = session_ids.map { |session_id| "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id}" }
session_keys = rack_session_keys(session_ids)
session_keys.each_slice(SESSION_BATCH_SIZE).flat_map do |session_keys_batch|
redis.mget(session_keys_batch).compact.map do |raw_session|
# rubocop:disable Security/MarshalLoad
Marshal.load(raw_session)
# rubocop:enable Security/MarshalLoad
load_raw_session(raw_session)
end
end
end
end
# Deserializes an ActiveSession object from Redis.
#
# raw_session - Raw bytes from Redis
#
# Returns an ActiveSession object
def self.load_raw_session(raw_session)
# rubocop:disable Security/MarshalLoad
session = Marshal.load(raw_session)
# rubocop:enable Security/MarshalLoad
# Older ActiveSession models serialize `session_id` as strings. To
# avoid breaking older sessions, we keep backwards compatibility
# with older Redis keys and instantiate Rack::Session::SessionId here.
session.session_id = Rack::Session::SessionId.new(session.session_id) if session.try(:session_id).is_a?(String)
session
end
def self.rack_session_keys(session_ids)
session_ids.each_with_object([]) do |session_id, arr|
# This is a redis-rack implementation detail
# (https://github.com/redis-store/redis-rack/blob/master/lib/rack/session/redis.rb#L88)
#
# We need to delete session keys based on the legacy public key name
# and the newer private ID keys, but there's no well-defined interface
# so we have to do it directly.
arr << "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id.public_id}"
arr << "#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id.private_id}"
end
end
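As a concrete illustration of the two key shapes built above, here is a sketch that assumes `Gitlab::Redis::SharedState::SESSION_NAMESPACE` resolves to the `session:gitlab` prefix and uses a hypothetical raw ID:

```ruby
require 'rack/session/abstract/id'

sid = Rack::Session::SessionId.new('05c6782c4d8d2d8a7c0e2b1f')

# Legacy key layout, based on the public session ID.
legacy_key  = "session:gitlab:#{sid.public_id}"
# Newer key layout, based on the private session ID used by newer redis-rack.
private_key = "session:gitlab:#{sid.private_id}"

# destroy_sessions deletes both, so neither an old-style nor a new-style
# Redis entry survives when a device is signed out.
```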
def self.raw_active_session_entries(redis, session_ids, user_id)
return [] if session_ids.empty?
@@ -146,7 +187,7 @@ def self.active_session_entries(session_ids, user_id, redis)
entry_keys = raw_active_session_entries(redis, session_ids, user_id)
entry_keys.compact.map do |raw_session|
Marshal.load(raw_session) # rubocop:disable Security/MarshalLoad
load_raw_session(raw_session)
end
end
@@ -159,10 +200,13 @@ def self.clean_up_old_sessions(redis, user)
sessions = active_session_entries(session_ids, user.id, redis)
sessions.sort_by! {|session| session.updated_at }.reverse!
destroyable_sessions = sessions.drop(ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
destroyable_session_ids = destroyable_sessions.map { |session| session.send :session_id } # rubocop:disable GitlabSecurity/PublicSend
destroyable_session_ids = destroyable_sessions.map { |session| session.session_id }
destroy_sessions(redis, user, destroyable_session_ids) if destroyable_session_ids.any?
end
# Cleans up the lookup set by removing any session IDs that are no longer present.
#
# Returns an array of marshalled ActiveModel objects that are still active.
def self.cleaned_up_lookup_entries(redis, user)
session_ids = session_ids_for_user(user.id)
entries = raw_active_session_entries(redis, session_ids, user.id)
@@ -181,13 +225,8 @@ def self.cleaned_up_lookup_entries(redis, user)
end
private_class_method def self.decrypt_public_id(public_id)
decoded_id = CGI.unescape(public_id)
Gitlab::CryptoHelper.aes256_gcm_decrypt(decoded_id)
Gitlab::CryptoHelper.aes256_gcm_decrypt(public_id)
rescue
nil
end
private
attr_reader :session_id
end
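The encrypted `public_id` and `destroy_with_public_id` pair above implies the following round trip. This is a sketch that assumes the GitLab Rails environment (and therefore `Gitlab::CryptoHelper` and Rack) is loaded, with a hypothetical raw ID:

```ruby
sid = Rack::Session::SessionId.new('05c6782c4d8d2d8a7c0e2b1f')

# ActiveSession#public_id: encrypt the public session ID so it can be shown
# in the UI (and submitted back) without exposing the real ID.
encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(sid.public_id)

# ActiveSession.destroy_with_public_id: decrypt, rebuild the SessionId and
# destroy the matching lookup and Redis session entries.
decrypted = Gitlab::CryptoHelper.aes256_gcm_decrypt(encrypted)

decrypted == sid.public_id                                            # => true
Rack::Session::SessionId.new(decrypted).private_id == sid.private_id  # => true
```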
@@ -2,17 +2,19 @@
# rubocop:disable Rails/ActiveRecordAliases
class WikiPage
include Gitlab::Utils::StrongMemoize
PageChangedError = Class.new(StandardError)
PageRenameError = Class.new(StandardError)
MAX_TITLE_BYTES = 245
MAX_DIRECTORY_BYTES = 255
FrontMatterTooLong = Class.new(StandardError)
include ActiveModel::Validations
include ActiveModel::Conversion
include StaticModel
extend ActiveModel::Naming
delegate :content, :front_matter, to: :parsed_content
def self.primary_key
'slug'
end
@@ -114,8 +116,7 @@ def title=(new_title)
@attributes[:title] = new_title
end
# The raw content of this page.
def content
def raw_content
@attributes[:content] ||= @page&.text_data
end
@@ -238,7 +239,7 @@ def update(attrs = {})
save do
wiki.update_page(
@page,
content: content,
content: raw_content,
format: format,
message: attrs[:message],
title: title
@@ -281,8 +282,10 @@ def title_changed?
# Updates the current @attributes hash by merging a hash of params
def update_attributes(attrs)
attrs[:title] = process_title(attrs[:title]) if attrs[:title].present?
update_front_matter(attrs)
attrs.slice!(:content, :format, :message, :title)
clear_memoization(:parsed_content) if attrs.has_key?(:content)
@attributes.merge!(attrs)
end
@@ -293,6 +296,28 @@ def to_ability_name
private
def serialize_front_matter(hash)
return '' unless hash.present?
YAML.dump(hash.transform_keys(&:to_s)) + "---\n"
end
def update_front_matter(attrs)
return unless Gitlab::WikiPages::FrontMatterParser.enabled?(project)
return unless attrs.has_key?(:front_matter)
fm_yaml = serialize_front_matter(attrs[:front_matter])
raise FrontMatterTooLong if fm_yaml.size > Gitlab::WikiPages::FrontMatterParser::MAX_FRONT_MATTER_LENGTH
attrs[:content] = fm_yaml + (attrs[:content].presence || content)
end
def parsed_content
strong_memoize(:parsed_content) do
Gitlab::WikiPages::FrontMatterParser.new(raw_content, project).parse
end
end
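For reference, a standalone sketch (plain Ruby outside the app; the attribute values are made up) of the front-matter string that `serialize_front_matter` produces and that the `FrontMatterParser` later splits back off the page body:

```ruby
require 'yaml'

front_matter = { title: 'Deploy guide', tags: %w[ops kubernetes] }

# YAML.dump already emits the opening "---\n" document marker, so appending
# another "---\n" closes the front matter block before the page body.
fm_yaml = YAML.dump(front_matter.transform_keys(&:to_s)) + "---\n"

puts fm_yaml + 'The page body starts here.'
# ---
# title: Deploy guide
# tags:
# - ops
# - kubernetes
# ---
# The page body starts here.
```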
# Process and format the title based on the user input.
def process_title(title)
return if title.blank?
@@ -339,14 +364,16 @@ def save
def validate_path_limits
*dirnames, title = @attributes[:title].split('/')
if title && title.bytesize > MAX_TITLE_BYTES
errors.add(:title, _("exceeds the limit of %{bytes} bytes") % { bytes: MAX_TITLE_BYTES })
if title && title.bytesize > Gitlab::WikiPages::MAX_TITLE_BYTES
errors.add(:title, _("exceeds the limit of %{bytes} bytes") % {
bytes: Gitlab::WikiPages::MAX_TITLE_BYTES
})
end
invalid_dirnames = dirnames.select { |d| d.bytesize > MAX_DIRECTORY_BYTES }
invalid_dirnames = dirnames.select { |d| d.bytesize > Gitlab::WikiPages::MAX_DIRECTORY_BYTES }
invalid_dirnames.each do |dirname|
errors.add(:title, _('exceeds the limit of %{bytes} bytes for directory name "%{dirname}"') % {
bytes: MAX_DIRECTORY_BYTES,
bytes: Gitlab::WikiPages::MAX_DIRECTORY_BYTES,
dirname: dirname
})
end
......
@@ -29,7 +29,15 @@ def execute
group.chat_team&.remove_mattermost_team(current_user)
user_ids_for_project_authorizations_refresh = group.user_ids_for_project_authorizations
group.destroy
UserProjectAccessChangedService
.new(user_ids_for_project_authorizations_refresh)
.execute(blocking: true)
group
end
# rubocop: enable CodeReuse/ActiveRecord
end
......
---
title: Read metadata from Wiki front-matter
merge_request: 27706
author:
type: added
# frozen_string_literal: true
class ScheduleRecalculateProjectAuthorizationsThirdRun < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'RecalculateProjectAuthorizationsWithMinMaxUserId'
BATCH_SIZE = 2_500
DELAY_INTERVAL = 2.minutes.to_i
disable_ddl_transaction!
class User < ActiveRecord::Base
include ::EachBatch
self.table_name = 'users'
end
def up
say "Scheduling #{MIGRATION} jobs"
queue_background_migration_jobs_by_range_at_intervals(User, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
def down
end
end
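For context, a rough sketch (illustrative only, not the helper's actual implementation) of what `queue_background_migration_jobs_by_range_at_intervals` does with the constants defined above: it walks the `users` table in ID batches and enqueues one delayed background-migration job per `[min_id, max_id]` range.

```ruby
# Rough sketch; see Gitlab::Database::MigrationHelpers for the real helper.
User.each_batch(of: BATCH_SIZE) do |relation, index|
  min_id, max_id = relation.pluck(Arel.sql('MIN(id), MAX(id)')).first

  # Jobs are spaced DELAY_INTERVAL seconds apart so that the
  # RecalculateProjectAuthorizationsWithMinMaxUserId runs do not pile up.
  BackgroundMigrationWorker.perform_in(index * DELAY_INTERVAL, MIGRATION, [min_id, max_id])
end
```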
@@ -12947,6 +12947,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200204070729
20200204113223
20200204113224
20200204113225
20200204131054
20200204131831
20200205143231
......
@@ -2,6 +2,7 @@ Akismet
Alertmanager
Algolia
Ansible
Anthos
API
approvers
Artifactory
@@ -118,6 +119,8 @@ hardcode
hardcoded
hardcodes
Helm
Heroku
Herokuish
HipChat
hostname
hostnames
@@ -133,6 +136,7 @@ initializers
interdependencies
interdependency
Irker
Istio
jasmine-jquery
JavaScript
Jaeger
@@ -180,6 +184,7 @@ misconfiguring
mitigations
mockup
mockups
ModSecurity
nameserver
nameservers
namespace
@@ -326,6 +331,10 @@ unchecking
unchecks
uncomment
uncommented
unencode
unencoded
unencoder
unencodes
unencrypted
Unicorn
unindexed
......
@@ -22,10 +22,9 @@ Pipelines comprise:
Jobs are executed by [Runners](../runners/README.md). Multiple jobs in the same stage are executed in parallel,
if there are enough concurrent runners.
If all the jobs in a stage:
If *all* jobs in a stage succeed, the pipeline moves on to the next stage.
- Succeed, the pipeline moves on to the next stage.
- Fail, the next stage is not (usually) executed and the pipeline ends early.
If *any* job in a stage fails, the next stage is not (usually) executed and the pipeline ends early.
In general, pipelines are executed automatically and require no intervention once created. However, there are
also times when you can manually interact with a pipeline.
@@ -46,6 +45,10 @@ you may need to enable pipeline triggering in your project's
Pipelines can be configured in many different ways:
- [Basic pipelines](pipeline_architectures.md#basic-pipelines) run everything in each stage concurrently,
followed by the next stage.
- [Directed Acyclic Graph Pipeline (DAG) pipelines](../directed_acyclic_graph/index.md) are based on relationships
between jobs and can run more quickly than basic pipelines.
- [Multi-project pipelines](../multi_project_pipelines.md) combine pipelines for different projects together.
- [Parent-Child pipelines](../parent_child_pipelines.md) break down complex pipelines
into one parent pipeline that can trigger multiple child sub-pipelines, which all
......
@@ -418,12 +418,23 @@ spec:
## Troubleshooting
- Auto Build and Auto Test may fail in detecting your language/framework. There
may be no buildpack for your application, or your application may be missing the
key files the buildpack is looking for. For example, for Ruby applications, you must
have a `Gemfile` to be properly detected, even though it is possible to write a
Ruby app without a `Gemfile`. Try specifying a [custom
buildpack](customize.md#custom-buildpacks).
- Auto Build and Auto Test may fail to detect your language or framework with the
following error:
```plaintext
Step 5/11 : RUN /bin/herokuish buildpack build
---> Running in eb468cd46085
-----> Unable to select a buildpack
The command '/bin/sh -c /bin/herokuish buildpack build' returned a non-zero code: 1
```
The following are possible reasons:
- Your application may be missing the key files the buildpack is looking for. For
example, for Ruby applications you must have a `Gemfile` to be properly detected,
even though it is possible to write a Ruby app without a `Gemfile`.
- There may be no buildpack for your application. Try specifying a
[custom buildpack](customize.md#custom-buildpacks).
- Auto Test may fail because of a mismatch between testing frameworks. In this
case, you may need to customize your `.gitlab-ci.yml` with your test commands.
- Auto Deploy will fail if GitLab can not create a Kubernetes namespace and
......
@@ -58,7 +58,7 @@ under which this application will be deployed.
1. On the project's landing page, click **Add Kubernetes cluster**
(note that this option is also available when you navigate to **Operations > Kubernetes**).
![Project landing page](../autodevops/img/guide_project_landing_page_v12_3.png)
![Project landing page](../autodevops/img/guide_project_landing_page_v12_10.png)
1. On the **Create new cluster on GKE** tab, click **Sign in with Google**.
......