
Commit aad0ae71 authored by James Lopez

squashed - fixed label and milestone association problems, updated specs and refactored reader class a bit
parent 706f60a48bfc
......@@ -41,6 +41,7 @@ v 8.10.3 (unreleased)
- Fix hooks missing on imported GitLab projects
- Properly abort a merge when merge conflicts occur
- Ignore invalid IPs in X-Forwarded-For when trusted proxies are configured.
- Fix Import/Export issue where imported milestones and labels were not associated properly
v 8.10.2
- User can now search branches by name. !5144
......
class LabelLink < ActiveRecord::Base
include Importable
belongs_to :target, polymorphic: true
belongs_to :label
-validates :target, presence: true
-validates :label, presence: true
+validates :target, presence: true, unless: :importing?
+validates :label, presence: true, unless: :importing?
end
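
The new unless: :importing? guards defer the presence checks while a project export is being restored: the import code flags each record as importing (see the respond_to?(:importing) call in RelationFactory further down), so a LabelLink can be built before its target issue or merge request exists. A minimal sketch of the intended behaviour, assuming the Importable concern exposes a plain importing accessor (illustrative, not part of the commit):

link = LabelLink.new(label: Label.new(title: 'bug'))   # target not assigned yet during import
link.importing = true                                   # set by the import code path
link.valid?                                             # => true, presence checks are skipped
link.importing = false
link.valid?                                             # => false, :target is required again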
......@@ -1253,6 +1253,16 @@ def authorized_for_user?(user, min_access_level = nil)
authorized_for_user_by_shared_projects?(user, min_access_level)
end
def append_or_update_attribute(name, value)
old_values = public_send(name.to_s)
if Project.reflect_on_association(name).try(:macro) == :has_many && old_values.any?
update_attribute(name, old_values + value)
else
update_attribute(name, value)
end
end
private
def authorized_for_user_by_group?(user, min_access_level)
......
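
append_or_update_attribute appears to be what lets collections that were already partially restored (for example labels created through label_links nested under issues) survive the later project-level pass: for a has_many association that already holds records the new values are appended, otherwise the attribute is simply replaced. A rough usage sketch with illustrative data, not taken from the commit:

project.labels.map(&:title)                              #=> ["bug"]   restored earlier in the import
project.append_or_update_attribute(:labels, [Label.new(title: 'feature')])
project.labels.map(&:title)                              #=> ["bug", "feature"]   appended, not replaced
project.append_or_update_attribute(:description, 'imported project')
project.description                                      #=> "imported project"   plain update for a non-association attribute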
......@@ -2,7 +2,7 @@ module Gitlab
module ImportExport
extend self
-VERSION = '0.1.2'
+VERSION = '0.1.3'
FILENAME_LIMIT = 50
def export_path(relative_path:)
......
......@@ -3,11 +3,12 @@ project_tree:
- issues:
- :events
- notes:
- :author
- :events
- :labels
- milestones:
- :events
- :author
- :events
- label_links:
- :label
- milestone:
- :events
- snippets:
- notes:
:author
......@@ -20,6 +21,10 @@ project_tree:
- :events
- :merge_request_diff
- :events
- label_links:
- :label
- milestone:
- :events
- pipelines:
- notes:
- :author
......@@ -31,6 +36,9 @@ project_tree:
- :services
- :hooks
- :protected_branches
- :labels
- milestones:
- :events
# Only include the following attributes for the models specified.
included_attributes:
......@@ -55,6 +63,10 @@ excluded_attributes:
- :expired_at
merge_request_diff:
- :st_diffs
issues:
- :milestone_id
merge_requests:
- :milestone_id
methods:
statuses:
......
module Gitlab
module ImportExport
# Generates a hash that conforms with http://apidock.com/rails/Hash/to_json
# and its peculiar options.
class JsonHashBuilder
def self.build(model_objects, attributes_finder)
new(model_objects, attributes_finder).build
end
def initialize(model_objects, attributes_finder)
@model_objects = model_objects
@attributes_finder = attributes_finder
end
def build
process_model_objects(@model_objects)
end
private
# Called when the model is actually a hash containing other relations (more models)
# Returns the config in the right format for calling +to_json+
#
# +model_object_hash+ - A model relationship such as:
# {:merge_requests=>[:merge_request_diff, :notes]}
def process_model_objects(model_object_hash)
json_config_hash = {}
current_key = model_object_hash.keys.first
model_object_hash.values.flatten.each do |model_object|
@attributes_finder.parse(current_key) { |hash| json_config_hash[current_key] ||= hash }
handle_model_object(current_key, model_object, json_config_hash)
end
json_config_hash
end
# Creates or adds to an existing hash an individual model or list
#
# +current_key+ main model that will be a key in the hash
# +model_object+ model or list of models to include in the hash
# +json_config_hash+ the original hash containing the root model
def handle_model_object(current_key, model_object, json_config_hash)
model_or_sub_model = model_object.is_a?(Hash) ? process_model_objects(model_object) : model_object
if json_config_hash[current_key]
add_model_value(current_key, model_or_sub_model, json_config_hash)
else
create_model_value(current_key, model_or_sub_model, json_config_hash)
end
end
# Constructs a new hash that will hold the configuration for that particular object
# It may include exceptions or other attribute detail configuration, parsed by +@attributes_finder+
#
# +current_key+ main model that will be a key in the hash
# +value+ existing model to be included in the hash
# +json_config_hash+ the original hash containing the root model
def create_model_value(current_key, value, json_config_hash)
parsed_hash = { include: value }
parse_hash(value, parsed_hash)
json_config_hash[current_key] = parsed_hash
end
# Calls attributes finder to parse the hash and add any attributes to it
#
# +value+ existing model to be included in the hash
# +parsed_hash+ the original hash
def parse_hash(value, parsed_hash)
@attributes_finder.parse(value) do |hash|
parsed_hash = { include: hash_or_merge(value, hash) }
end
end
# Adds new model configuration to an existing hash with key +current_key+
# It may include exceptions or other attribute detail configuration, parsed by +@attributes_finder+
#
# +current_key+ main model that will be a key in the hash
# +value+ existing model to be included in the hash
# +json_config_hash+ the original hash containing the root model
def add_model_value(current_key, value, json_config_hash)
@attributes_finder.parse(value) { |hash| value = { value => hash } }
add_to_array(current_key, json_config_hash, value)
end
# Adds new model configuration to an existing hash with key +current_key+
# it creates a new array if it was previously a single value
#
# +current_key+ main model that will be a key in the hash
# +value+ existing model to be included in the hash
# +json_config_hash+ the original hash containing the root model
def add_to_array(current_key, json_config_hash, value)
old_values = json_config_hash[current_key][:include]
json_config_hash[current_key][:include] = ([old_values] + [value]).compact.flatten
end
# Construct a new hash or merge with an existing one a model configuration
# This is to fulfil +to_json+ requirements.
#
# +hash+ hash containing configuration generated mainly from +@attributes_finder+
# +value+ existing model to be included in the hash
def hash_or_merge(value, hash)
value.is_a?(Hash) ? value.merge(hash) : { value => hash }
end
end
end
end
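
For reference, a small worked example of what JsonHashBuilder.build produces, assuming an attributes finder that has no extra only/except/methods configuration for the keys involved (the input is illustrative, not taken from the commit):

input = { issues: [:notes, { label_links: [:label] }] }
Gitlab::ImportExport::JsonHashBuilder.build(input, attributes_finder)
#=> { issues: { include: [:notes, { label_links: { include: :label } }] } }

The result is handed to ActiveRecord's +to_json+ as its :include option, which is why nested relations are wrapped in their own { include: ... } hashes.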
......@@ -47,7 +47,7 @@ def create_relations
relation_key = relation.is_a?(Hash) ? relation.keys.first : relation
relation_hash = create_relation(relation_key, @tree_hash[relation_key.to_s])
-saved << restored_project.update_attribute(relation_key, relation_hash)
+saved << restored_project.append_or_update_attribute(relation_key, relation_hash)
end
saved.all?
end
......@@ -78,7 +78,7 @@ def create_sub_relations(relation, tree_hash)
relation_key = relation.keys.first.to_s
return if tree_hash[relation_key].blank?
-tree_hash[relation_key].each do |relation_item|
+[tree_hash[relation_key]].flatten.each do |relation_item|
relation.values.flatten.each do |sub_relation|
# We just use author to get the user ID, do not attempt to create an instance.
next if sub_relation == :author
......
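
Wrapping the value in an array before flattening is what lets the new one-to-one relations reuse the same loop: label_links arrives from the exported JSON as an array of hashes, while milestone arrives as a single hash. A quick sketch of the difference, plain Ruby with illustrative values:

[[{ 'label_id' => 2 }, { 'label_id' => 3 }]].flatten   #=> [{ 'label_id' => 2 }, { 'label_id' => 3 }]
[{ 'title' => 'v0.0' }].flatten                        #=> [{ 'title' => 'v0.0' }]

Either way, create_sub_relations ends up iterating over an array of relation hashes.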
......@@ -29,87 +29,12 @@ def project_tree
def build_hash(model_list)
model_list.map do |model_objects|
if model_objects.is_a?(Hash)
-build_json_config_hash(model_objects)
+Gitlab::ImportExport::JsonHashBuilder.build(model_objects, @attributes_finder)
else
@attributes_finder.find(model_objects)
end
end
end
# Called when the model is actually a hash containing other relations (more models)
# Returns the config in the right format for calling +to_json+
# +model_object_hash+ - A model relationship such as:
# {:merge_requests=>[:merge_request_diff, :notes]}
def build_json_config_hash(model_object_hash)
@json_config_hash = {}
model_object_hash.values.flatten.each do |model_object|
current_key = model_object_hash.keys.first
@attributes_finder.parse(current_key) { |hash| @json_config_hash[current_key] ||= hash }
handle_model_object(current_key, model_object)
process_sub_model(current_key, model_object) if model_object.is_a?(Hash)
end
@json_config_hash
end
# If the model is a hash, process the sub_models, which could also be hashes
# If there is a list, add to an existing array, otherwise use hash syntax
# +current_key+ main model that will be a key in the hash
# +model_object+ model or list of models to include in the hash
def process_sub_model(current_key, model_object)
sub_model_json = build_json_config_hash(model_object).dup
@json_config_hash.slice!(current_key)
if @json_config_hash[current_key] && @json_config_hash[current_key][:include]
@json_config_hash[current_key][:include] << sub_model_json
else
@json_config_hash[current_key] = { include: sub_model_json }
end
end
# Creates or adds to an existing hash an individual model or list
# +current_key+ main model that will be a key in the hash
# +model_object+ model or list of models to include in the hash
def handle_model_object(current_key, model_object)
if @json_config_hash[current_key]
add_model_value(current_key, model_object)
else
create_model_value(current_key, model_object)
end
end
# Constructs a new hash that will hold the configuration for that particular object
# It may include exceptions or other attribute detail configuration, parsed by +@attributes_finder+
# +current_key+ main model that will be a key in the hash
# +value+ existing model to be included in the hash
def create_model_value(current_key, value)
parsed_hash = { include: value }
@attributes_finder.parse(value) do |hash|
parsed_hash = { include: hash_or_merge(value, hash) }
end
@json_config_hash[current_key] = parsed_hash
end
# Adds new model configuration to an existing hash with key +current_key+
# It may include exceptions or other attribute detail configuration, parsed by +@attributes_finder+
# +current_key+ main model that will be a key in the hash
# +value+ existing model to be included in the hash
def add_model_value(current_key, value)
@attributes_finder.parse(value) { |hash| value = { value => hash } }
old_values = @json_config_hash[current_key][:include]
@json_config_hash[current_key][:include] = ([old_values] + [value]).compact.flatten
end
# Construct a new hash or merge with an existing one a model configuration
# This is to fulfil +to_json+ requirements.
# +value+ existing model to be included in the hash
# +hash+ hash containing configuration generated mainly from +@attributes_finder+
def hash_or_merge(value, hash)
value.is_a?(Hash) ? value.merge(hash) : { value => hash }
end
end
end
end
......@@ -13,6 +13,10 @@ class RelationFactory
BUILD_MODELS = %w[Ci::Build commit_status].freeze
IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
EXISTING_OBJECT_CHECK = %i[milestone milestones label labels].freeze
def self.create(*args)
new(*args).create
end
......@@ -22,24 +26,35 @@ def initialize(relation_sym:, relation_hash:, members_mapper:, user:)
@relation_hash = relation_hash.except('id', 'noteable_id')
@members_mapper = members_mapper
@user = user
@imported_object_retries = 0
end
# Creates an object from an actual model with name "relation_sym" with params from
# the relation_hash, updating references with new object IDs, mapping users using
# the "members_mapper" object, also updating notes if required.
def create
set_note_author if @relation_name == :notes
setup_models
generate_imported_object
end
private
def setup_models
if @relation_name == :notes
set_note_author
# TODO: note attachments not supported yet
@relation_hash['attachment'] = nil
end
update_user_references
update_project_references
reset_ci_tokens if @relation_name == 'Ci::Trigger'
@relation_hash['data'].deep_symbolize_keys! if @relation_name == :events && @relation_hash['data']
set_st_diffs if @relation_name == :merge_request_diff
generate_imported_object
end
private
def update_user_references
USER_REFERENCES.each do |reference|
if @relation_hash[reference]
......@@ -112,10 +127,14 @@ def relation_class
end
def imported_object
-imported_object = relation_class.new(parsed_relation_hash)
-yield(imported_object) if block_given?
-imported_object.importing = true if imported_object.respond_to?(:importing)
-imported_object
+yield(existing_or_new_object) if block_given?
+existing_or_new_object.importing = true if existing_or_new_object.respond_to?(:importing)
+existing_or_new_object
+rescue ActiveRecord::RecordNotUnique
+# as the operation is not atomic, retry in the unlikely scenario an INSERT is
+# performed on the same object between the SELECT and the INSERT
+@imported_object_retries += 1
+retry if @imported_object_retries < IMPORTED_OBJECT_MAX_RETRIES
end
def update_note_for_missing_author(author_name)
......@@ -134,6 +153,20 @@ def parsed_relation_hash
def set_st_diffs
@relation_hash['st_diffs'] = @relation_hash.delete('utf8_st_diffs')
end
def existing_or_new_object
# Only find existing records to avoid mapping tables such as milestones
# Otherwise always create the record, skipping the extra SELECT clause.
@existing_or_new_object ||= begin
if EXISTING_OBJECT_CHECK.include?(@relation_name)
existing_object = relation_class.find_or_initialize_by(parsed_relation_hash.slice('title', 'project_id'))
existing_object.assign_attributes(parsed_relation_hash)
existing_object
else
relation_class.new(parsed_relation_hash)
end
end
end
end
end
end
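
existing_or_new_object is what keeps the importer from duplicating labels and milestones that already exist (or that were created earlier in the same import): for the relations in EXISTING_OBJECT_CHECK it looks the record up by title and project_id and updates it in place, only building a fresh object when nothing matches. A rough sketch of the effect, with illustrative data:

Milestone.create!(project_id: 1, title: 'v1.0')          # already present in the target project
attrs = { 'title' => 'v1.0', 'project_id' => 1, 'description' => 'from the export' }
milestone = Milestone.find_or_initialize_by(attrs.slice('title', 'project_id'))
milestone.assign_attributes(attrs)
milestone.persisted?                                      #=> true, the existing record is reused

The ActiveRecord::RecordNotUnique rescue above then retries, per its inline comment, when the same record is inserted concurrently between that lookup and the save, up to IMPORTED_OBJECT_MAX_RETRIES attempts.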
......@@ -18,7 +18,6 @@
"position": 0,
"branch_name": null,
"description": "Aliquam enim illo et possimus.",
"milestone_id": 18,
"state": "opened",
"iid": 10,
"updated_by_id": null,
......@@ -27,6 +26,52 @@
"due_date": null,
"moved_to_id": null,
"test_ee_field": "test",
"milestone": {
"id": 1,
"title": "v0.0",
"project_id": 8,
"description": "test milestone",
"due_date": null,
"created_at": "2016-06-14T15:02:04.415Z",
"updated_at": "2016-06-14T15:02:04.415Z",
"state": "active",
"iid": 1,
"events": [
{
"id": 487,
"target_type": "Milestone",
"target_id": 1,
"title": null,
"data": null,
"project_id": 46,
"created_at": "2016-06-14T15:02:04.418Z",
"updated_at": "2016-06-14T15:02:04.418Z",
"action": 1,
"author_id": 18
}
]
},
"label_links": [
{
"id": 2,
"label_id": 2,
"target_id": 3,
"target_type": "Issue",
"created_at": "2016-07-22T08:57:02.840Z",
"updated_at": "2016-07-22T08:57:02.840Z",
"label": {
"id": 2,
"title": "test2",
"color": "#428bca",
"project_id": 8,
"created_at": "2016-07-22T08:55:44.161Z",
"updated_at": "2016-07-22T08:55:44.161Z",
"template": false,
"description": "",
"priority": null
}
}
],
"notes": [
{
"id": 351,
......@@ -233,7 +278,6 @@
"position": 0,
"branch_name": null,
"description": "Voluptate vel reprehenderit facilis omnis voluptas magnam tenetur.",
"milestone_id": 16,
"state": "opened",
"iid": 9,
"updated_by_id": null,
......@@ -447,7 +491,6 @@
"position": 0,
"branch_name": null,
"description": "Ea recusandae neque autem tempora.",
"milestone_id": 16,
"state": "closed",
"iid": 8,
"updated_by_id": null,
......@@ -661,7 +704,6 @@
"position": 0,
"branch_name": null,
"description": "Maiores architecto quos in dolorem.",
"milestone_id": 17,
"state": "opened",
"iid": 7,
"updated_by_id": null,
......@@ -875,7 +917,6 @@
"position": 0,
"branch_name": null,
"description": "Ut aut ut et tenetur velit aut id modi.",
"milestone_id": 16,
"state": "opened",
"iid": 6,
"updated_by_id": null,
......@@ -1089,7 +1130,6 @@
"position": 0,
"branch_name": null,
"description": "Dicta nisi nihil non ipsa velit.",
"milestone_id": 20,
"state": "closed",
"iid": 5,
"updated_by_id": null,
......@@ -1303,7 +1343,6 @@
"position": 0,
"branch_name": null,
"description": "Ut et explicabo vel voluptatem consequuntur ut sed.",
"milestone_id": 19,
"state": "closed",
"iid": 4,
"updated_by_id": null,
......@@ -1517,7 +1556,6 @@
"position": 0,
"branch_name": null,
"description": "Non asperiores velit accusantium voluptate.",
"milestone_id": 18,
"state": "closed",
"iid": 3,
"updated_by_id": null,
......@@ -1731,7 +1769,6 @@
"position": 0,
"branch_name": null,
"description": "Molestiae corporis magnam et fugit aliquid nulla quia.",
"milestone_id": 17,
"state": "closed",
"iid": 2,
"updated_by_id": null,
......@@ -1945,7 +1982,6 @@
"position": 0,
"branch_name": null,
"description": "Quod ad architecto qui est sed quia.",
"milestone_id": 20,
"state": "closed",
"iid": 1,
"updated_by_id": null,
......@@ -2259,117 +2295,6 @@
"author_id": 25
}
]
},
{
"id": 18,
"title": "v2.0",
"project_id": 5,
"description": "Error dolorem rerum aut nulla.",
"due_date": null,
"created_at": "2016-06-14T15:02:04.576Z",
"updated_at": "2016-06-14T15:02:04.576Z",
"state": "active",
"iid": 3,
"events": [
{
"id": 242,
"target_type": "Milestone",
"target_id": 18,
"title": null,
"data": null,
"project_id": 36,
"created_at": "2016-06-14T15:02:04.579Z",
"updated_at": "2016-06-14T15:02:04.579Z",
"action": 1,
"author_id": 1
},
{
"id": 58,
"target_type": "Milestone",
"target_id": 18,
"title": null,
"data": null,
"project_id": 5,
"created_at": "2016-06-14T15:02:04.579Z",
"updated_at": "2016-06-14T15:02:04.579Z",
"action": 1,
"author_id": 22
}
]
},
{
"id": 17,
"title": "v1.0",
"project_id": 5,
"description": "Molestiae perspiciatis voluptates doloremque commodi veniam consequatur.",
"due_date": null,
"created_at": "2016-06-14T15:02:04.569Z",
"updated_at": "2016-06-14T15:02:04.569Z",
"state": "active",
"iid": 2,
"events": [
{
"id": 243,
"target_type": "Milestone",
"target_id": 17,
"title": null,
"data": null,
"project_id": 36,
"created_at": "2016-06-14T15:02:04.570Z",
"updated_at": "2016-06-14T15:02:04.570Z",
"action": 1,
"author_id": 1
},
{
"id": 57,
"target_type": "Milestone",
"target_id": 17,
"title": null,
"data": null,
"project_id": 5,
"created_at": "2016-06-14T15:02:04.570Z",
"updated_at": "2016-06-14T15:02:04.570Z",