Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,11 @@
# GoodData Ruby SDK Changelog
## 2.1.13
- FEATURE: TMA-1676 Support LCM release across domain
- FEATURE: TMA-1672 Support sync process with generic datasource
- FEATURE: MSF-17743 Upgrade custom v2 for rollout brick
- BUGFIX: MSF-17975 Introduce gdcshare to lcm bricks
- BUGFIX: TMA-1673 Update params processing to accept dot and space

## 2.1.12
- FEATURE: MSF-17621 Apply patched version for activesupport to fix vulnerable issue
- CONFIG: SETI-4379 Add gdc-fossa configuration for gooddata-ruby
Expand Down
2 changes: 1 addition & 1 deletion SDK_VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.1.12
2.1.13
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
3.7.21
3.7.22
47 changes: 47 additions & 0 deletions lib/gooddata/helpers/data_source_helpers.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# encoding: UTF-8
# frozen_string_literal: true
#
# Copyright (c) 2010-2020 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

module GoodData
  module Helpers
    class << self
      # Fetches a data source from the server by its ID.
      #
      # @param [String] data_source_id The data source ID; a blank ID is a no-op
      # @param [Object] client The REST client used to perform the request
      # @return [Hash, nil] The data source payload, or nil when the ID is blank
      def get_data_source_by_id(data_source_id, client)
        return if data_source_id.blank?

        client.get("/gdc/dataload/dataSources/#{data_source_id}")
      end

      # Verifies that a data source with the given alias exists in the domain
      # and has the expected type.
      #
      # @param [Hash] ds_alias The alias info (:alias, :type)
      # @param [Object] client The REST client object
      # @return [String] ID of the matching data source in the domain
      # @raise [RuntimeError] When the alias info is missing, the alias cannot
      #   be resolved, or it resolves to a data source of a different type
      def verify_data_source_alias(ds_alias, client)
        domain = client.connection.server.url
        fail "The data source alias is empty, check your data source configuration." unless ds_alias

        uri = "/gdc/dataload/dataSources/internal/availableAlias?alias=#{ds_alias[:alias]}"
        res = client.get(uri)
        fail "Unable to get information about the Data Source '#{ds_alias[:alias]}' in the domain '#{domain}'" unless res
        # 'available' == true means NO existing data source owns this alias
        fail "Unable to find the #{ds_alias[:type]} Data Source '#{ds_alias[:alias]}' in the domain '#{domain}'" if res['availableAlias']['available']

        ds_type = res['availableAlias']['existingDataSource']['type']
        if ds_type && ds_type != ds_alias[:type]
          # BUGFIX: expected and actual types were swapped in the original message —
          # ds_alias[:type] is what we expect, ds_type is what the domain has.
          fail "Wrong Data Source type - the '#{ds_alias[:type]}' type is expected but the Data Source '#{ds_alias[:alias]}' in the domain '#{domain}' has the '#{ds_type}' type"
        else
          res['availableAlias']['existingDataSource']['id']
        end
      end
    end
  end
end
64 changes: 59 additions & 5 deletions lib/gooddata/models/project.rb
Original file line number Diff line number Diff line change
Expand Up @@ -261,20 +261,25 @@ def transfer_etl(client, from_project, to_project)
# @option ads_output_stage_uri Uri of the source output stage. It must be in the same domain as the target project.
def transfer_processes(from_project, to_project, options = {})
options = GoodData::Helpers.symbolize_keys(options)
aliases = {}
to_project_processes = to_project.processes
additional_hidden_params = options[:additional_hidden_params] || {}
result = from_project.processes.uniq(&:name).map do |process|
fail "The process name #{process.name} must be unique in transfered project #{to_project}" if to_project_processes.count { |p| p.name == process.name } > 1
fail "The process name #{process.name} must be unique in transferred project #{to_project}" if to_project_processes.count { |p| p.name == process.name } > 1
next if process.type == :dataload || process.add_v2_component?
collect_process_aliases(process.data, from_project.client, aliases)

to_process = to_project_processes.find { |p| p.name == process.name }

data_sources = GoodData::Helpers.symbolize_keys_recursively!(process.data_sources)
data_sources = replace_data_source_ids(data_sources, to_project.client, aliases)
to_process = if process.path
to_process.delete if to_process
Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project, data_sources: process.data_sources)
Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project, data_sources: data_sources)
elsif process.component
to_process.delete if to_process
process_hash = GoodData::Helpers::DeepMergeableHash[GoodData::Helpers.symbolize_keys(process.to_hash)].deep_merge(additional_hidden_params)
process_hash = replace_process_data_source_ids(process_hash, to_project.client, aliases)
Process.deploy_component(process_hash, project: to_project, client: to_project.client)
else
Dir.mktmpdir('etl_transfer') do |dir|
Expand All @@ -283,11 +288,10 @@ def transfer_processes(from_project, to_project, options = {})
File.open(filename, 'w') do |f|
f << process.download
end

if to_process
to_process.deploy(filename, type: process.type, name: process.name, data_sources: process.data_sources)
to_process.deploy(filename, type: process.type, name: process.name, data_sources: data_sources)
else
to_project.deploy_process(filename, type: process.type, name: process.name, data_sources: process.data_sources)
to_project.deploy_process(filename, type: process.type, name: process.name, data_sources: data_sources)
end
end
end
Expand Down Expand Up @@ -318,6 +322,56 @@ def transfer_processes(from_project, to_project, options = {})
result.compact
end

# Collects the data source aliases referenced by a process definition, both
# from its 'dataSources' list and from its component's data source config,
# caching them in the +aliases+ accumulator.
#
# @param [Hash] process_data The raw process definition (string keys)
# @param [Object] client The REST client used to resolve data sources
# @param [Hash] aliases Accumulator mapping data source id => alias info
# @return [Hash] The updated +aliases+ accumulator
def collect_process_aliases(process_data, client, aliases)
  data_sources = process_data.dig('process', 'dataSources')
  unless data_sources.blank?
    # each, not map: we only want the side effect of caching each alias
    data_sources.each do |data_source|
      get_data_source_alias(data_source['id'], client, aliases)
    end
  end
  component = process_data.dig('process', 'component')
  get_data_source_alias(component['configLocation']['dataSourceConfig']['id'], client, aliases) if component&.dig('configLocation', 'dataSourceConfig')
  aliases
end

# Resolves the alias/type info for a data source id, memoizing the lookup
# in the +aliases+ cache so each data source is fetched at most once.
#
# @param [String] data_source_id The data source id to resolve
# @param [Object] client The REST client used for the lookup
# @param [Hash] aliases Cache mapping data source id => { :type, :alias }
# @return [Hash, nil] The cached alias info, or nil when the data source
#   has no alias (or could not be fetched)
def get_data_source_alias(data_source_id, client, aliases)
  return aliases[data_source_id] if aliases[data_source_id]

  fetched = GoodData::Helpers.get_data_source_by_id(data_source_id, client)
  ds_alias = fetched&.dig('dataSource', 'alias')
  if ds_alias
    aliases[data_source_id] = {
      :type => get_data_source_type(fetched),
      :alias => ds_alias
    }
  end
  aliases[data_source_id]
end

# Derives a data source's type from its connection info: the first key of
# the 'connectionInfo' hash, upcased (e.g. "snowflake" => "SNOWFLAKE").
#
# @param [Hash, nil] data_source_data The data source payload
# @return [String] The upcased type, or "" when the payload has no
#   connection info
def get_data_source_type(data_source_data)
  connection_info = data_source_data&.dig('dataSource', 'connectionInfo')
  return "" unless connection_info

  connection_info.first[0].upcase
end

# Rewrites the data source ids embedded in a component process definition so
# they reference the corresponding data sources in the target domain.
#
# @param [Hash] process_data Symbolized process definition (mutated in place)
# @param [Object] client The target project's REST client
# @param [Hash] aliases Map of source data source id => alias info
# @return [Hash] The process definition with translated data source ids
def replace_process_data_source_ids(process_data, client, aliases)
  ds_config = process_data.dig(:process, :component, :configLocation, :dataSourceConfig)
  ds_config[:id] = GoodData::Helpers.verify_data_source_alias(aliases[ds_config[:id]], client) if ds_config
  process_data[:process][:dataSources] = replace_data_source_ids(process_data[:process][:dataSources], client, aliases)
  process_data
end

# Translates a list of data source references into the ids of the matching
# data sources in the target domain.
#
# @param [Array<Hash>, nil] data_sources Entries with an :id key, or nil
# @param [Object] client The target project's REST client
# @param [Hash] aliases Map of source data source id => alias info
# @return [Array<Hash>] New list of { :id => translated_id } entries
#   (empty when the input is nil or empty)
def replace_data_source_ids(data_sources, client, aliases)
  return [] if data_sources.nil? || data_sources.empty?

  # map builds the result directly instead of pushing into a side array
  data_sources.map do |data_source|
    { :id => GoodData::Helpers.verify_data_source_alias(aliases[data_source[:id]], client) }
  end
end

def transfer_user_groups(from_project, to_project)
from_project.user_groups.map do |ug|
# migrate groups
Expand Down
6 changes: 5 additions & 1 deletion spec/unit/actions/synchronize_processes_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@
allow(dest_project).to receive(:client).and_return(dest_client)
allow(src_project).to receive(:add).and_return(add)
allow(dest_project).to receive(:add).and_return(add)
allow(src_project).to receive(:client).and_return(development_client)

allow(add).to receive(:output_stage).and_return(output_stage)
allow(output_stage).to receive(:output_stage_prefix).and_return('prefix')
Expand All @@ -80,6 +81,8 @@
allow(process).to receive(:path).and_return(nil)
allow(process).to receive(:component).and_return(process_component)
allow(process).to receive(:to_hash).and_return(process_hash)
allow(process).to receive(:data).and_return(process_hash)
allow(process).to receive(:data_sources).and_return([])

allow(process).to receive(:project).and_return(src_project)
allow(process).to receive(:add_v2_component?).and_return(false)
Expand All @@ -93,7 +96,8 @@
secretConfig: {
key: 'val'
}
}
},
dataSources: []
}
},
client: dest_client,
Expand Down