303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
|
# File 'lib/gooddata/models/project.rb', line 303
# Transfers all transferable ETL processes from one project to another, then
# deletes processes in the target project that no longer have a counterpart
# in the source.
#
# Dataload processes and ADDv2 component processes are skipped here; the
# output stage (Automated Data Distribution) is handled separately by
# +transfer_output_stage+.
#
# @param from_project [GoodData::Project] source project
# @param to_project [GoodData::Project] target project
# @param options [Hash] transfer options
# @option options [Hash] :additional_hidden_params extra (hidden) parameters
#   deep-merged into component process definitions before deployment
# @return [Array<Hash>] one result hash per transferred process with keys
#   +:from+, +:to+, +:name+, +:status+
def transfer_processes(from_project, to_project, options = {})
  options = GoodData::Helpers.symbolize_keys(options)
  aliases = {}
  to_project_processes = to_project.processes
  additional_hidden_params = options[:additional_hidden_params] || {}
  result = from_project.processes.uniq(&:name).map do |process|
    fail "The process name #{process.name} must be unique in transferred project #{to_project}" if to_project_processes.count { |p| p.name == process.name } > 1
    # Dataload is transferred via transfer_output_stage below; ADDv2
    # component processes are not transferable through this path.
    next if process.type == :dataload || process.add_v2_component?

    collect_process_aliases(process.data, from_project.client, aliases)

    to_process = to_project_processes.find { |p| p.name == process.name }

    data_sources = GoodData::Helpers.symbolize_keys_recursively!(process.data_sources)
    data_sources = replace_data_source_ids(data_sources, to_project.client, aliases)
    to_process = if process.path
                   # Appstore processes cannot be redeployed in place -
                   # delete any existing target copy and deploy fresh.
                   to_process.delete if to_process
                   Process.deploy_from_appstore(process.path, name: process.name, client: to_project.client, project: to_project, data_sources: data_sources)
                 elsif process.component
                   to_process.delete if to_process
                   process_hash = GoodData::Helpers::DeepMergeableHash[GoodData::Helpers.symbolize_keys(process.to_hash)].deep_merge(additional_hidden_params)
                   process_hash = replace_process_data_source_ids(process_hash, to_project.client, aliases)
                   Process.deploy_component(process_hash, project: to_project, client: to_project.client)
                 else
                   # Plain ETL process: download the archive and (re)deploy
                   # it into the target project.
                   Dir.mktmpdir('etl_transfer') do |dir|
                     dir = Pathname(dir)
                     filename = dir + 'process.zip'
                     File.open(filename, 'w') do |f|
                       f << process.download
                     end
                     if to_process
                       to_process.deploy(filename, type: process.type, name: process.name, data_sources: data_sources)
                     else
                       to_project.deploy_process(filename, type: process.type, name: process.name, data_sources: data_sources)
                     end
                   end
                 end

    {
      from: from_project.pid,
      to: to_project.pid,
      name: process.name,
      status: to_process ? 'successful' : 'failed'
    }
  end

  transfer_output_stage(from_project, to_project, options)
  result << {
    from: from_project.pid,
    to: to_project.pid,
    name: 'Automated Data Distribution',
    status: 'successful'
  }

  # Cleanup: delete target-project processes with no [name, type] counterpart
  # in the source. BUGFIX: the dataload guard previously tested `procs[2]`
  # (the third triple of the grouped list, always nil for the singleton
  # groups selected here), so dataload processes were never excluded from
  # deletion; the type lives in the group key and must be read from there.
  res = (from_project.processes + to_project.processes).map { |p| [p, p.name, p.type] }
  res.group_by { |x| [x[1], x[2]] }
     .select { |(_, type), procs| procs.length == 1 && type != :dataload }
     .reject { |_, procs| procs.first.first.add_v2_component? }
     .flat_map { |_, procs| procs.select { |p| p[0].project.pid == to_project.pid }.map { |p| p[0] } }
     .peach(&:delete)
  result.compact
end
|