# File 'lib/spark/build.rb', line 14
def self.build(options={})
  scala_version      = options[:scala_version]      || DEFAULT_SCALA_VERSION
  spark_core_version = options[:spark_core_version] || DEFAULT_CORE_VERSION
  spark_version      = options[:spark_version]      || DEFAULT_SPARK_VERSION
  hadoop_version     = options[:hadoop_version]     || DEFAULT_HADOOP_VERSION
  target             = options[:target]             || Spark.target_dir
  only_ext           = options[:only_ext]           || false

  # Versions and target directory are handed to sbt via the environment.
  env = {
    'SCALA_VERSION'      => scala_version,
    'SPARK_VERSION'      => spark_version,
    'SPARK_CORE_VERSION' => spark_core_version,
    'HADOOP_VERSION'     => hadoop_version,
    'TARGET_DIR'         => target
  }

  # Assemble the sbt command line: always build the extension, fetch
  # dependencies unless only_ext is set, and clean unless debugging.
  cmd = [SBT]
  cmd << SBT_EXT
  cmd << SBT_DEPS unless only_ext
  cmd << SBT_CLEAN unless $DEBUG

  Dir.chdir(Spark.spark_ext_dir) do
    unless Kernel.system(env, cmd.join(' '))
      raise Spark::BuildError, 'Spark cannot be assembled.'
    end
  end
end
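
For reference, an invocation might look like the sketch below. The option keys mirror the defaults resolved above; the version strings are placeholders chosen for illustration, not values taken from this gem.

# Build against specific versions; any option left out falls back to the
# DEFAULT_* constants and Spark.target_dir shown in the source above.
Spark::Build.build(
  spark_version:  '1.6.0',  # placeholder version string
  hadoop_version: '2.7',    # placeholder version string
  only_ext:       true      # skip fetching dependencies (SBT_DEPS)
)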