Module: Spark::Build

Defined in:
lib/spark/build.rb

Constant Summary

DEFAULT_SCALA_VERSION  = '2.10.4'
DEFAULT_CORE_VERSION   = '2.10'
DEFAULT_SPARK_VERSION  = '1.5.0'
DEFAULT_HADOOP_VERSION = '1.0.4'
SBT       = 'sbt/sbt'
SBT_DEPS  = 'assemblyPackageDependency'
SBT_EXT   = 'package'
SBT_CLEAN = 'clean'

Class Method Summary

.build(options = {}) ⇒ Object

Class Method Details

.build(options = {}) ⇒ Object
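
Builds the gem's Scala extension by running sbt with the requested Scala, Spark core, Spark, and Hadoop versions. Any option left unset falls back to the matching DEFAULT_* constant above; raises Spark::BuildError when the sbt run fails.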



# File 'lib/spark/build.rb', line 14

def self.build(options={})
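  # Resolve each option, falling back to the DEFAULT_* constants.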
  scala_version      = options[:scala_version]      || DEFAULT_SCALA_VERSION
  spark_core_version = options[:spark_core_version] || DEFAULT_CORE_VERSION
  spark_version      = options[:spark_version]      || DEFAULT_SPARK_VERSION
  hadoop_version     = options[:hadoop_version]     || DEFAULT_HADOOP_VERSION
  target             = options[:target]             || Spark.target_dir
  only_ext           = options[:only_ext]           || false

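  # Environment variables passed through to the sbt process.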
  env = {
    'SCALA_VERSION' => scala_version,
    'SPARK_VERSION' => spark_version,
    'SPARK_CORE_VERSION' => spark_core_version,
    'HADOOP_VERSION' => hadoop_version,
    'TARGET_DIR' => target
  }

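  # Assemble the sbt command line: always 'package'; bundle dependencies
  # unless :only_ext; append 'clean' except when debugging ($DEBUG).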
  cmd = [SBT]
  cmd << SBT_EXT
  cmd << SBT_DEPS unless only_ext
  cmd << SBT_CLEAN unless $DEBUG

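  # Run sbt inside the extension directory; Kernel.system returns a falsy
  # value when the command fails, which is surfaced as a BuildError.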
  Dir.chdir(Spark.spark_ext_dir) do
    unless Kernel.system(env, cmd.join(' '))
      raise Spark::BuildError, 'Spark cannot be assembled.'
    end
  end
end
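
For illustration, a minimal caller-side sketch. The option keys mirror the fallbacks in the method body above; the version values and the rescue wrapper are illustrative assumptions, not part of this module:

# Assemble against chosen versions; omitted keys use the DEFAULT_* constants.
Spark::Build.build(
  spark_version:  '1.5.0',
  hadoop_version: '2.6.0',  # illustrative value
  only_ext:       true      # skip bundling dependencies (assemblyPackageDependency)
)

# The method raises on failure, so callers typically wrap it:
begin
  Spark::Build.build
rescue Spark::BuildError => e
  warn "sbt build failed: #{e.message}"
end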