spark: activate R backend

Christine Koppelt 2017-03-26 18:17:17 +02:00
parent 6c17ad677c
commit 258e3524e2


@@ -1,4 +1,5 @@
{ stdenv, fetchzip, makeWrapper, jre, pythonPackages
, RSupport ? true, R
, mesosSupport ? true, mesos
, version
}:
@@ -30,6 +31,7 @@ stdenv.mkDerivation rec {
};
buildInputs = [ makeWrapper jre pythonPackages.python pythonPackages.numpy ]
++ optional RSupport R
++ optional mesosSupport mesos;
untarDir = "${name}-bin-${hadoopVersion}";
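(Note: optional here presumably comes from stdenv.lib, brought into scope elsewhere in this file and not shown in the diff. It wraps its second argument in a singleton list when the condition is true and yields an empty list otherwise, so R and mesos only land in buildInputs when the corresponding flag is set. A minimal sketch of that behaviour:

optional true R     # evaluates to [ R ]
optional false R    # evaluates to [ ]
)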
@@ -46,6 +48,9 @@ stdenv.mkDerivation rec {
export SPARK_HOME="$out/lib/${untarDir}"
export PYSPARK_PYTHON="${pythonPackages.python}/bin/${pythonPackages.python.executable}"
export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
${optionalString RSupport
''export SPARKR_R_SHELL="${R}/bin/R"
export PATH=$PATH:"${R}/bin"''}
${optionalString mesosSupport
''export MESOS_NATIVE_LIBRARY="$MESOS_NATIVE_LIBRARY"''}
EOF
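(With RSupport left at its default of true, the two R-related lines above would expand to roughly the following in the generated file; the store paths are hypothetical and abbreviated, shown for illustration only:

export SPARKR_R_SHELL="/nix/store/...-R-.../bin/R"
export PATH=$PATH:"/nix/store/...-R-.../bin"
)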
@@ -57,7 +62,7 @@ stdenv.mkDerivation rec {
'';
meta = {
description = "Lightning-fast cluster computing";
description = "Apache Spark is a fast and general engine for large-scale data processing";
homepage = "http://spark.apache.org";
license = stdenv.lib.licenses.asl20;
platforms = stdenv.lib.platforms.all;
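
Since RSupport and mesosSupport are ordinary function arguments with defaults, a consumer can flip them through the usual override mechanism, assuming the expression is instantiated with callPackage (which supplies .override) and exposed as spark in the top-level package set. A hypothetical sketch:

# Build Spark without the R backend, and likewise without mesos support.
spark.override { RSupport = false; }
spark.override { mesosSupport = false; }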