author     Alfredo Adolfo Gimenez <alfredo.gimenez@gmail.com>   2016-10-02 18:49:30 -0700
committer  Todd Gamblin <tgamblin@llnl.gov>                     2016-10-02 18:49:30 -0700
commit     1ea958221877d992ec733b8dfb3dea8e2466a159 (patch)
tree       08c004815b2e6f468bde29a8d9192a9353a3c8f7 /var
parent     b22956bab2646ae52e118c5791a58cf46003bef8 (diff)
Added hadoop, spark, and variant spark+hadoop (#1833)
* Added hadoop, spark, and variant spark+hadoop
* Docstrings, dependency types, urls, copyright
* Flake8 fixes, link dependency for hadoop
* Build type for spark, env problem setting JAVA_HOME
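
As a rough usage sketch (assuming a Spack checkout that contains these two new packages), the bundled stack is requested through the spec syntax: spack install hadoop@2.6.4 installs Hadoop on its own, while spack install spark@2.0.0 +hadoop additionally pulls in the hadoop package and configures SPARK_DIST_CLASSPATH. Without the +hadoop variant, the same "without-hadoop" binary tarball is installed but no Hadoop classpath is set up.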
Diffstat (limited to 'var')
-rw-r--r--  var/spack/repos/builtin/packages/hadoop/package.py  53
-rw-r--r--  var/spack/repos/builtin/packages/spark/package.py   75
2 files changed, 128 insertions(+), 0 deletions(-)
diff --git a/var/spack/repos/builtin/packages/hadoop/package.py b/var/spack/repos/builtin/packages/hadoop/package.py
new file mode 100644
index 0000000000..fdcac10335
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hadoop/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Alfredo Gimenez, gimenez1@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Hadoop(Package):
+ """The Apache Hadoop software library is a framework that
+ allows for the distributed processing of large data sets
+ across clusters of computers using simple programming models.
+ """
+
+ homepage = "http://hadoop.apache.org/"
+ url = "http://mirrors.ocf.berkeley.edu/apache/hadoop/common/hadoop-2.6.4/hadoop-2.6.4.tar.gz"
+
+ version('2.6.4', '37019f13d7dcd819727be158440b9442')
+
+ depends_on('jdk', type='run')
+
+ def install(self, spec, prefix):
+
+ def install_dir(dirname):
+ install_tree(dirname, join_path(prefix, dirname))
+
+ install_dir('bin')
+ install_dir('etc')
+ install_dir('include')
+ install_dir('lib')
+ install_dir('libexec')
+ install_dir('sbin')
+ install_dir('share')
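
The install step above simply copies the prebuilt Hadoop distribution into the prefix. A minimal equivalent sketch, assuming only the install_tree and join_path helpers already imported at the top of the file, would express the same copies as a loop:

    def install(self, spec, prefix):
        # Hadoop ships as a prebuilt binary distribution; copy each
        # top-level directory of the unpacked tarball into the prefix.
        for dirname in ('bin', 'etc', 'include', 'lib',
                        'libexec', 'sbin', 'share'):
            install_tree(dirname, join_path(prefix, dirname))

Either form does the same thing; the explicit per-directory calls in the committed file just make the installed layout easy to scan.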
diff --git a/var/spack/repos/builtin/packages/spark/package.py b/var/spack/repos/builtin/packages/spark/package.py
new file mode 100644
index 0000000000..10b0ebed7f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/spark/package.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Alfredo Gimenez, gimenez1@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+import shutil
+
+
+class Spark(Package):
+ """Apache Spark is a fast and general engine
+ for large-scale data processing.
+ """
+
+ homepage = "http://spark.apache.org"
+ url = "http://mirrors.ocf.berkeley.edu/apache/spark/spark-2.0.0/spark-2.0.0-bin-without-hadoop.tgz"
+
+ variant('hadoop', default=False,
+ description='Build with Hadoop')
+
+ depends_on('jdk', type=('build', 'run'))
+ depends_on('hadoop', when='+hadoop', type=('build', 'run'))
+
+ version('2.0.0', '8a5307d973da6949a385aefb6ff747bb')
+ version('1.6.2', '304394fbe2899211217f0cd9e9b2b5d9')
+ version('1.6.1', 'fcf4961649f15af1fea78c882e65b001')
+
+ def install(self, spec, prefix):
+
+ def install_dir(dirname):
+ install_tree(dirname, join_path(prefix, dirname))
+
+ install_dir('bin')
+ install_dir('conf')
+ install_dir('jars')
+ install_dir('python')
+ install_dir('R')
+ install_dir('sbin')
+ install_dir('yarn')
+
+        # Required for Spark to recognize this as a binary distribution
+        shutil.copy('RELEASE', prefix)
+
+    @when('+hadoop')
+    def setup_environment(self, spack_env, run_env):
+        # Point JAVA_HOME at the JDK dependency for both the build
+        # environment and the generated run-time environment.
+        spack_env.set('JAVA_HOME', self.spec['jdk'].prefix)
+        run_env.set('JAVA_HOME', self.spec['jdk'].prefix)
+
+        # Query hadoop for its classpath so the "without-hadoop" Spark
+        # distribution can locate the Hadoop jars at run time.
+        hadoop_bin_path = join_path(self.spec['hadoop'].prefix.bin, 'hadoop')
+        hadoop_bin = Executable(hadoop_bin_path)
+        hadoop_classpath = hadoop_bin('classpath', output=str)
+
+        run_env.set('SPARK_DIST_CLASSPATH', hadoop_classpath)
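
A possible refinement, not part of this commit: because setup_environment is guarded by @when('+hadoop'), JAVA_HOME is only exported for the +hadoop variant even though Spark always depends on the JDK. A sketch of an unguarded version, assuming the same spack_env/run_env objects and the usual '+hadoop' in self.spec check, could look like:

    def setup_environment(self, spack_env, run_env):
        # Spark needs a JDK with or without the Hadoop variant.
        spack_env.set('JAVA_HOME', self.spec['jdk'].prefix)
        run_env.set('JAVA_HOME', self.spec['jdk'].prefix)

        if '+hadoop' in self.spec:
            # Reuse hadoop's own classpath report so the "without-hadoop"
            # Spark binaries can find the Hadoop jars at run time.
            hadoop = Executable(
                join_path(self.spec['hadoop'].prefix.bin, 'hadoop'))
            hadoop_classpath = hadoop('classpath', output=str)
            run_env.set('SPARK_DIST_CLASSPATH', hadoop_classpath.strip())

The .strip() only drops the trailing newline printed by the hadoop command; for the +hadoop case this mirrors the method in the diff above.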