# Maintainer: Tao-Yi Lee
# PKGBUILD for the prebuilt Apache Spark binary distribution (Hadoop 2.6 build),
# installed under /opt, with a Jupyter "pyspark" kernel spec.
pkgname=apache_spark
_pkgname=spark
pkgver=1.6.2
_fullpkgname="${_pkgname}-${pkgver}-bin-hadoop2.6"
pkgrel=1
pkgdesc="Apache Spark™ is a fast and general engine for large-scale data processing."
arch=('i686' 'x86_64')
url="http://spark.apache.org"
license=('apache')
groups=()
depends=('java-runtime-common')
provides=()
conflicts=()
replaces=()
backup=()
options=(!debug !strip)
install=
source=("http://mirror.nexcess.net/apache/$_pkgname/$_pkgname-$pkgver/$_fullpkgname.tgz"
        "https://gist.githubusercontent.com/thrasibule/87a092d244d560aa5d081ea6a0a0db4c/raw/ef32a1fcad2306fbccca33a36c1fb6ccb3dee405/kernel.json")
noextract=("$_fullpkgname.tgz")
md5sums=('604936f2bd8af999d0d624b370f5c4b1'
         'e3137862727df13ccf1194248a4f50aa')
PKGEXT='.pkg.tar.gz'

package() {
  local _etc_profiled="${pkgdir}/etc/profile.d"
  # Final install location of the Spark tree (as seen at runtime).
  local _spark_home="/opt/${_fullpkgname}"

  mkdir -p "${pkgdir}/opt" "${_etc_profiled}"

  # Unpack the prebuilt Spark tree under /opt (tarball was kept unextracted
  # via noextract so we can extract it straight into $pkgdir).
  msg "Spark will be installed under ${_spark_home}"
  tar -xf "${_fullpkgname}.tgz" -C "${pkgdir}/opt"

  # Drop the bundled py4j and strip its references from the pyspark launcher;
  # presumably a system py4j package is expected instead — TODO confirm.
  pushd "${pkgdir}${_spark_home}/python/lib/" > /dev/null
  rm py4j-0.9-src.zip PY4J_LICENSE.txt
  popd > /dev/null
  sed -i '/py4j/d' "${pkgdir}${_spark_home}/bin/pyspark"

  # Profile snippet: add Spark's bin dir to PATH and export SPARK_HOME.
  # ($PATH in the format string is intentionally literal — expanded at login.)
  {
    printf 'export PATH=$PATH:%s/bin\n' "${_spark_home}"
    printf 'export SPARK_HOME=%s\n' "${_spark_home}"
  } >> "${_etc_profiled}/${pkgname}.sh"

  # BUG FIX: the original substituted ${SPARK_HOME} from the *build*
  # environment (normally unset), so the $SPARK_HOME placeholder in
  # kernel.json was replaced with an empty string. Substitute the actual
  # install prefix instead.
  sed -i "s|\$SPARK_HOME|${_spark_home}|;s|\$pkgver|${pkgver}|" kernel.json
  install -Dm644 kernel.json "${pkgdir}/usr/share/jupyter/kernels/pyspark/kernel.json"
}

# vim:set ts=2 sw=2 et: