Index: ports/UIDs =================================================================== RCS file: /home/ncvs/ports/UIDs,v retrieving revision 1.181 diff -u -r1.181 UIDs --- ports/UIDs 31 Oct 2011 14:24:04 -0000 1.181 +++ ports/UIDs 29 Nov 2011 15:10:38 -0000 @@ -227,4 +227,5 @@ openerpd:*:951:951::0:0:Openerpd user:/nonexistent:/sbin/nologin bitten-slave:*:952:952:daemon:0:0:Bitten slave user:/var/lib/bitten-slave:/sbin/nologin _neubot:*:953:953::0:0:neubot daemon:/nonexistent:/sbin/nologin +hadoop:*:954:954::0:0:& user:/nonexistent:/bin/sh nobody:*:65534:65534::0:0:Unprivileged user:/nonexistent:/usr/sbin/nologin Index: ports/devel/Makefile =================================================================== RCS file: /home/ncvs/ports/devel/Makefile,v retrieving revision 1.4655 diff -u -r1.4655 Makefile --- ports/devel/Makefile 9 Nov 2011 01:31:59 -0000 1.4655 +++ ports/devel/Makefile 10 Nov 2011 15:20:27 -0000 @@ -547,6 +547,7 @@ SUBDIR += hachoir-core SUBDIR += hachoir-parser SUBDIR += hachoir-regex + SUBDIR += hadoop SUBDIR += happydoc SUBDIR += hapy SUBDIR += hcs12mem Index: ports/databases/Makefile =================================================================== RCS file: /home/ncvs/ports/databases/Makefile,v retrieving revision 1.1050 diff -u -r1.1050 Makefile --- ports/databases/Makefile 5 Nov 2011 18:41:27 -0000 1.1050 +++ ports/databases/Makefile 10 Nov 2011 15:20:47 -0000 @@ -104,6 +104,7 @@ SUBDIR += gtksql SUBDIR += hamsterdb SUBDIR += hiredis + SUBDIR += hive SUBDIR += hk_classes SUBDIR += hs-mysql SUBDIR += hsqldb Index: ports/textproc/Makefile =================================================================== RCS file: /home/ncvs/ports/textproc/Makefile,v retrieving revision 1.1678 diff -u -r1.1678 Makefile --- ports/textproc/Makefile 5 Nov 2011 17:09:54 -0000 1.1678 +++ ports/textproc/Makefile 10 Nov 2011 15:21:18 -0000 @@ -1045,6 +1045,7 @@ SUBDIR += pecl-yaml SUBDIR += perl2html SUBDIR += permute + SUBDIR += pig SUBDIR += php4-ctype SUBDIR += php4-domxml SUBDIR += php4-pspell Index: ports/devel/hadoop/Makefile =================================================================== RCS file: ports/devel/hadoop/Makefile diff -N ports/devel/hadoop/Makefile --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/Makefile 29 Nov 2011 15:11:03 -0000 @@ -0,0 +1,143 @@ +# New ports collection makefile for: hadoop +# Date created: 2011-08-07 +# Whom: Clement Laforet +# +# $FreeBSD$ + +PORTNAME= hadoop +PORTVERSION= 0.20.203.0 +CATEGORIES= devel java +MASTER_SITES= ${MASTER_SITE_APACHE} +MASTER_SITE_SUBDIR= ${PORTNAME}/core/${PORTNAME}-${PORTVERSION} +DISTNAME= ${PORTNAME}-${PORTVERSION}rc1 +DIST_SUBDIR= hadoop + +MAINTAINER= clement@FreeBSD.org +COMMENT= Apache Map/Reduce framework + +LICENSE= ASL +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +USE_JAVA= yes +JAVA_VERSION= 1.6 +USE_ANT= yes +ONLY_FOR_ARCHS= amd64 i386 + +WRKSRC= ${WRKDIR}/${PORTNAME}-${PORTVERSION} + +BUILD_DEPENDS+= ${LOCALBASE}/bin/gmake:${PORTSDIR}/devel/gmake +RUN_DEPENDS+= ${LOCALBASE}/bin/bash:${PORTSDIR}/shells/bash + +_HADOOP_DIR= share/${PORTNAME} +_HADOOP_ETC= etc/${PORTNAME} +HADOOP_HOME= ${PREFIX}/${_HADOOP_DIR} +HADOOP_ETC= ${PREFIX}/${_HADOOP_ETC} + +HADOOP_LOGDIR= /var/log/${PORTNAME} +HADOOP_RUNDIR= /var/run/${PORTNAME} +HADOOP_BIN= ${PREFIX}/bin/${PORTNAME} + +HADOOP_USER?= hadoop +HADOOP_GROUP?= ${HADOOP_USER} +HADOOP_UID?= 954 +HADOOP_GID?= ${HADOOP_UID} + +SUB_FILES+= pkg-install pkg-deinstall hadoop 000.java_home.env +USE_RC_SUBR+= tasktracker jobtracker datanode namenode secondarynamenode + 
+PLIST_SUB+=	PORTVERSION="${PORTVERSION}" \
+		HADOOP_HOME="${_HADOOP_DIR}" \
+		HADOOP_ETC="${_HADOOP_ETC}"
+SUB_LIST=	HADOOP_LOGDIR="${HADOOP_LOGDIR}" \
+		HADOOP_RUNDIR="${HADOOP_RUNDIR}" \
+		HADOOP_USER="${HADOOP_USER}" \
+		HADOOP_GROUP="${HADOOP_GROUP}" \
+		HADOOP_UID="${HADOOP_UID}" \
+		HADOOP_GID="${HADOOP_GID}" \
+		HADOOP_HOME="${HADOOP_HOME}" \
+		HADOOP_ETC="${HADOOP_ETC}" \
+		JAVA_HOME="${JAVA_HOME}"
+
+PORTDOCS=	*
+FIX_PERMS=	src/c++/pipes/install-sh src/c++/utils/install-sh src/c++/libhdfs/install-sh \
+		src/c++/libhdfs/tests/test-libhdfs.sh
+
+FIX_DOCS=	docs/cn/skin/css docs/cn/skin/scripts docs/cn/skin/translations \
+		docs/jdiff docs/skin/css docs/skin/scripts docs/skin/translations
+
+DOC=		CHANGES.txt LICENSE.txt NOTICE.txt README.txt
+
+DEFAULTS=	src/core/core-default.xml src/hdfs/hdfs-default.xml src/mapred/mapred-default.xml hadoop-examples-${PORTVERSION}.jar
+
+DIST=		bin contrib hadoop-ant-${PORTVERSION}.jar hadoop-core-${PORTVERSION}.jar \
+		hadoop-test-${PORTVERSION}.jar hadoop-tools-${PORTVERSION}.jar lib webapps
+CONF=		capacity-scheduler.xml configuration.xsl core-site.xml hadoop-env.sh hadoop-metrics2.properties \
+		hadoop-policy.xml hdfs-site.xml log4j.properties mapred-queue-acls.xml mapred-site.xml taskcontroller.cfg
+
+MAKE_ARGS=	-Dbuild.classes=${WRKSRC}/hadoop-core-${PORTVERSION}.jar -Dversion=${PORTVERSION} -Dcompile.native=true \
+		-Dcompile.c++=true -Dmake.cmd=${GMAKE} -Dlibhdfs=1 -Dlibrecordio=true -Dskip.record-parser=true
+ALL_TARGET=	compile-core-native compile-c++ compile-c++-libhdfs compile-c++-pipes compile-c++-utils
+
+.include <bsd.port.pre.mk>
+
+.if ${ARCH} == "amd64"
+_HADOOP_ARCH=	FreeBSD-amd64-64
+.else
+_HADOOP_ARCH=	FreeBSD-i386-32
+.endif
+PLIST_SUB+=	HADOOP_ARCH=${_HADOOP_ARCH}
+
+pre-build:
+.for f in ${FIX_PERMS}
+	@${CHMOD} +x ${WRKSRC}/${f}
+.endfor
+.for d in ${FIX_DOCS}
+	@${TOUCH} ${WRKSRC}/${d}/.empty
+.endfor
+
+#do-build:
+#	@cd ${WRKSRC}; \
+#	${ANT} -Dbuild.classes=${WRKSRC}/hadoop-core-${PORTVERSION}.jar -Dversion=${PORTVERSION} -Dcompile.native=true \
+#	-Dcompile.c++=true -Dmake.cmd=${GMAKE} compile-core-native compile-c++ compile-c++-libhdfs compile-c++-pipes \
+#	compile-c++-utils -Dlibhdfs=1 -Dlibrecordio=true -Dskip.record-parser=true
+
+post-build:
+	@cd ${WRKSRC}; ${ANT} FreeBSD-dist
+	@${TAR} -cf - -C ${WRKSRC}/build/c++/ ${_HADOOP_ARCH}/lib ${_HADOOP_ARCH}/include | ${TAR} xf - -C ${WRKSRC}/c++/
+
+pre-install:
+	@${SETENV} PKG_PREFIX=${PREFIX} ${SH} ${PKGINSTALL} ${PKGNAME} PRE-INSTALL
+
+do-install:
+	@${MKDIR} ${HADOOP_HOME}
+	@${TAR} -cf - -C ${WRKSRC}/ ${DIST} | ${TAR} -xf - -C ${HADOOP_HOME}
+
+	@${MKDIR} ${HADOOP_ETC} ${HADOOP_ETC}/envvars.d
+	@${INSTALL_DATA} ${WRKDIR}/000.java_home.env ${HADOOP_ETC}/envvars.d/
+	@${ECHO} "export HADOOP_HOME=${HADOOP_HOME}" > ${HADOOP_ETC}/envvars.d/001.hadoop_home.env
+	@${ECHO} "export HADOOP_CONF_DIR=${HADOOP_ETC}" > ${HADOOP_ETC}/envvars.d/002.hadoop_conf.env
+	@${ECHO} "export HADOOP_LOG_DIR=${HADOOP_LOGDIR}" > ${HADOOP_ETC}/envvars.d/003.hadoop_log.env
+	@${MKDIR} ${EXAMPLESDIR}
+	@${TAR} -cf - -C ${WRKSRC}/c++/${_HADOOP_ARCH}/ include lib | ${TAR} xf - -C ${PREFIX}
+
+.for f in ${DEFAULTS}
+	@${INSTALL_DATA} ${WRKSRC}/${f} ${EXAMPLESDIR}
+.endfor
+
+.if !defined(NOPORTDOCS)
+	@${MKDIR} ${DOCSDIR}
+	@${TAR} -cf - -C ${WRKSRC}/ docs | ${TAR} -xf - -C ${DOCSDIR}
+.for f in ${DOC}
+	@${INSTALL_DATA} ${WRKSRC}/${f} ${DOCSDIR}
+.endfor
+.endif
+	@${INSTALL_SCRIPT} ${WRKDIR}/hadoop ${PREFIX}/bin/
+	@cd ${WRKSRC}; ${COPYTREE_SHARE} conf ${EXAMPLESDIR}
+.for f in ${CONF}
+	@[ -f ${HADOOP_ETC}/${f} ] || ${INSTALL_DATA} ${EXAMPLESDIR}/conf/${f} ${HADOOP_ETC}
+.endfor
+
+post-install:
+	@${SETENV} PKG_PREFIX=${PREFIX} ${SH} ${PKGINSTALL} ${PKGNAME} POST-INSTALL
+
+.include <bsd.port.post.mk>
Index: ports/devel/hadoop/SED
===================================================================
RCS file: ports/devel/hadoop/SED
diff -N ports/devel/hadoop/SED
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/SED	10 Nov 2011 10:17:23 -0000
@@ -0,0 +1 @@
+for i in jobtracker datanode namenode secondarynamenode ; do sed "s/tasktracker/${i}/g" tasktracker.in > ${i}.in ; done
Index: ports/devel/hadoop/distinfo
===================================================================
RCS file: ports/devel/hadoop/distinfo
diff -N ports/devel/hadoop/distinfo
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/distinfo	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,2 @@
+SHA256 (hadoop/hadoop-0.20.203.0rc1.tar.gz) = 05cab89bd52d9d3c5e0054107480e3d5286fe3b1104413c639fa416997926c53
+SIZE (hadoop/hadoop-0.20.203.0rc1.tar.gz) = 60569605
Index: ports/devel/hadoop/pkg-descr
===================================================================
RCS file: ports/devel/hadoop/pkg-descr
diff -N ports/devel/hadoop/pkg-descr
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/pkg-descr	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,5 @@
+The Apache Hadoop software library is a framework that allows for the
+distributed processing of large data sets across clusters of computers
+using a simple programming model.
+
+WWW: http://hadoop.apache.org/
Index: ports/devel/hadoop/pkg-plist
===================================================================
RCS file: ports/devel/hadoop/pkg-plist
diff -N ports/devel/hadoop/pkg-plist
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/pkg-plist	4 Nov 2011 13:09:42 -0000
@@ -0,0 +1,341 @@
+bin/hadoop
+%%HADOOP_ETC%%/envvars.d/000.java_home.env
+%%HADOOP_ETC%%/envvars.d/001.hadoop_home.env
+%%HADOOP_ETC%%/envvars.d/002.hadoop_conf.env
+%%HADOOP_ETC%%/envvars.d/003.hadoop_log.env
+%%HADOOP_HOME%%/bin/hadoop
+%%HADOOP_HOME%%/bin/hadoop-config.sh
+%%HADOOP_HOME%%/bin/hadoop-daemon.sh
+%%HADOOP_HOME%%/bin/hadoop-daemons.sh
+%%HADOOP_HOME%%/bin/jsvc
+%%HADOOP_HOME%%/bin/rcc
+%%HADOOP_HOME%%/bin/slaves.sh
+%%HADOOP_HOME%%/bin/start-all.sh
+%%HADOOP_HOME%%/bin/start-balancer.sh
+%%HADOOP_HOME%%/bin/start-dfs.sh
+%%HADOOP_HOME%%/bin/start-jobhistoryserver.sh
+%%HADOOP_HOME%%/bin/start-mapred.sh
+%%HADOOP_HOME%%/bin/stop-all.sh
+%%HADOOP_HOME%%/bin/stop-balancer.sh
+%%HADOOP_HOME%%/bin/stop-dfs.sh
+%%HADOOP_HOME%%/bin/stop-jobhistoryserver.sh
+%%HADOOP_HOME%%/bin/stop-mapred.sh
+%%HADOOP_HOME%%/contrib/capacity-scheduler/hadoop-capacity-scheduler-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/datajoin/hadoop-datajoin-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/eclipse-plugin/hadoop-eclipse-plugin-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/failmon/hadoop-failmon-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/fairscheduler/hadoop-fairscheduler-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/gridmix/hadoop-gridmix-%%PORTVERSION%%.jar
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-config.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/start-hdfsproxy.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/bin/stop-hdfsproxy.sh
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/configuration.xsl
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-default.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-env.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-hosts +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/log4j.properties +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/ssl-server.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/tomcat-forward-web.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/tomcat-web.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/user-certs.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/user-permissions.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/README +%%HADOOP_HOME%%/contrib/hdfsproxy/build.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/hdfsproxy-2.0.jar +%%HADOOP_HOME%%/contrib/hod/bin/VERSION +%%HADOOP_HOME%%/contrib/hod/bin/checknodes +%%HADOOP_HOME%%/contrib/hod/bin/hod +%%HADOOP_HOME%%/contrib/hod/bin/hodcleanup +%%HADOOP_HOME%%/contrib/hod/bin/hodring +%%HADOOP_HOME%%/contrib/hod/bin/ringmaster +%%HADOOP_HOME%%/contrib/hod/bin/verify-account +%%HADOOP_HOME%%/contrib/hod/conf/hodrc +%%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers/goldAllocationManager.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/allocationManagerUtil.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/desc.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/descGenerator.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/hodsvc.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/logger.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/miniHTMLParser.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/nodepoolutil.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/setup.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/socketServers.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/tcp.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/threads.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/types.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/util.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/xmlrpc.py +%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/hdfs.py +%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/mapred.py +%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/service.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/hadoop.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/hod.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/nodePool.py +%%HADOOP_HOME%%/contrib/hod/hodlib/HodRing/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/HodRing/hodRing.py +%%HADOOP_HOME%%/contrib/hod/hodlib/NodePools/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/NodePools/torque.py +%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/idleJobTracker.py +%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/ringMaster.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers/torque.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy/serviceProxy.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry/serviceRegistry.py +%%HADOOP_HOME%%/contrib/hod/hodlib/__init__.py +%%HADOOP_HOME%%/contrib/hod/ivy/libraries.properties +%%HADOOP_HOME%%/contrib/hod/support/checklimits.sh +%%HADOOP_HOME%%/contrib/hod/support/logcondense.py +%%HADOOP_HOME%%/contrib/hod/testing/__init__.py 
+%%HADOOP_HOME%%/contrib/hod/testing/helper.py +%%HADOOP_HOME%%/contrib/hod/testing/lib.py +%%HADOOP_HOME%%/contrib/hod/testing/main.py +%%HADOOP_HOME%%/contrib/hod/testing/testHadoop.py +%%HADOOP_HOME%%/contrib/hod/testing/testHod.py +%%HADOOP_HOME%%/contrib/hod/testing/testHodCleanup.py +%%HADOOP_HOME%%/contrib/hod/testing/testHodRing.py +%%HADOOP_HOME%%/contrib/hod/testing/testModule.py +%%HADOOP_HOME%%/contrib/hod/testing/testRingmasterRPCs.py +%%HADOOP_HOME%%/contrib/hod/testing/testThreads.py +%%HADOOP_HOME%%/contrib/hod/testing/testTypes.py +%%HADOOP_HOME%%/contrib/hod/testing/testUtil.py +%%HADOOP_HOME%%/contrib/hod/testing/testXmlrpc.py +%%HADOOP_HOME%%/contrib/hod/CHANGES.txt +%%HADOOP_HOME%%/contrib/hod/README +%%HADOOP_HOME%%/contrib/hod/build.xml +%%HADOOP_HOME%%/contrib/hod/config.txt +%%HADOOP_HOME%%/contrib/hod/getting_started.txt +%%HADOOP_HOME%%/contrib/hod/ivy.xml +%%HADOOP_HOME%%/contrib/index/hadoop-index-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/streaming/hadoop-streaming-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/thriftfs/hadoop-thriftfs-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/vaidya/bin/vaidya.sh +%%HADOOP_HOME%%/contrib/vaidya/conf/postex_diagnosis_tests.xml +%%HADOOP_HOME%%/contrib/vaidya/hadoop-vaidya-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-ant-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-core-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-test-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-tools-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.17.0.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.1.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.2.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.3.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.0.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.1.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.2.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.20.1.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_%%PORTVERSION%%.xml +%%HADOOP_HOME%%/lib/jsp-2.1/jsp-2.1.jar +%%HADOOP_HOME%%/lib/jsp-2.1/jsp-api-2.1.jar +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.a +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.la +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so.1 +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so.1.0.0 +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.a +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.la +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so.1 +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so.1.0.0 +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.a +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.la +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.so +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.so.1 +%%HADOOP_HOME%%/lib/aspectjrt-1.6.5.jar +%%HADOOP_HOME%%/lib/aspectjtools-1.6.5.jar +%%HADOOP_HOME%%/lib/commons-beanutils-1.7.0.jar +%%HADOOP_HOME%%/lib/commons-beanutils-core-1.8.0.jar +%%HADOOP_HOME%%/lib/commons-cli-1.2.jar +%%HADOOP_HOME%%/lib/commons-codec-1.4.jar +%%HADOOP_HOME%%/lib/commons-collections-3.2.1.jar +%%HADOOP_HOME%%/lib/commons-configuration-1.6.jar +%%HADOOP_HOME%%/lib/commons-daemon-1.0.1.jar +%%HADOOP_HOME%%/lib/commons-digester-1.8.jar +%%HADOOP_HOME%%/lib/commons-el-1.0.jar +%%HADOOP_HOME%%/lib/commons-httpclient-3.0.1.jar +%%HADOOP_HOME%%/lib/commons-lang-2.4.jar +%%HADOOP_HOME%%/lib/commons-logging-1.1.1.jar +%%HADOOP_HOME%%/lib/commons-logging-api-1.0.4.jar +%%HADOOP_HOME%%/lib/commons-math-2.1.jar +%%HADOOP_HOME%%/lib/commons-net-1.4.1.jar 
+%%HADOOP_HOME%%/lib/core-3.1.1.jar +%%HADOOP_HOME%%/lib/hsqldb-1.8.0.10.LICENSE.txt +%%HADOOP_HOME%%/lib/hsqldb-1.8.0.10.jar +%%HADOOP_HOME%%/lib/jackson-core-asl-1.0.1.jar +%%HADOOP_HOME%%/lib/jackson-mapper-asl-1.0.1.jar +%%HADOOP_HOME%%/lib/jasper-compiler-5.5.12.jar +%%HADOOP_HOME%%/lib/jasper-runtime-5.5.12.jar +%%HADOOP_HOME%%/lib/jets3t-0.6.1.jar +%%HADOOP_HOME%%/lib/jetty-6.1.26.jar +%%HADOOP_HOME%%/lib/jetty-util-6.1.26.jar +%%HADOOP_HOME%%/lib/jsch-0.1.42.jar +%%HADOOP_HOME%%/lib/junit-4.5.jar +%%HADOOP_HOME%%/lib/kfs-0.2.2.jar +%%HADOOP_HOME%%/lib/kfs-0.2.LICENSE.txt +%%HADOOP_HOME%%/lib/log4j-1.2.15.jar +%%HADOOP_HOME%%/lib/mockito-all-1.8.5.jar +%%HADOOP_HOME%%/lib/oro-2.0.8.jar +%%HADOOP_HOME%%/lib/servlet-api-2.5-20081211.jar +%%HADOOP_HOME%%/lib/slf4j-api-1.4.3.jar +%%HADOOP_HOME%%/lib/slf4j-log4j12-1.4.3.jar +%%HADOOP_HOME%%/lib/xmlenc-0.52.jar +%%HADOOP_HOME%%/webapps/task/index.html +%%HADOOP_HOME%%/webapps/task/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/static/sorttable.js +%%HADOOP_HOME%%/webapps/static/jobtracker.js +%%HADOOP_HOME%%/webapps/static/jobconf.xsl +%%HADOOP_HOME%%/webapps/static/hadoop.css +%%HADOOP_HOME%%/webapps/static/hadoop-logo.jpg +%%HADOOP_HOME%%/webapps/job/taskstatshistory.jsp +%%HADOOP_HOME%%/webapps/job/taskstats.jsp +%%HADOOP_HOME%%/webapps/job/taskdetailshistory.jsp +%%HADOOP_HOME%%/webapps/job/taskdetails.jsp +%%HADOOP_HOME%%/webapps/job/machines.jsp +%%HADOOP_HOME%%/webapps/job/loadhistory.jsp +%%HADOOP_HOME%%/webapps/job/legacyjobhistory.jsp +%%HADOOP_HOME%%/webapps/job/jobtracker.jsp +%%HADOOP_HOME%%/webapps/job/jobtaskshistory.jsp +%%HADOOP_HOME%%/webapps/job/jobtasks.jsp +%%HADOOP_HOME%%/webapps/job/jobqueue_details.jsp +%%HADOOP_HOME%%/webapps/job/jobhistoryhome.jsp +%%HADOOP_HOME%%/webapps/job/jobhistory.jsp +%%HADOOP_HOME%%/webapps/job/jobfailures.jsp +%%HADOOP_HOME%%/webapps/job/jobdetailshistory.jsp +%%HADOOP_HOME%%/webapps/job/jobdetails.jsp +%%HADOOP_HOME%%/webapps/job/jobconf_history.jsp +%%HADOOP_HOME%%/webapps/job/jobconf.jsp +%%HADOOP_HOME%%/webapps/job/jobblacklistedtrackers.jsp +%%HADOOP_HOME%%/webapps/job/job_authorization_error.jsp +%%HADOOP_HOME%%/webapps/job/index.html +%%HADOOP_HOME%%/webapps/job/gethistory.jsp +%%HADOOP_HOME%%/webapps/job/analysejobhistory.jsp +%%HADOOP_HOME%%/webapps/job/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/history/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/hdfs/index.html +%%HADOOP_HOME%%/webapps/hdfs/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/datanode/WEB-INF/web.xml +%%EXAMPLESDIR%%/hadoop-examples-%%PORTVERSION%%.jar +%%EXAMPLESDIR%%/core-default.xml +%%EXAMPLESDIR%%/hdfs-default.xml +%%EXAMPLESDIR%%/mapred-default.xml +%%EXAMPLESDIR%%/conf/masters +%%EXAMPLESDIR%%/conf/slaves +%%EXAMPLESDIR%%/conf/ssl-client.xml.example +%%EXAMPLESDIR%%/conf/ssl-server.xml.example +@unexec if cmp -s %D/%%HADOOP_ETC%%/capacity-scheduler.xml %D/%%EXAMPLESDIR%%/conf/capacity-scheduler.xml; then rm -f %D/%%HADOOP_ETC%%/capacity-scheduler.xml; fi +%%EXAMPLESDIR%%/conf/capacity-scheduler.xml +@exec [ -f %D/%%HADOOP_ETC%%/capacity-scheduler.xml ] || cp %D/%%EXAMPLESDIR%%/conf/capacity-scheduler.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/configuration.xsl %D/%%EXAMPLESDIR%%/conf/configuration.xsl; then rm -f %D/%%HADOOP_ETC%%/configuration.xsl; fi +%%EXAMPLESDIR%%/conf/configuration.xsl +@exec [ -f %D/%%HADOOP_ETC%%/configuration.xsl ] || cp %D/%%EXAMPLESDIR%%/conf/configuration.xsl %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/core-site.xml %D/%%EXAMPLESDIR%%/conf/core-site.xml; then rm -f 
%D/%%HADOOP_ETC%%/core-site.xml; fi +%%EXAMPLESDIR%%/conf/core-site.xml +@exec [ -f %D/%%HADOOP_ETC%%/core-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/core-site.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-env.sh %D/%%EXAMPLESDIR%%/conf/hadoop-env.sh; then rm -f %D/%%HADOOP_ETC%%/hadoop-env.sh; fi +%%EXAMPLESDIR%%/conf/hadoop-env.sh +@exec [ -f %D/%%HADOOP_ETC%%/hadoop-env.sh ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-env.sh %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-metrics2.properties %D/%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties; then rm -f %D/%%HADOOP_ETC%%/hadoop-metrics2.properties; fi +%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties +@exec [ -f %D/%%HADOOP_ETC%%/hadoop-metrics2.properties ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-policy.xml %D/%%EXAMPLESDIR%%/conf/hadoop-policy.xml; then rm -f %D/%%HADOOP_ETC%%/hadoop-policy.xml; fi +%%EXAMPLESDIR%%/conf/hadoop-policy.xml +@exec [ -f %D/%%HADOOP_ETC%%/hadoop-policy.xml ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-policy.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hdfs-site.xml %D/%%EXAMPLESDIR%%/conf/hdfs-site.xml; then rm -f %D/%%HADOOP_ETC%%/hdfs-site.xml; fi +%%EXAMPLESDIR%%/conf/hdfs-site.xml +@exec [ -f %D/%%HADOOP_ETC%%/hdfs-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/hdfs-site.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/log4j.properties %D/%%EXAMPLESDIR%%/conf/log4j.properties; then rm -f %D/%%HADOOP_ETC%%/log4j.properties; fi +%%EXAMPLESDIR%%/conf/log4j.properties +@exec [ -f %D/%%HADOOP_ETC%%/log4j.properties ] || cp %D/%%EXAMPLESDIR%%/conf/log4j.properties %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/mapred-queue-acls.xml %D/%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml; then rm -f %D/%%HADOOP_ETC%%/mapred-queue-acls.xml; fi +%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml +@exec [ -f %D/%%HADOOP_ETC%%/mapred-queue-acls.xml ] || cp %D/%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/mapred-site.xml %D/%%EXAMPLESDIR%%/conf/mapred-site.xml; then rm -f %D/%%HADOOP_ETC%%/mapred-site.xml; fi +%%EXAMPLESDIR%%/conf/mapred-site.xml +@exec [ -f %D/%%HADOOP_ETC%%/mapred-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/mapred-site.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/taskcontroller.cfg %D/%%EXAMPLESDIR%%/conf/taskcontroller.cfg; then rm -f %D/%%HADOOP_ETC%%/taskcontroller.cfg; fi +%%EXAMPLESDIR%%/conf/taskcontroller.cfg +@exec [ -f %D/%%HADOOP_ETC%%/taskcontroller.cfg ] || cp %D/%%EXAMPLESDIR%%/conf/taskcontroller.cfg %D/%%HADOOP_ETC%% +include/hadoop/StringUtils.hh +include/hadoop/SerialUtils.hh +include/hadoop/Pipes.hh +include/hadoop/TemplateFactory.hh +lib/libhadooputils.a +lib/libhadooppipes.a +lib/libhdfs.so.0 +lib/libhdfs.so +lib/libhdfs.la +@exec mkdir -p %D/%%HADOOP_HOME%%/webapps/secondary/WEB-INF +@exec mkdir -p %D/%%HADOOP_HOME%%/contrib/hdfsproxy/logs +@dirrm %%EXAMPLESDIR%%/conf +@dirrm %%EXAMPLESDIR%% +@dirrm %%HADOOP_HOME%%/lib/native/Linux-i386-32 +@dirrm %%HADOOP_HOME%%/lib/native/Linux-amd64-64 +@dirrm %%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%% +@dirrm %%HADOOP_HOME%%/lib/native +@dirrm %%HADOOP_HOME%%/lib/jsp-2.1 +@dirrm %%HADOOP_HOME%%/lib/jdiff +@dirrm %%HADOOP_HOME%%/lib +@dirrm %%HADOOP_HOME%%/bin +@dirrm %%HADOOP_HOME%%/webapps/task/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/task +@dirrm %%HADOOP_HOME%%/webapps/static +@dirrm %%HADOOP_HOME%%/webapps/secondary/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/secondary 
+@dirrm %%HADOOP_HOME%%/webapps/job/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/job
+@dirrm %%HADOOP_HOME%%/webapps/history/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/history
+@dirrm %%HADOOP_HOME%%/webapps/hdfs/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/hdfs
+@dirrm %%HADOOP_HOME%%/webapps/datanode/WEB-INF
+@dirrm %%HADOOP_HOME%%/webapps/datanode
+@dirrm %%HADOOP_HOME%%/webapps
+@dirrm %%HADOOP_HOME%%/contrib/vaidya/conf
+@dirrm %%HADOOP_HOME%%/contrib/vaidya/bin
+@dirrm %%HADOOP_HOME%%/contrib/vaidya
+@dirrm %%HADOOP_HOME%%/contrib/thriftfs
+@dirrm %%HADOOP_HOME%%/contrib/streaming
+@dirrm %%HADOOP_HOME%%/contrib/index
+@dirrm %%HADOOP_HOME%%/contrib/hod/testing
+@dirrm %%HADOOP_HOME%%/contrib/hod/support
+@dirrm %%HADOOP_HOME%%/contrib/hod/ivy
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/NodePools
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/HodRing
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Hod
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/GridServices
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Common
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers
+@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib
+@dirrm %%HADOOP_HOME%%/contrib/hod/conf
+@dirrm %%HADOOP_HOME%%/contrib/hod/bin
+@dirrm %%HADOOP_HOME%%/contrib/hod
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/logs
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/conf
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/bin
+@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy
+@dirrm %%HADOOP_HOME%%/contrib/gridmix
+@dirrm %%HADOOP_HOME%%/contrib/fairscheduler
+@dirrm %%HADOOP_HOME%%/contrib/failmon
+@dirrm %%HADOOP_HOME%%/contrib/eclipse-plugin
+@dirrm %%HADOOP_HOME%%/contrib/datajoin
+@dirrm %%HADOOP_HOME%%/contrib/capacity-scheduler
+@dirrm %%HADOOP_HOME%%/contrib
+@dirrm %%HADOOP_HOME%%
+@dirrm include/hadoop
+@dirrmtry %%HADOOP_ETC%%/envvars.d
+@dirrmtry %%HADOOP_ETC%%
Index: ports/devel/hadoop/files/000.java_home.env.in
===================================================================
RCS file: ports/devel/hadoop/files/000.java_home.env.in
diff -N ports/devel/hadoop/files/000.java_home.env.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/000.java_home.env.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,2 @@
+export JAVA_HOME=${JAVA_HOME:-%%JAVA_HOME%%}
+
Index: ports/devel/hadoop/files/datanode.in
===================================================================
RCS file: ports/devel/hadoop/files/datanode.in
diff -N ports/devel/hadoop/files/datanode.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/datanode.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+
+# PROVIDE: datanode
+# REQUIRE: NETWORKING SERVERS
+# BEFORE: DAEMON
+# KEYWORD: shutdown
+
+datanode_enable="${datanode_enable:-"NO"}"
+datanode_user="${datanode_user:-%%HADOOP_USER%%}"
+
+. /etc/rc.subr
+
+name="datanode"
+rcvar=`set_rcvar`
+
+load_rc_config "${name}"
+
+for i in `ls %%HADOOP_ETC%%/envvars.d/*.env`
+do
+	. ${i}
+done
+
+export HADOOP_PID_DIR=/var/run/hadoop
+export HADOOP_IDENT_STRING=ports
+command="/usr/sbin/daemon"
+flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start datanode"
+
+stop_cmd=hadoop_stop
+
+hadoop_stop () {
+	su -m ${datanode_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop datanode"'
+}
+
+run_rc_command "$1"
+
Index: ports/devel/hadoop/files/hadoop.in
===================================================================
RCS file: ports/devel/hadoop/files/hadoop.in
diff -N ports/devel/hadoop/files/hadoop.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/hadoop.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+for i in `ls %%HADOOP_ETC%%/envvars.d/*.env`
+do
+	. ${i}
+done
+
+exec %%HADOOP_HOME%%/bin/hadoop "$@"
Index: ports/devel/hadoop/files/jobtracker.in
===================================================================
RCS file: ports/devel/hadoop/files/jobtracker.in
diff -N ports/devel/hadoop/files/jobtracker.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/jobtracker.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+
+# PROVIDE: jobtracker
+# REQUIRE: NETWORKING SERVERS
+# BEFORE: DAEMON
+# KEYWORD: shutdown
+
+jobtracker_enable="${jobtracker_enable:-"NO"}"
+jobtracker_user="${jobtracker_user:-%%HADOOP_USER%%}"
+
+. /etc/rc.subr
+
+name="jobtracker"
+rcvar=`set_rcvar`
+
+load_rc_config "${name}"
+
+for i in `ls %%HADOOP_ETC%%/envvars.d/*.env`
+do
+	. ${i}
+done
+
+export HADOOP_PID_DIR=/var/run/hadoop
+export HADOOP_IDENT_STRING=ports
+command="/usr/sbin/daemon"
+flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start jobtracker"
+
+stop_cmd=hadoop_stop
+
+hadoop_stop () {
+	su -m ${jobtracker_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop jobtracker"'
+}
+
+run_rc_command "$1"
+
Index: ports/devel/hadoop/files/namenode.in
===================================================================
RCS file: ports/devel/hadoop/files/namenode.in
diff -N ports/devel/hadoop/files/namenode.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/namenode.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+
+# PROVIDE: namenode
+# REQUIRE: NETWORKING SERVERS
+# BEFORE: DAEMON
+# KEYWORD: shutdown
+
+namenode_enable="${namenode_enable:-"NO"}"
+namenode_user="${namenode_user:-%%HADOOP_USER%%}"
+
+. /etc/rc.subr
+
+name="namenode"
+rcvar=`set_rcvar`
+
+load_rc_config "${name}"
+
+for i in `ls %%HADOOP_ETC%%/envvars.d/*.env`
+do
+	. 
${i} +done + +export HADOOP_PID_DIR=/var/run/hadoop +export HADOOP_IDENT_STRING=ports +command="/usr/sbin/daemon" +flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start namenode" + +stop_cmd=hadoop_stop + +hadoop_stop () { + su -m ${namenode_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop namenode"' +} + +run_rc_command "$1" + Index: ports/devel/hadoop/files/patch-build.xml =================================================================== RCS file: ports/devel/hadoop/files/patch-build.xml diff -N ports/devel/hadoop/files/patch-build.xml --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-build.xml 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,80 @@ +--- build.xml.orig 2011-05-04 08:30:16.000000000 +0200 ++++ build.xml 2011-08-08 00:08:22.000000000 +0200 +@@ -372,7 +372,7 @@ + + + +- ++ + + + +- + + +@@ -1669,7 +1669,7 @@ + + + +- ++ + + + +@@ -1703,7 +1703,7 @@ + + + +- ++ + + +@@ -1726,7 +1726,7 @@ + + + +- ++ + + + +@@ -1747,7 +1747,7 @@ + + + +- ++ + + + +@@ -2326,5 +2326,23 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + ++ ++ ++ ++ ++ ++ ++ + Index: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c diff -N ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,11 @@ +--- src/c++/libhdfs/hdfs.c.orig 2011-08-07 16:38:59.000000000 +0200 ++++ src/c++/libhdfs/hdfs.c 2011-08-07 16:39:18.000000000 +0200 +@@ -252,7 +252,7 @@ + cURI = malloc(strlen(host)+16); + sprintf(cURI, "hdfs://%s:%d", host, (int)(port)); + if (cURI == NULL) { +- fprintf (stderr, "Couldn't allocate an object of size %d", ++ fprintf (stderr, "Couldn't allocate an object of size %llu", + strlen(host) + 16); + errno = EINTERNAL; + goto done; Index: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c diff -N ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,11 @@ +--- src/c++/libhdfs/hdfsJniHelper.c.orig 2011-05-04 08:30:16.000000000 +0200 ++++ src/c++/libhdfs/hdfsJniHelper.c 2011-08-07 16:40:54.000000000 +0200 +@@ -15,7 +15,7 @@ + */ + + #include +-#include ++//#include + #include "hdfsJniHelper.h" + + static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER; Index: ports/devel/hadoop/files/patch-src__native__Makefile.in =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__Makefile.in diff -N ports/devel/hadoop/files/patch-src__native__Makefile.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__Makefile.in 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,11 @@ +--- src/native/Makefile.in.orig 2011-08-07 16:41:46.000000000 +0200 ++++ src/native/Makefile.in 2011-08-07 16:41:56.000000000 +0200 +@@ -240,7 +240,7 @@ + src/org/apache/hadoop/io/nativeio/NativeIO.c + + libhadoop_la_LDFLAGS = -version-info 1:0:0 +-libhadoop_la_LIBADD = -ldl -ljvm ++libhadoop_la_LIBADD = -ljvm + all: config.h + $(MAKE) $(AM_MAKEFLAGS) all-am + Index: 
ports/devel/hadoop/files/patch-src__native__configure.ac =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__configure.ac diff -N ports/devel/hadoop/files/patch-src__native__configure.ac --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__configure.ac 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,13 @@ +--- src/native/configure.ac.orig 2011-05-04 08:30:16.000000000 +0200 ++++ src/native/configure.ac 2011-08-07 16:17:58.000000000 +0200 +@@ -46,10 +46,6 @@ + AC_PROG_CC + AC_PROG_LIBTOOL + +-# Checks for libraries. +-dnl Check for '-ldl' +-AC_CHECK_LIB([dl], [dlopen]) +- + dnl Check for '-ljvm' + JNI_LDFLAGS="" + if test $JAVA_HOME != "" Index: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c diff -N ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,11 @@ +--- src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.orig 2011-08-10 13:43:50.000000000 +0200 ++++ src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c 2011-08-10 13:44:32.000000000 +0200 +@@ -188,7 +188,7 @@ + if (flags & O_CREAT) { + fd = open(path, flags, mode); + } else { +- fd = open(path, flags); ++ fd = open(path, flags | O_CREAT); + } + + if (fd == -1) { Index: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c diff -N ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,12 @@ +--- src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.orig 2011-08-07 16:43:00.000000000 +0200 ++++ src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c 2011-08-07 16:43:30.000000000 +0200 +@@ -73,7 +73,8 @@ + // was successfull or not (as long as it was called we need to call + // endnetgrent) + setnetgrentCalledFlag = 1; +- if(setnetgrent(cgroup) == 1) { ++ setnetgrent(cgroup); ++ if(1 == 1) { + UserList *current = NULL; + // three pointers are for host, user, domain, we only care + // about user now Index: ports/devel/hadoop/files/pkg-deinstall.in =================================================================== RCS file: ports/devel/hadoop/files/pkg-deinstall.in diff -N ports/devel/hadoop/files/pkg-deinstall.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/pkg-deinstall.in 12 Oct 2011 21:25:44 -0000 @@ -0,0 +1,14 @@ +#!/bin/sh +# $FreeBSD$ + +PATH="/bin:/sbin:/usr/bin:/usr/sbin" + +RUNDIR=%%HADOOP_RUNDIR%% +LOGDIR=%%HADOOP_LOGDIR%% + +if [ "$2" = "POST-DEINSTALL" ]; then + echo "=> Deleting ${RUNDIR} if empty..." + rm -d ${RUNDIR} 2>/dev/null || true + echo "=> Deleting ${LOGDIR} if empty..." 
+	rm -d ${LOGDIR} 2>/dev/null || true
+fi
Index: ports/devel/hadoop/files/pkg-install.in
===================================================================
RCS file: ports/devel/hadoop/files/pkg-install.in
diff -N ports/devel/hadoop/files/pkg-install.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/pkg-install.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,54 @@
+#!/bin/sh
+# $FreeBSD$
+PATH="/bin:/sbin:/usr/bin:/usr/sbin"
+
+HADOOP_USER=%%HADOOP_USER%%
+HADOOP_GROUP=%%HADOOP_GROUP%%
+UID=%%HADOOP_UID%%
+GID=%%HADOOP_GID%%
+
+RUNDIR=%%HADOOP_RUNDIR%%
+LOGDIR=%%HADOOP_LOGDIR%%
+
+PW="pw"
+CHOWN="chown"
+INSTALL_DIR="install -d -o ${HADOOP_USER} -g ${HADOOP_GROUP} -m 0755"
+
+if [ "$2" = "PRE-INSTALL" ]; then
+
+	if ! ${PW} groupshow ${HADOOP_GROUP} 2>/dev/null 1>&2; then
+		if ${PW} groupadd ${HADOOP_GROUP} -g $GID; then
+			echo "=> Added group \"${HADOOP_GROUP}\"."
+		else
+			echo "=> Adding group \"${HADOOP_GROUP}\" failed..."
+			exit 1
+		fi
+	fi
+
+	if ! ${PW} usershow ${HADOOP_USER} 2>/dev/null 1>&2; then
+		if ${PW} useradd ${HADOOP_USER} -u $UID -g ${HADOOP_GROUP} -h - \
+			-s "/sbin/nologin" -d "/nonexistent" \
+			-c "Hadoop Daemons"; then
+			echo "=> Added user \"${HADOOP_USER}\"."
+		else
+			echo "=> Adding user \"${HADOOP_USER}\" failed..."
+			exit 1
+		fi
+	fi
+
+elif [ "$2" = "POST-INSTALL" ]; then
+	if [ -d ${RUNDIR} ]; then
+		echo "=> ${RUNDIR} already exists."
+	else
+		echo -n "=> Creating RUNDIR ${RUNDIR}... "
+		${INSTALL_DIR} ${RUNDIR} || echo "failed"
+	fi
+	if [ -d ${LOGDIR} ]; then
+		echo "=> ${LOGDIR} already exists."
+	else
+		echo -n "=> Creating LOGDIR ${LOGDIR}... "
+		${INSTALL_DIR} ${LOGDIR} || echo "failed"
+	fi
+fi
+
+exit 0
Index: ports/devel/hadoop/files/secondarynamenode.in
===================================================================
RCS file: ports/devel/hadoop/files/secondarynamenode.in
diff -N ports/devel/hadoop/files/secondarynamenode.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/secondarynamenode.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+
+# PROVIDE: secondarynamenode
+# REQUIRE: NETWORKING SERVERS
+# BEFORE: DAEMON
+# KEYWORD: shutdown
+
+secondarynamenode_enable="${secondarynamenode_enable:-"NO"}"
+secondarynamenode_user="${secondarynamenode_user:-%%HADOOP_USER%%}"
+
+. /etc/rc.subr
+
+name="secondarynamenode"
+rcvar=`set_rcvar`
+
+load_rc_config "${name}"
+
+for i in `ls %%HADOOP_ETC%%/envvars.d/*.env`
+do
+	. ${i}
+done
+
+export HADOOP_PID_DIR=/var/run/hadoop
+export HADOOP_IDENT_STRING=ports
+command="/usr/sbin/daemon"
+flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start secondarynamenode"
+
+stop_cmd=hadoop_stop
+
+hadoop_stop () {
+	su -m ${secondarynamenode_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop secondarynamenode"'
+}
+
+run_rc_command "$1"
+
Index: ports/devel/hadoop/files/tasktracker.in
===================================================================
RCS file: ports/devel/hadoop/files/tasktracker.in
diff -N ports/devel/hadoop/files/tasktracker.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/devel/hadoop/files/tasktracker.in	12 Oct 2011 21:25:44 -0000
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+
+# PROVIDE: tasktracker
+# REQUIRE: NETWORKING SERVERS
+# BEFORE: DAEMON
+# KEYWORD: shutdown
+
+tasktracker_enable="${tasktracker_enable:-"NO"}"
+tasktracker_user="${tasktracker_user:-%%HADOOP_USER%%}"
+
+. /etc/rc.subr
+
+name="tasktracker"
+rcvar=`set_rcvar`
+
+load_rc_config "${name}"
+
+for i in `ls %%HADOOP_ETC%%/envvars.d/*.env`
+do
+	. ${i}
+done
+
+export HADOOP_PID_DIR=/var/run/hadoop
+export HADOOP_IDENT_STRING=ports
+command="/usr/sbin/daemon"
+flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start tasktracker"
+
+stop_cmd=hadoop_stop
+
+hadoop_stop () {
+	su -m ${tasktracker_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop tasktracker"'
+}
+
+run_rc_command "$1"
+
Index: ports/databases/hive/Makefile
===================================================================
RCS file: ports/databases/hive/Makefile
diff -N ports/databases/hive/Makefile
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/databases/hive/Makefile	17 Nov 2011 13:46:43 -0000
@@ -0,0 +1,85 @@
+# New ports collection makefile for:	hive
+# Date created:				2011-08-07
+# Whom:					Clement Laforet
+#
+# $FreeBSD$
+
+PORTNAME=	hive
+PORTVERSION=	0.7.1
+CATEGORIES=	databases java
+MASTER_SITES=	${MASTER_SITE_APACHE}
+MASTER_SITE_SUBDIR=	${PORTNAME}/${PORTNAME}-${PORTVERSION}
+DISTNAME=	${PORTNAME}-${PORTVERSION}-bin
+DIST_SUBDIR=	hadoop
+
+MAINTAINER=	clement@FreeBSD.org
+COMMENT=	Data warehouse system for Hadoop
+
+LICENSE=	ASL
+LICENSE_FILE=	${WRKSRC}/LICENSE
+
+USE_JAVA=	yes
+JAVA_VERSION=	1.6+
+
+NO_BUILD=	yes
+
+RUN_DEPENDS+=	${LOCALBASE}/bin/bash:${PORTSDIR}/shells/bash \
+		${LOCALBASE}/bin/hadoop:${PORTSDIR}/devel/hadoop
+
+.if defined(WITH_MYSQL)
+RUN_DEPENDS+=	${JAVALIBDIR}/mysql-connector-java.jar:${PORTSDIR}/databases/mysql-connector-java
+SUB_LIST+=	HIVE_MYSQL="export HIVE_AUX_JARS_PATH=${JAVALIBDIR}/mysql-connector-java.jar"
+.else
+SUB_LIST+=	HIVE_MYSQL=""
+.endif
+
+_HIVE_ETC=	etc/${PORTNAME}
+HIVE_ETC=	${PREFIX}/${_HIVE_ETC}
+HADOOP_HOME=	${LOCALBASE}/share/hadoop
+HADOOP_ETC=	${LOCALBASE}/etc/hadoop
+
+PLIST_SUB+=	HIVE_ETC="${_HIVE_ETC}" \
+		HADOOP_HOME="${HADOOP_HOME}"
+SUB_LIST+=	${PLIST_SUB} \
+		JAVA_HOME="${JAVA_HOME}" \
+		HIVE_BIN="${DATADIR}/bin/${PORTNAME}" \
+		HIVE_ETC="${HIVE_ETC}" \
+		HADOOP_HOME="${HADOOP_HOME}" \
+		P="${PREFIX}" \
+		HADOOP_ETC="${HADOOP_ETC}"
+
+SUB_FILES+=	hive-env.sh hive
+
+PORTDOCS=	*
+
+DOC=		NOTICE README.txt RELEASE_NOTES.txt
+
+DEFAULTS=	conf/hive-default.xml
+
+DIST=		bin lib scripts
+
+CONF=		hive-exec-log4j.properties hive-log4j.properties hive-site.xml hive-env.sh
+
+post-extract:
+	@${RM} ${WRKSRC}/conf/hive-env.sh.template
+pre-install:
+	@${TOUCH} ${WRKSRC}/conf/hive-site.xml
+	@${INSTALL_DATA} ${WRKDIR}/hive-env.sh ${WRKSRC}/conf/
+do-install:
+	@${MKDIR} ${DATADIR}
+	@${TAR} -cf - -C ${WRKSRC}/ ${DIST} | ${TAR} -xf - -C ${DATADIR}
+	@${MKDIR} ${EXAMPLESDIR}
+	@${TAR} -cf - -C ${WRKSRC}/ conf | ${TAR} -xf - -C ${EXAMPLESDIR}
+
+.if !defined(NOPORTDOCS)
+	@${MKDIR} ${DOCSDIR}
+.for f in ${DOC}
+	@${INSTALL_DATA} ${WRKSRC}/${f} ${DOCSDIR}
+.endfor
+.endif
+	@${MKDIR} ${HIVE_ETC}
+.for f in ${CONF}
+	@[ -f ${HIVE_ETC}/${f} ] || ${INSTALL_DATA} ${EXAMPLESDIR}/conf/${f} ${HIVE_ETC}
+.endfor
+	${INSTALL_SCRIPT} ${WRKDIR}/hive ${PREFIX}/bin/hive
+.include <bsd.port.mk>
Index: ports/databases/hive/distinfo
===================================================================
RCS file: ports/databases/hive/distinfo
diff -N ports/databases/hive/distinfo
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/databases/hive/distinfo	12 Oct 2011 21:25:01 -0000
@@ -0,0 +1,2 @@
+SHA256 (hadoop/hive-0.7.1-bin.tar.gz) = 387d349cb779d5ec7a1247aa692689eafea6658cf844be96044936877b00cab0
+SIZE (hadoop/hive-0.7.1-bin.tar.gz) = 18896660
Index: ports/databases/hive/pkg-descr
=================================================================== RCS file: ports/databases/hive/pkg-descr diff -N ports/databases/hive/pkg-descr --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/databases/hive/pkg-descr 12 Oct 2011 21:25:01 -0000 @@ -0,0 +1,7 @@ +Hive is a data warehouse system for Hadoop that facilitates easy data +summarization, ad-hoc queries, and the analysis of large datasets +stored in Hadoop compatible file systems. Hive provides a mechanism +to project structure onto this data and query the data using a +SQL-like language called HiveQL. + +WWW: http://hive.apache.org/ Index: ports/databases/hive/pkg-plist =================================================================== RCS file: ports/databases/hive/pkg-plist diff -N ports/databases/hive/pkg-plist --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/databases/hive/pkg-plist 12 Oct 2011 21:25:01 -0000 @@ -0,0 +1,238 @@ +bin/hive +@exec mkdir -p %D/%%HIVE_ETC%% || true +@unexec if cmp %D/%%HIVE_ETC%%/hive-env.sh %D/%%EXAMPLESDIR%%/conf/hive-env.sh; then rm -f %D/%%HIVE_ETC%%/hive-env.sh; fi +%%EXAMPLESDIR%%/conf/hive-env.sh +@exec [ -f %D/%%HIVE_ETC%%/hive-env.sh ] || cp %D/%%EXAMPLESDIR%%/conf/hive-env.sh %D/%%HIVE_ETC%% +@unexec if cmp %D/%%HIVE_ETC%%/hive-exec-log4j.properties %D/%%EXAMPLESDIR%%/conf/hive-exec-log4j.properties; then rm -f %D/%%HIVE_ETC%%/hive-exec-log4j.properties; fi +%%EXAMPLESDIR%%/conf/hive-exec-log4j.properties +@exec [ -f %D/%%HIVE_ETC%%/hive-exec-log4j.properties ] || cp %D/%%EXAMPLESDIR%%/conf/hive-exec-log4j.properties %D/%%HIVE_ETC%% +@unexec if cmp %D/%%HIVE_ETC%%/hive-log4j.properties %D/%%EXAMPLESDIR%%/conf/hive-log4j.properties; then rm -f %D/%%HIVE_ETC%%/hive-log4j.properties; fi +%%EXAMPLESDIR%%/conf/hive-log4j.properties +@exec [ -f %D/%%HIVE_ETC%%/hive-log4j.properties ] || cp %D/%%EXAMPLESDIR%%/conf/hive-log4j.properties %D/%%HIVE_ETC%% +@unexec if cmp %D/%%HIVE_ETC%%/hive-site.xml %D/%%EXAMPLESDIR%%/conf/hive-site.xml; then rm -f %D/%%HIVE_ETC%%/hive-site.xml; fi +%%EXAMPLESDIR%%/conf/hive-site.xml +@exec [ -f %D/%%HIVE_ETC%%/hive-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/hive-site.xml %D/%%HIVE_ETC%% +%%DATADIR%%/bin/ext/util/execHiveCmd.sh +%%DATADIR%%/bin/ext/cli.sh +%%DATADIR%%/bin/ext/help.sh +%%DATADIR%%/bin/ext/hiveserver.sh +%%DATADIR%%/bin/ext/hwi.sh +%%DATADIR%%/bin/ext/jar.sh +%%DATADIR%%/bin/ext/lineage.sh +%%DATADIR%%/bin/ext/metastore.sh +%%DATADIR%%/bin/ext/rcfilecat.sh +%%DATADIR%%/bin/hive +%%DATADIR%%/bin/hive-config.sh +%%DATADIR%%/lib/py/fb303/FacebookService-remote +%%DATADIR%%/lib/py/fb303/FacebookBase.py +%%DATADIR%%/lib/py/fb303/FacebookService.py +%%DATADIR%%/lib/py/fb303/__init__.py +%%DATADIR%%/lib/py/fb303/constants.py +%%DATADIR%%/lib/py/fb303/ttypes.py +%%DATADIR%%/lib/py/hive_metastore/ThriftHiveMetastore-remote +%%DATADIR%%/lib/py/hive_metastore/ThriftHiveMetastore.py +%%DATADIR%%/lib/py/hive_metastore/__init__.py +%%DATADIR%%/lib/py/hive_metastore/constants.py +%%DATADIR%%/lib/py/hive_metastore/ttypes.py +%%DATADIR%%/lib/py/hive_service/ThriftHive-remote +%%DATADIR%%/lib/py/hive_service/ThriftHive.py +%%DATADIR%%/lib/py/hive_service/__init__.py +%%DATADIR%%/lib/py/hive_service/constants.py +%%DATADIR%%/lib/py/hive_service/ttypes.py +%%DATADIR%%/lib/py/fb303_scripts/__init__.py +%%DATADIR%%/lib/py/fb303_scripts/fb303_simple_mgmt.py +%%DATADIR%%/lib/py/hive_serde/__init__.py +%%DATADIR%%/lib/py/hive_serde/constants.py +%%DATADIR%%/lib/py/hive_serde/ttypes.py +%%DATADIR%%/lib/py/queryplan/__init__.py +%%DATADIR%%/lib/py/queryplan/constants.py 
+%%DATADIR%%/lib/py/queryplan/ttypes.py +%%DATADIR%%/lib/py/thrift/protocol/TBinaryProtocol.py +%%DATADIR%%/lib/py/thrift/protocol/TProtocol.py +%%DATADIR%%/lib/py/thrift/protocol/__init__.py +%%DATADIR%%/lib/py/thrift/protocol/fastbinary.c +%%DATADIR%%/lib/py/thrift/reflection/limited/__init__.py +%%DATADIR%%/lib/py/thrift/reflection/limited/constants.py +%%DATADIR%%/lib/py/thrift/reflection/limited/ttypes.py +%%DATADIR%%/lib/py/thrift/reflection/__init__.py +%%DATADIR%%/lib/py/thrift/server/THttpServer.py +%%DATADIR%%/lib/py/thrift/server/TNonblockingServer.py +%%DATADIR%%/lib/py/thrift/server/TServer.py +%%DATADIR%%/lib/py/thrift/server/__init__.py +%%DATADIR%%/lib/py/thrift/transport/THttpClient.py +%%DATADIR%%/lib/py/thrift/transport/TSocket.py +%%DATADIR%%/lib/py/thrift/transport/TTransport.py +%%DATADIR%%/lib/py/thrift/transport/TTwisted.py +%%DATADIR%%/lib/py/thrift/transport/__init__.py +%%DATADIR%%/lib/py/thrift/TSCons.py +%%DATADIR%%/lib/py/thrift/Thrift.py +%%DATADIR%%/lib/py/thrift/__init__.py +%%DATADIR%%/lib/php/ext/thrift_protocol/tags/1.0.0/config.m4 +%%DATADIR%%/lib/php/ext/thrift_protocol/tags/1.0.0/php_thrift_protocol.cpp +%%DATADIR%%/lib/php/ext/thrift_protocol/tags/1.0.0/php_thrift_protocol.h +%%DATADIR%%/lib/php/ext/thrift_protocol/config.m4 +%%DATADIR%%/lib/php/ext/thrift_protocol/php_thrift_protocol.cpp +%%DATADIR%%/lib/php/ext/thrift_protocol/php_thrift_protocol.h +%%DATADIR%%/lib/php/packages/fb303/FacebookService.php +%%DATADIR%%/lib/php/packages/fb303/fb303_types.php +%%DATADIR%%/lib/php/packages/hive_metastore/hive_metastore/ThriftHiveMetastore.php +%%DATADIR%%/lib/php/packages/hive_metastore/hive_metastore/hive_metastore_constants.php +%%DATADIR%%/lib/php/packages/hive_metastore/hive_metastore/hive_metastore_types.php +%%DATADIR%%/lib/php/packages/hive_service/hive_service/ThriftHive.php +%%DATADIR%%/lib/php/packages/hive_service/hive_service/hive_service_types.php +%%DATADIR%%/lib/php/packages/queryplan/queryplan/queryplan_types.php +%%DATADIR%%/lib/php/packages/serde/serde/serde_constants.php +%%DATADIR%%/lib/php/packages/serde/serde/serde_types.php +%%DATADIR%%/lib/php/protocol/TBinaryProtocol.php +%%DATADIR%%/lib/php/protocol/TProtocol.php +%%DATADIR%%/lib/php/transport/TBufferedTransport.php +%%DATADIR%%/lib/php/transport/TFramedTransport.php +%%DATADIR%%/lib/php/transport/THttpClient.php +%%DATADIR%%/lib/php/transport/TMemoryBuffer.php +%%DATADIR%%/lib/php/transport/TNullTransport.php +%%DATADIR%%/lib/php/transport/TPhpStream.php +%%DATADIR%%/lib/php/transport/TSocket.php +%%DATADIR%%/lib/php/transport/TSocketPool.php +%%DATADIR%%/lib/php/transport/TTransport.php +%%DATADIR%%/lib/php/Thrift.php +%%DATADIR%%/lib/php/autoload.php +%%DATADIR%%/lib/ant-contrib-1.0b3.jar +%%DATADIR%%/lib/antlr-runtime-3.0.1.jar +%%DATADIR%%/lib/asm-3.1.jar +%%DATADIR%%/lib/commons-cli-1.2.jar +%%DATADIR%%/lib/commons-codec-1.3.jar +%%DATADIR%%/lib/commons-collections-3.2.1.jar +%%DATADIR%%/lib/commons-dbcp-1.4.jar +%%DATADIR%%/lib/commons-lang-2.4.jar +%%DATADIR%%/lib/commons-logging-1.0.4.jar +%%DATADIR%%/lib/commons-logging-api-1.0.4.jar +%%DATADIR%%/lib/commons-pool-1.5.4.jar +%%DATADIR%%/lib/datanucleus-connectionpool-2.0.3.jar +%%DATADIR%%/lib/datanucleus-core-2.0.3.jar +%%DATADIR%%/lib/derby.jar +%%DATADIR%%/lib/datanucleus-enhancer-2.0.3.jar +%%DATADIR%%/lib/datanucleus-rdbms-2.0.3.jar +%%DATADIR%%/lib/guava-r06.jar +%%DATADIR%%/lib/hbase-0.89.0-SNAPSHOT-tests.jar +%%DATADIR%%/lib/hbase-0.89.0-SNAPSHOT.jar +%%DATADIR%%/lib/hive-anttasks-0.7.1.jar 
+%%DATADIR%%/lib/hive-cli-0.7.1.jar +%%DATADIR%%/lib/hive-common-0.7.1.jar +%%DATADIR%%/lib/hive-contrib-0.7.1.jar +%%DATADIR%%/lib/hive-exec-0.7.1.jar +%%DATADIR%%/lib/hive-hbase-handler-0.7.1.jar +%%DATADIR%%/lib/hive-hwi-0.7.1.jar +%%DATADIR%%/lib/hive-hwi-0.7.1.war +%%DATADIR%%/lib/hive-jdbc-0.7.1.jar +%%DATADIR%%/lib/hive-metastore-0.7.1.jar +%%DATADIR%%/lib/json.jar +%%DATADIR%%/lib/hive-serde-0.7.1.jar +%%DATADIR%%/lib/hive-service-0.7.1.jar +%%DATADIR%%/lib/hive-shims-0.7.1.jar +%%DATADIR%%/lib/jdo2-api-2.3-ec.jar +%%DATADIR%%/lib/jline-0.9.94.jar +%%DATADIR%%/lib/junit-3.8.1.jar +%%DATADIR%%/lib/libfb303.jar +%%DATADIR%%/lib/libthrift.jar +%%DATADIR%%/lib/log4j-1.2.15.jar +%%DATADIR%%/lib/log4j-1.2.16.jar +%%DATADIR%%/lib/slf4j-api-1.6.1.jar +%%DATADIR%%/lib/slf4j-log4j12-1.6.1.jar +%%DATADIR%%/lib/stringtemplate-3.1b1.jar +%%DATADIR%%/lib/thrift-0.5.0.jar +%%DATADIR%%/lib/thrift-fb303-0.5.0.jar +%%DATADIR%%/lib/velocity-1.5.jar +%%DATADIR%%/lib/zookeeper-3.3.1.jar +%%DATADIR%%/scripts/metastore/upgrade/derby/001-HIVE-972.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/002-HIVE-1068.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/003-HIVE-675.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/004-HIVE-1364.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/005-HIVE-417.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/006-HIVE-1823.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/007-HIVE-78.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/README +%%DATADIR%%/scripts/metastore/upgrade/derby/hive-schema-0.3.0.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/hive-schema-0.4.0.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/hive-schema-0.4.1.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/hive-schema-0.5.0.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/hive-schema-0.6.0.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/hive-schema-0.7.0.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/upgrade-0.5.0-to-0.6.0.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/derby/upgrade-0.6.0-to-0.7.0.derby.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/001-HIVE-972.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/002-HIVE-1068.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/003-HIVE-675.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/004-HIVE-1364.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/005-HIVE-417.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/006-HIVE-1823.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/007-HIVE-78.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/README +%%DATADIR%%/scripts/metastore/upgrade/mysql/hive-schema-0.3.0.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/hive-schema-0.4.0.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/hive-schema-0.4.1.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/hive-schema-0.5.0.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/hive-schema-0.6.0.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/hive-schema-0.7.0.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/upgrade-0.5.0-to-0.6.0.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/mysql/upgrade-0.6.0-to-0.7.0.mysql.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/001-HIVE-972.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/002-HIVE-1068.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/003-HIVE-675.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/004-HIVE-1364.postgres.sql 
+%%DATADIR%%/scripts/metastore/upgrade/postgres/005-HIVE-417.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/006-HIVE-1823.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/007-HIVE-78.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/README +%%DATADIR%%/scripts/metastore/upgrade/postgres/hive-schema-0.3.0.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/hive-schema-0.4.0.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/hive-schema-0.4.1.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/hive-schema-0.5.0.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/hive-schema-0.6.0.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/hive-schema-0.7.0.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/upgrade-0.5.0-to-0.6.0.postgres.sql +%%DATADIR%%/scripts/metastore/upgrade/postgres/upgrade-0.6.0-to-0.7.0.postgres.sql +%%EXAMPLESDIR%%/conf/hive-default.xml +@dirrm %%DATADIR%%/scripts/metastore/upgrade/postgres +@dirrm %%DATADIR%%/scripts/metastore/upgrade/mysql +@dirrm %%DATADIR%%/scripts/metastore/upgrade/derby +@dirrm %%DATADIR%%/scripts/metastore/upgrade +@dirrm %%DATADIR%%/scripts/metastore +@dirrm %%DATADIR%%/scripts/ +@dirrm %%DATADIR%%/lib/py/thrift/transport +@dirrm %%DATADIR%%/lib/py/thrift/server +@dirrm %%DATADIR%%/lib/py/thrift/reflection/limited +@dirrm %%DATADIR%%/lib/py/thrift/reflection +@dirrm %%DATADIR%%/lib/py/thrift/protocol +@dirrm %%DATADIR%%/lib/py/thrift +@dirrm %%DATADIR%%/lib/py/queryplan +@dirrm %%DATADIR%%/lib/py/hive_service +@dirrm %%DATADIR%%/lib/py/hive_serde +@dirrm %%DATADIR%%/lib/py/hive_metastore +@dirrm %%DATADIR%%/lib/py/fb303_scripts +@dirrm %%DATADIR%%/lib/py/fb303 +@dirrm %%DATADIR%%/lib/py +@dirrm %%DATADIR%%/lib/php/transport +@dirrm %%DATADIR%%/lib/php/protocol +@dirrm %%DATADIR%%/lib/php/packages/serde/serde +@dirrm %%DATADIR%%/lib/php/packages/serde +@dirrm %%DATADIR%%/lib/php/packages/queryplan/queryplan +@dirrm %%DATADIR%%/lib/php/packages/queryplan +@dirrm %%DATADIR%%/lib/php/packages/hive_service/hive_service +@dirrm %%DATADIR%%/lib/php/packages/hive_service +@dirrm %%DATADIR%%/lib/php/packages/hive_metastore/hive_metastore +@dirrm %%DATADIR%%/lib/php/packages/hive_metastore +@dirrm %%DATADIR%%/lib/php/packages/fb303 +@dirrm %%DATADIR%%/lib/php/packages +@dirrm %%DATADIR%%/lib/php/ext/thrift_protocol/tags/1.0.0 +@dirrm %%DATADIR%%/lib/php/ext/thrift_protocol/tags +@dirrm %%DATADIR%%/lib/php/ext/thrift_protocol +@dirrm %%DATADIR%%/lib/php/ext +@dirrm %%DATADIR%%/lib/php +@dirrm %%DATADIR%%/lib/ +@dirrm %%DATADIR%%/bin/ext/util +@dirrm %%DATADIR%%/bin/ext +@dirrm %%DATADIR%%/bin/ +@dirrm %%DATADIR%% +@dirrm %%EXAMPLESDIR%%/conf +@dirrm %%EXAMPLESDIR%% +@dirrmtry %%HIVE_ETC%% Index: ports/databases/hive/files/hive-env.sh.in =================================================================== RCS file: ports/databases/hive/files/hive-env.sh.in diff -N ports/databases/hive/files/hive-env.sh.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/databases/hive/files/hive-env.sh.in 12 Oct 2011 21:25:01 -0000 @@ -0,0 +1,3 @@ +export HADOOP_HOME=%%HADOOP_HOME%% +export HIVE_CONF_DIR=%%P%%/%%HIVE_ETC%% +%%HIVE_MYSQL%% Index: ports/databases/hive/files/hive.in =================================================================== RCS file: ports/databases/hive/files/hive.in diff -N ports/databases/hive/files/hive.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/databases/hive/files/hive.in 12 Oct 2011 21:25:01 -0000 @@ -0,0 +1,9 @@ +#!/usr/bin/env bash +for i in `ls 
+do
+	. ${i}
+done
+
+export HIVE_CONF_DIR=%%P%%/%%HIVE_ETC%%
+
+exec %%HIVE_BIN%% "$@"
Index: ports/textproc/pig/Makefile
===================================================================
RCS file: ports/textproc/pig/Makefile
diff -N ports/textproc/pig/Makefile
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/textproc/pig/Makefile	17 Nov 2011 13:47:01 -0000
@@ -0,0 +1,74 @@
+# New ports collection makefile for:	pig
+# Date created:		2011-08-08
+# Whom:			Clement Laforet
+#
+# $FreeBSD$
+
+PORTNAME=	pig
+PORTVERSION=	0.9.1
+CATEGORIES=	textproc java
+MASTER_SITES=	${MASTER_SITE_APACHE}
+MASTER_SITE_SUBDIR=	${PORTNAME}/${PORTNAME}-${PORTVERSION}
+DIST_SUBDIR=	hadoop
+
+MAINTAINER=	clement@FreeBSD.org
+COMMENT=	High-level language for analyzing large data sets
+
+LICENSE=	ASL
+LICENSE_FILE=	${WRKSRC}/LICENSE.txt
+
+USE_JAVA=	yes
+JAVA_VERSION=	1.6+
+
+NO_BUILD=	yes
+
+RUN_DEPENDS+=	${LOCALBASE}/bin/bash:${PORTSDIR}/shells/bash \
+		${LOCALBASE}/bin/hadoop:${PORTSDIR}/devel/hadoop
+
+_PIG_ETC=	etc/${PORTNAME}
+PIG_ETC=	${PREFIX}/${_PIG_ETC}
+HADOOP_HOME=	${LOCALBASE}/share/hadoop
+HADOOP_ETC=	${LOCALBASE}/etc/hadoop
+
+PLIST_SUB+=	PIG_ETC="${_PIG_ETC}" \
+		HADOOP_HOME="${HADOOP_HOME}"
+SUB_LIST+=	${PLIST_SUB} \
+		JAVA_HOME="${JAVA_HOME}" \
+		PIG_BIN="${DATADIR}/bin/${PORTNAME}" \
+		HADOOP_ETC="${HADOOP_ETC}"
+
+SUB_FILES+=	pig-env.sh pig
+
+PORTDOCS=	*
+
+DOC=		NOTICE.txt README.txt RELEASE_NOTES.txt CHANGES.txt
+
+DEFAULTS=	conf/pig-default.xml
+
+DIST=		bin lib pig-${PORTVERSION}.jar pig-${PORTVERSION}-withouthadoop.jar
+
+CONF=		pig.properties pig-env.sh log4j.properties.template
+
+post-extract:
+#	@${RM} ${WRKSRC}/conf/pig-env.sh.template
+pre-install:
+	@${INSTALL_DATA} ${WRKDIR}/pig-env.sh ${WRKSRC}/conf/
+do-install:
+	@${MKDIR} ${DATADIR}
+	@${TAR} -cf - -C ${WRKSRC}/ ${DIST} | ${TAR} -xf - -C ${DATADIR}
+	@${MKDIR} ${EXAMPLESDIR}
+	@${TAR} -cf - -C ${WRKSRC}/ conf | ${TAR} -xf - -C ${EXAMPLESDIR}
+
+.if !defined(NOPORTDOCS)
+	@${MKDIR} ${DOCSDIR}
+	@${TAR} -cf - -C ${WRKSRC}/ docs | ${TAR} -xf - -C ${DOCSDIR}
+.for f in ${DOC}
+	@${INSTALL_DATA} ${WRKSRC}/${f} ${DOCSDIR}
+.endfor
+.endif
+	@${MKDIR} ${PIG_ETC}
+.for f in ${CONF}
+	@[ -f ${PIG_ETC}/${f} ] || ${INSTALL_DATA} ${EXAMPLESDIR}/conf/${f} ${PIG_ETC}
+.endfor
+	${INSTALL_SCRIPT} ${WRKDIR}/pig ${PREFIX}/bin/pig
+.include <bsd.port.mk>
Index: ports/textproc/pig/distinfo
===================================================================
RCS file: ports/textproc/pig/distinfo
diff -N ports/textproc/pig/distinfo
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/textproc/pig/distinfo	12 Oct 2011 21:25:17 -0000
@@ -0,0 +1,2 @@
+SHA256 (hadoop/pig-0.9.1.tar.gz) = e5864e9a5f0bc8660d601d71b3f120d45c9ba24e61ec6bed11686ac2cbc7a916
+SIZE (hadoop/pig-0.9.1.tar.gz) = 42408493
Index: ports/textproc/pig/pkg-descr
===================================================================
RCS file: ports/textproc/pig/pkg-descr
diff -N ports/textproc/pig/pkg-descr
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/textproc/pig/pkg-descr	12 Oct 2011 21:25:17 -0000
@@ -0,0 +1,8 @@
+Apache Pig is a platform for analyzing large data sets that consists of
+a high-level language for expressing data analysis programs, coupled
+with infrastructure for evaluating these programs. The salient property
+of Pig programs is that their structure is amenable to substantial
+parallelization, which in turn enables them to handle very large data
+sets.
+
+WWW: http://pig.apache.org/
Index: ports/textproc/pig/pkg-plist
===================================================================
RCS file: ports/textproc/pig/pkg-plist
diff -N ports/textproc/pig/pkg-plist
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/textproc/pig/pkg-plist	10 Nov 2011 14:31:47 -0000
@@ -0,0 +1,25 @@
+@exec mkdir -p %D/%%PIG_ETC%%
+bin/pig
+@unexec if cmp -s %D/%%PIG_ETC%%/pig-env.sh %D/%%EXAMPLESDIR%%/conf/pig-env.sh; then rm -f %D/%%PIG_ETC%%/pig-env.sh; fi
+%%EXAMPLESDIR%%/conf/pig-env.sh
+@exec [ -f %D/%%PIG_ETC%%/pig-env.sh ] || cp %D/%%EXAMPLESDIR%%/conf/pig-env.sh %D/%%PIG_ETC%%
+@unexec if cmp -s %D/%%PIG_ETC%%/log4j.properties.template %D/%%EXAMPLESDIR%%/conf/log4j.properties.template; then rm -f %D/%%PIG_ETC%%/log4j.properties.template; fi
+%%EXAMPLESDIR%%/conf/log4j.properties.template
+@exec [ -f %D/%%PIG_ETC%%/log4j.properties.template ] || cp %D/%%EXAMPLESDIR%%/conf/log4j.properties.template %D/%%PIG_ETC%%
+@unexec if cmp -s %D/%%PIG_ETC%%/pig.properties %D/%%EXAMPLESDIR%%/conf/pig.properties; then rm -f %D/%%PIG_ETC%%/pig.properties; fi
+%%EXAMPLESDIR%%/conf/pig.properties
+@exec [ -f %D/%%PIG_ETC%%/pig.properties ] || cp %D/%%EXAMPLESDIR%%/conf/pig.properties %D/%%PIG_ETC%%
+%%DATADIR%%/bin/pig
+%%DATADIR%%/lib/jdiff/pig_0.8.1.xml
+%%DATADIR%%/lib/jdiff/pig_0.9.1.xml
+%%DATADIR%%/lib/automaton.jar
+%%DATADIR%%/lib/jython-2.5.0.jar
+%%DATADIR%%/pig-0.9.1.jar
+%%DATADIR%%/pig-0.9.1-withouthadoop.jar
+@dirrm %%DATADIR%%/lib/jdiff
+@dirrm %%DATADIR%%/lib
+@dirrm %%DATADIR%%/bin
+@dirrm %%DATADIR%%
+@dirrm %%EXAMPLESDIR%%/conf
+@dirrm %%EXAMPLESDIR%%
+@dirrmtry %%PIG_ETC%%
Index: ports/textproc/pig/files/pig-env.sh.in
===================================================================
RCS file: ports/textproc/pig/files/pig-env.sh.in
diff -N ports/textproc/pig/files/pig-env.sh.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/textproc/pig/files/pig-env.sh.in	12 Oct 2011 21:25:17 -0000
@@ -0,0 +1,2 @@
+export HADOOP_HOME=%%HADOOP_HOME%%
+export PIG_CONF_DIR=%%PREFIX%%/%%PIG_ETC%%
Index: ports/textproc/pig/files/pig.in
===================================================================
RCS file: ports/textproc/pig/files/pig.in
diff -N ports/textproc/pig/files/pig.in
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ports/textproc/pig/files/pig.in	12 Oct 2011 21:25:17 -0000
@@ -0,0 +1,10 @@
+#!/bin/sh
+export JAVA_HOME=${JAVA_HOME:-%%JAVA_HOME%%}
+for i in %%HADOOP_ETC%%/envvars.d/*.env
+do
+	. ${i}
+done
+export PIG_CONF_DIR=%%PREFIX%%/%%PIG_ETC%%
+
+export PIG_CLASSPATH=${HADOOP_HOME}:${HADOOP_CONF_DIR}
+exec %%PIG_BIN%% "$@"
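
For reviewers: the bin/pig wrapper above is expanded at install time via
SUB_FILES/SUB_LIST, so the installed script carries concrete paths. The
sketch below shows roughly what comes out under the default PREFIX of
/usr/local; the JAVA_HOME fallback path (/usr/local/openjdk6) is only a
placeholder for whatever JDK the port is actually built against:

    #!/bin/sh
    # Respect a JAVA_HOME already set by the caller; otherwise fall back
    # to the build-time JDK (placeholder path, depends on the JDK used).
    export JAVA_HOME=${JAVA_HOME:-/usr/local/openjdk6}
    # Source the environment fragments installed by devel/hadoop; these
    # provide HADOOP_HOME and HADOOP_CONF_DIR to dependent ports.
    for i in /usr/local/etc/hadoop/envvars.d/*.env
    do
            . ${i}
    done
    export PIG_CONF_DIR=/usr/local/etc/pig

    export PIG_CLASSPATH=${HADOOP_HOME}:${HADOOP_CONF_DIR}
    exec /usr/local/share/pig/bin/pig "$@"

The same envvars.d round trip is what lets databases/hive and
textproc/pig follow the hadoop port's Java and configuration settings
without patching their own scripts.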