Index: ports/GIDs =================================================================== RCS file: /home/ncvs/ports/GIDs,v retrieving revision 1.163 diff -u -r1.163 GIDs --- ports/GIDs 12 Jan 2012 13:14:44 -0000 1.163 +++ ports/GIDs 1 Feb 2012 16:27:51 -0000 @@ -226,4 +226,5 @@ openerpd:*:951: bitten-slave:*:952: _neubot:*:953: +hadoop:*:954: nobody:*:65534: Index: ports/UIDs =================================================================== RCS file: /home/ncvs/ports/UIDs,v retrieving revision 1.183 diff -u -r1.183 UIDs --- ports/UIDs 12 Jan 2012 13:14:44 -0000 1.183 +++ ports/UIDs 1 Feb 2012 16:26:04 -0000 @@ -228,4 +228,5 @@ openerpd:*:951:951::0:0:Openerpd user:/nonexistent:/sbin/nologin bitten-slave:*:952:952:daemon:0:0:Bitten slave user:/var/lib/bitten-slave:/sbin/nologin _neubot:*:953:953::0:0:neubot daemon:/nonexistent:/sbin/nologin +hadoop:*:954:954::0:0:hadoop user:/nonexistent:/sbin/nologin nobody:*:65534:65534::0:0:Unprivileged user:/nonexistent:/usr/sbin/nologin Index: ports/devel/Makefile =================================================================== RCS file: /home/ncvs/ports/devel/Makefile,v retrieving revision 1.4819 diff -u -r1.4819 Makefile --- ports/devel/Makefile 30 Jan 2012 09:15:00 -0000 1.4819 +++ ports/devel/Makefile 1 Feb 2012 16:26:31 -0000 @@ -550,6 +550,7 @@ SUBDIR += hachoir-core SUBDIR += hachoir-parser SUBDIR += hachoir-regex + SUBDIR += hadoop SUBDIR += happydoc SUBDIR += hapy SUBDIR += hcs12mem Index: ports/devel/hadoop/Makefile =================================================================== RCS file: ports/devel/hadoop/Makefile diff -N ports/devel/hadoop/Makefile --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/Makefile 1 Feb 2012 16:29:35 -0000 @@ -0,0 +1,143 @@ +# New ports collection makefile for: hadoop +# Date created: 01-02-2012 +# Whom: Clement Laforet +# +# $FreeBSD$ + +PORTNAME= hadoop +PORTVERSION= 1.0.0 +CATEGORIES= devel java +MASTER_SITES= ${MASTER_SITE_APACHE} +MASTER_SITE_SUBDIR= 
${PORTNAME}/core/${PORTNAME}-${PORTVERSION} +DIST_SUBDIR= hadoop + +MAINTAINER= clement@FreeBSD.org +COMMENT= Apache Map/Reduce framework + +LICENSE= AL2 +LICENSE_FILE= ${WRKSRC}/LICENSE.txt + +USE_JAVA= yes +JAVA_VERSION= 1.6 +USE_ANT= yes +ONLY_FOR_ARCHS= amd64 i386 + +WRKSRC= ${WRKDIR}/${PORTNAME}-${PORTVERSION} + +BUILD_DEPENDS+= ${LOCALBASE}/bin/gmake:${PORTSDIR}/devel/gmake \ + ${LOCALBASE}/bin/gsed:${PORTSDIR}/textproc/gsed +RUN_DEPENDS+= ${LOCALBASE}/bin/bash:${PORTSDIR}/shells/bash + +_HADOOP_DIR= share/${PORTNAME} +_HADOOP_ETC= etc/${PORTNAME} +HADOOP_HOME= ${PREFIX}/${_HADOOP_DIR} +HADOOP_ETC= ${PREFIX}/${_HADOOP_ETC} + +HADOOP_LOGDIR= /var/log/${PORTNAME} +HADOOP_RUNDIR= /var/run/${PORTNAME} +HADOOP_BIN= ${PREFIX}/bin/${PORTNAME} + +USERS= hadoop +GROUPS= hadoop +HADOOP_USER= ${USERS} +HADOOP_GROUP= ${GROUPS} + +SUB_FILES+= pkg-install pkg-deinstall hadoop 000.java_home.env +USE_RC_SUBR+= tasktracker jobtracker datanode namenode secondarynamenode + +PLIST_SUB+= PORTVERSION="${PORTVERSION}"\ + HADOOP_HOME="${_HADOOP_DIR}" \ + HADOOP_ETC="${_HADOOP_ETC}" +SUB_LIST= HADOOP_LOGDIR="${HADOOP_LOGDIR}" \ + HADOOP_RUNDIR="${HADOOP_RUNDIR}" \ + HADOOP_USER="${HADOOP_USER}" \ + HADOOP_GROUP="${HADOOP_GROUP}" \ + HADOOP_UID="${HADOOP_UID}" \ + HADOOP_GID="${HADOOP_GID}" \ + HADOOP_HOME="${HADOOP_HOME}" \ + HADOOP_ETC="${HADOOP_ETC}" \ + JAVA_HOME="${JAVA_HOME}" + +PORTDOCS= * +FIX_PERMS= src/c++/pipes/install-sh src/c++/utils/install-sh src/c++/libhdfs/install-sh \ + src/c++/libhdfs/tests/test-libhdfs.sh + +FIX_DOCS= docs/cn/skin/css docs/cn/skin/scripts docs/cn/skin/translations \ + docs/jdiff docs/skin/css docs/skin/scripts docs/skin/translations + +DOC= CHANGES.txt LICENSE.txt NOTICE.txt README.txt + +DEFAULTS= src/core/core-default.xml src/hdfs/hdfs-default.xml src/mapred/mapred-default.xml hadoop-examples-${PORTVERSION}.jar + +DIST= bin contrib hadoop-ant-${PORTVERSION}.jar hadoop-core-${PORTVERSION}.jar \ + hadoop-test-${PORTVERSION}.jar 
hadoop-tools-${PORTVERSION}.jar lib webapps +CONF= capacity-scheduler.xml configuration.xsl core-site.xml hadoop-env.sh hadoop-metrics2.properties \ + hadoop-policy.xml hdfs-site.xml log4j.properties mapred-queue-acls.xml mapred-site.xml taskcontroller.cfg + +MAKE_ARGS= -Dbuild.classes=${WRKSRC}/hadoop-core-${PORTVERSION}.jar -Dversion=${PORTVERSION} -Dcompile.native=true \ + -Dcompile.c++=true -Dmake.cmd=${GMAKE} -Dlibhdfs=1 -Dlibrecordio=true -Dskip.record-parser=true +ALL_TARGET= compile-core-native compile-c++ compile-c++-libhdfs compile-c++-pipes compile-c++-utils + +.include + +.if ${ARCH} == "amd64" +_HADOOP_ARCH= FreeBSD-amd64-64 +.else +_HADOOP_ARCH= FreeBSD-i386-32 +.endif +PLIST_SUB+= HADOOP_ARCH=${_HADOOP_ARCH} + +pre-build: +.for f in ${FIX_PERMS} + @${CHMOD} +x ${WRKSRC}/${f} +.endfor +.for d in ${FIX_DOCS} + @${TOUCH} ${WRKSRC}/${d}/.empty +.endfor + +#do-build: +# @cd ${WRKSRC}; \ +# ${ANT} -Dbuild.classes=${WRKSRC}/hadoop-core-${PORTVERSION}.jar -Dversion=${PORTVERSION} -Dcompile.native=true \ +# -Dcompile.c++=true -Dmake.cmd=${GMAKE} compile-core-native compile-c++ compile-c++-libhdfs compile-c++-pipes \ +# compile-c++-utils -Dlibhdfs=1 -Dlibrecordio=true -Dskip.record-parser=true + +post-build: + @cd ${WRKSRC} ;${ANT} FreeBSD-dist + @${TAR} -cf - -C ${WRKSRC}/build/c++/ ${_HADOOP_ARCH}/lib ${_HADOOP_ARCH}/include | ${TAR} xf - -C ${WRKSRC}/c++/ + +pre-install: + @${SETENV} PKG_PREFIX=${PREFIX} ${SH} ${PKGINSTALL} ${PKGNAME} PRE-INSTALL + +do-install: + @${MKDIR} ${HADOOP_HOME} + @${TAR} -cf - -C ${WRKSRC}/ ${DIST} | ${TAR} -xf - -C ${HADOOP_HOME} + + @${MKDIR} ${HADOOP_ETC} ${HADOOP_ETC}/envvars.d + @${INSTALL_DATA} ${WRKDIR}/000.java_home.env ${HADOOP_ETC}/envvars.d/ + @${ECHO} "export HADOOP_HOME=${HADOOP_HOME}" > ${HADOOP_ETC}/envvars.d/001.hadoop_home.env + @${ECHO} "export HADOOP_CONF_DIR=${HADOOP_ETC}" > ${HADOOP_ETC}/envvars.d/002.hadoop_conf.env + @${ECHO} "export HADOOP_LOG_DIR=${HADOOP_LOGDIR}" > 
${HADOOP_ETC}/envvars.d/003.hadoop_log.env + @${MKDIR} ${EXAMPLESDIR} + @${TAR} -cf - -C ${WRKSRC}/c++/${_HADOOP_ARCH}/ include lib| ${TAR} xf - -C ${PREFIX} + +.for f in ${DEFAULTS} + @${INSTALL_DATA} ${WRKSRC}/${f} ${EXAMPLESDIR} +.endfor + +.if !defined(NOPORTDOCS) + @${MKDIR} ${DOCSDIR} + @${TAR} -cf - -C ${WRKSRC}/ docs | ${TAR} -xf - -C ${DOCSDIR} +.for f in ${DOC} + @${INSTALL_DATA} ${WRKSRC}/${f} ${DOCSDIR} +.endfor +.endif + @${INSTALL_SCRIPT} ${WRKDIR}/hadoop ${PREFIX}/bin/ + @cd ${WRKSRC}; ${COPYTREE_SHARE} conf ${EXAMPLESDIR} +.for f in ${CONF} + @[ -f ${HADOOP_ETC}/${f} ] || ${INSTALL_DATA} ${EXAMPLESDIR}/conf/${f} ${HADOOP_ETC} +.endfor + +post-install: + @${SETENV} PKG_PREFIX=${PREFIX} ${SH} ${PKGINSTALL} ${PKGNAME} POST-INSTALL + +.include Index: ports/devel/hadoop/distinfo =================================================================== RCS file: ports/devel/hadoop/distinfo diff -N ports/devel/hadoop/distinfo --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/distinfo 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,2 @@ +SHA256 (hadoop/hadoop-1.0.0.tar.gz) = 587bc9389d062f4e8042f2604b2d9a574080d4178614cccc07c5e5d743836f71 +SIZE (hadoop/hadoop-1.0.0.tar.gz) = 59468784 Index: ports/devel/hadoop/pkg-descr =================================================================== RCS file: ports/devel/hadoop/pkg-descr diff -N ports/devel/hadoop/pkg-descr --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/pkg-descr 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,5 @@ +The Apache Hadoop software library is a framework that allows for the +distributed processing of large data sets across clusters of computers +using a simple programming model. 
+ +WWW: http://hadoop.apache.org/ Index: ports/devel/hadoop/pkg-plist =================================================================== RCS file: ports/devel/hadoop/pkg-plist diff -N ports/devel/hadoop/pkg-plist --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/pkg-plist 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,345 @@ +bin/hadoop +%%HADOOP_ETC%%/envvars.d/000.java_home.env +%%HADOOP_ETC%%/envvars.d/001.hadoop_home.env +%%HADOOP_ETC%%/envvars.d/002.hadoop_conf.env +%%HADOOP_ETC%%/envvars.d/003.hadoop_log.env +%%HADOOP_HOME%%/bin/hadoop +%%HADOOP_HOME%%/bin/hadoop-config.sh +%%HADOOP_HOME%%/bin/hadoop-daemon.sh +%%HADOOP_HOME%%/bin/hadoop-daemons.sh +%%HADOOP_HOME%%/bin/rcc +%%HADOOP_HOME%%/bin/slaves.sh +%%HADOOP_HOME%%/bin/start-all.sh +%%HADOOP_HOME%%/bin/start-balancer.sh +%%HADOOP_HOME%%/bin/start-dfs.sh +%%HADOOP_HOME%%/bin/start-jobhistoryserver.sh +%%HADOOP_HOME%%/bin/start-mapred.sh +%%HADOOP_HOME%%/bin/stop-all.sh +%%HADOOP_HOME%%/bin/stop-balancer.sh +%%HADOOP_HOME%%/bin/stop-dfs.sh +%%HADOOP_HOME%%/bin/stop-jobhistoryserver.sh +%%HADOOP_HOME%%/bin/stop-mapred.sh +%%HADOOP_HOME%%/bin/task-controller +%%HADOOP_HOME%%/lib/hadoop-capacity-scheduler-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/datajoin/hadoop-datajoin-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/failmon/hadoop-failmon-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/lib/hadoop-fairscheduler-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/gridmix/hadoop-gridmix-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy +%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-config.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/bin/start-hdfsproxy.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/bin/stop-hdfsproxy.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/configuration.xsl +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-default.xml 
+%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-env.sh +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-env.sh.template +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/hdfsproxy-hosts +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/log4j.properties +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/ssl-server.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/tomcat-forward-web.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/tomcat-web.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/user-certs.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/conf/user-permissions.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/README +%%HADOOP_HOME%%/contrib/hdfsproxy/build.xml +%%HADOOP_HOME%%/contrib/hdfsproxy/hdfsproxy-2.0.jar +%%HADOOP_HOME%%/contrib/hod/bin/VERSION +%%HADOOP_HOME%%/contrib/hod/bin/checknodes +%%HADOOP_HOME%%/contrib/hod/bin/hod +%%HADOOP_HOME%%/contrib/hod/bin/hodcleanup +%%HADOOP_HOME%%/contrib/hod/bin/hodring +%%HADOOP_HOME%%/contrib/hod/bin/ringmaster +%%HADOOP_HOME%%/contrib/hod/bin/verify-account +%%HADOOP_HOME%%/contrib/hod/conf/hodrc +%%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers/goldAllocationManager.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/allocationManagerUtil.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/desc.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/descGenerator.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/hodsvc.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/logger.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/miniHTMLParser.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/nodepoolutil.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/setup.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/socketServers.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/tcp.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/threads.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/types.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/util.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Common/xmlrpc.py 
+%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/hdfs.py +%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/mapred.py +%%HADOOP_HOME%%/contrib/hod/hodlib/GridServices/service.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/hadoop.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/hod.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Hod/nodePool.py +%%HADOOP_HOME%%/contrib/hod/hodlib/HodRing/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/HodRing/hodRing.py +%%HADOOP_HOME%%/contrib/hod/hodlib/NodePools/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/NodePools/torque.py +%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/idleJobTracker.py +%%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster/ringMaster.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers/torque.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy/serviceProxy.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry/__init__.py +%%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry/serviceRegistry.py +%%HADOOP_HOME%%/contrib/hod/hodlib/__init__.py +%%HADOOP_HOME%%/contrib/hod/ivy/libraries.properties +%%HADOOP_HOME%%/contrib/hod/support/checklimits.sh +%%HADOOP_HOME%%/contrib/hod/support/logcondense.py +%%HADOOP_HOME%%/contrib/hod/testing/__init__.py +%%HADOOP_HOME%%/contrib/hod/testing/helper.py +%%HADOOP_HOME%%/contrib/hod/testing/lib.py +%%HADOOP_HOME%%/contrib/hod/testing/main.py +%%HADOOP_HOME%%/contrib/hod/testing/testHadoop.py +%%HADOOP_HOME%%/contrib/hod/testing/testHod.py +%%HADOOP_HOME%%/contrib/hod/testing/testHodCleanup.py +%%HADOOP_HOME%%/contrib/hod/testing/testHodRing.py +%%HADOOP_HOME%%/contrib/hod/testing/testModule.py +%%HADOOP_HOME%%/contrib/hod/testing/testRingmasterRPCs.py +%%HADOOP_HOME%%/contrib/hod/testing/testThreads.py 
+%%HADOOP_HOME%%/contrib/hod/testing/testTypes.py +%%HADOOP_HOME%%/contrib/hod/testing/testUtil.py +%%HADOOP_HOME%%/contrib/hod/testing/testXmlrpc.py +%%HADOOP_HOME%%/contrib/hod/CHANGES.txt +%%HADOOP_HOME%%/contrib/hod/README +%%HADOOP_HOME%%/contrib/hod/build.xml +%%HADOOP_HOME%%/contrib/hod/config.txt +%%HADOOP_HOME%%/contrib/hod/getting_started.txt +%%HADOOP_HOME%%/contrib/hod/ivy.xml +%%HADOOP_HOME%%/contrib/index/hadoop-index-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/streaming/hadoop-streaming-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/lib/hadoop-thriftfs-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/contrib/vaidya/bin/vaidya.sh +%%HADOOP_HOME%%/contrib/vaidya/conf/postex_diagnosis_tests.xml +%%HADOOP_HOME%%/contrib/vaidya/hadoop-vaidya-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-ant-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-core-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-test-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/hadoop-tools-%%PORTVERSION%%.jar +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.17.0.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.1.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.2.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.18.3.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.0.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.1.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.19.2.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.20.1.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_0.20.205.0.xml +%%HADOOP_HOME%%/lib/jdiff/hadoop_%%PORTVERSION%%.xml +%%HADOOP_HOME%%/lib/jsp-2.1/jsp-2.1.jar +%%HADOOP_HOME%%/lib/jsp-2.1/jsp-api-2.1.jar +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.a +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.la +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so.1 +%%HADOOP_HOME%%/lib/native/Linux-amd64-64/libhadoop.so.1.0.0 +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.a +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.la +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so 
+%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so.1 +%%HADOOP_HOME%%/lib/native/Linux-i386-32/libhadoop.so.1.0.0 +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.a +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.la +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.so +%%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%%/libhadoop.so.1 +%%HADOOP_HOME%%/lib/asm-3.2.jar +%%HADOOP_HOME%%/lib/aspectjrt-1.6.5.jar +%%HADOOP_HOME%%/lib/aspectjtools-1.6.5.jar +%%HADOOP_HOME%%/lib/commons-beanutils-1.7.0.jar +%%HADOOP_HOME%%/lib/commons-beanutils-core-1.8.0.jar +%%HADOOP_HOME%%/lib/commons-cli-1.2.jar +%%HADOOP_HOME%%/lib/commons-codec-1.4.jar +%%HADOOP_HOME%%/lib/commons-collections-3.2.1.jar +%%HADOOP_HOME%%/lib/commons-configuration-1.6.jar +%%HADOOP_HOME%%/lib/commons-daemon-1.0.1.jar +%%HADOOP_HOME%%/lib/commons-digester-1.8.jar +%%HADOOP_HOME%%/lib/commons-el-1.0.jar +%%HADOOP_HOME%%/lib/commons-httpclient-3.0.1.jar +%%HADOOP_HOME%%/lib/commons-lang-2.4.jar +%%HADOOP_HOME%%/lib/commons-logging-1.1.1.jar +%%HADOOP_HOME%%/lib/commons-logging-api-1.0.4.jar +%%HADOOP_HOME%%/lib/commons-math-2.1.jar +%%HADOOP_HOME%%/lib/commons-net-1.4.1.jar +%%HADOOP_HOME%%/lib/core-3.1.1.jar +%%HADOOP_HOME%%/lib/hsqldb-1.8.0.10.LICENSE.txt +%%HADOOP_HOME%%/lib/hsqldb-1.8.0.10.jar +%%HADOOP_HOME%%/lib/jackson-core-asl-1.0.1.jar +%%HADOOP_HOME%%/lib/jackson-mapper-asl-1.0.1.jar +%%HADOOP_HOME%%/lib/jasper-compiler-5.5.12.jar +%%HADOOP_HOME%%/lib/jasper-runtime-5.5.12.jar +%%HADOOP_HOME%%/lib/jdeb-0.8.jar +%%HADOOP_HOME%%/lib/jersey-core-1.8.jar +%%HADOOP_HOME%%/lib/jersey-json-1.8.jar +%%HADOOP_HOME%%/lib/jersey-server-1.8.jar +%%HADOOP_HOME%%/lib/jets3t-0.6.1.jar +%%HADOOP_HOME%%/lib/jetty-6.1.26.jar +%%HADOOP_HOME%%/lib/jetty-util-6.1.26.jar +%%HADOOP_HOME%%/lib/jsch-0.1.42.jar +%%HADOOP_HOME%%/lib/junit-4.5.jar +%%HADOOP_HOME%%/lib/kfs-0.2.2.jar +%%HADOOP_HOME%%/lib/kfs-0.2.LICENSE.txt +%%HADOOP_HOME%%/lib/log4j-1.2.15.jar +%%HADOOP_HOME%%/lib/mockito-all-1.8.5.jar 
+%%HADOOP_HOME%%/lib/oro-2.0.8.jar +%%HADOOP_HOME%%/lib/servlet-api-2.5-20081211.jar +%%HADOOP_HOME%%/lib/slf4j-api-1.4.3.jar +%%HADOOP_HOME%%/lib/slf4j-log4j12-1.4.3.jar +%%HADOOP_HOME%%/lib/xmlenc-0.52.jar +%%HADOOP_HOME%%/webapps/task/index.html +%%HADOOP_HOME%%/webapps/task/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/static/sorttable.js +%%HADOOP_HOME%%/webapps/static/jobtracker.js +%%HADOOP_HOME%%/webapps/static/jobconf.xsl +%%HADOOP_HOME%%/webapps/static/hadoop.css +%%HADOOP_HOME%%/webapps/static/hadoop-logo.jpg +%%HADOOP_HOME%%/webapps/job/taskstatshistory.jsp +%%HADOOP_HOME%%/webapps/job/taskstats.jsp +%%HADOOP_HOME%%/webapps/job/taskdetailshistory.jsp +%%HADOOP_HOME%%/webapps/job/taskdetails.jsp +%%HADOOP_HOME%%/webapps/job/machines.jsp +%%HADOOP_HOME%%/webapps/job/loadhistory.jsp +%%HADOOP_HOME%%/webapps/job/legacyjobhistory.jsp +%%HADOOP_HOME%%/webapps/job/jobtracker.jsp +%%HADOOP_HOME%%/webapps/job/jobtaskshistory.jsp +%%HADOOP_HOME%%/webapps/job/jobtasks.jsp +%%HADOOP_HOME%%/webapps/job/jobqueue_details.jsp +%%HADOOP_HOME%%/webapps/job/jobhistoryhome.jsp +%%HADOOP_HOME%%/webapps/job/jobhistory.jsp +%%HADOOP_HOME%%/webapps/job/jobfailures.jsp +%%HADOOP_HOME%%/webapps/job/jobdetailshistory.jsp +%%HADOOP_HOME%%/webapps/job/jobdetails.jsp +%%HADOOP_HOME%%/webapps/job/jobconf_history.jsp +%%HADOOP_HOME%%/webapps/job/jobconf.jsp +%%HADOOP_HOME%%/webapps/job/jobblacklistedtrackers.jsp +%%HADOOP_HOME%%/webapps/job/job_authorization_error.jsp +%%HADOOP_HOME%%/webapps/job/index.html +%%HADOOP_HOME%%/webapps/job/gethistory.jsp +%%HADOOP_HOME%%/webapps/job/analysejobhistory.jsp +%%HADOOP_HOME%%/webapps/job/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/history/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/hdfs/index.html +%%HADOOP_HOME%%/webapps/hdfs/WEB-INF/web.xml +%%HADOOP_HOME%%/webapps/datanode/WEB-INF/web.xml +%%EXAMPLESDIR%%/hadoop-examples-%%PORTVERSION%%.jar +%%EXAMPLESDIR%%/core-default.xml +%%EXAMPLESDIR%%/hdfs-default.xml +%%EXAMPLESDIR%%/mapred-default.xml 
+%%EXAMPLESDIR%%/conf/masters +%%EXAMPLESDIR%%/conf/slaves +%%EXAMPLESDIR%%/conf/ssl-client.xml.example +%%EXAMPLESDIR%%/conf/ssl-server.xml.example +@unexec if cmp -s %D/%%HADOOP_ETC%%/capacity-scheduler.xml %D/%%EXAMPLESDIR%%/conf/capacity-scheduler.xml; then rm -f %D/%%HADOOP_ETC%%/capacity-scheduler.xml; fi +%%EXAMPLESDIR%%/conf/capacity-scheduler.xml +@exec [ -f %D/%%HADOOP_ETC%%/capacity-scheduler.xml ] || cp %D/%%EXAMPLESDIR%%/conf/capacity-scheduler.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/configuration.xsl %D/%%EXAMPLESDIR%%/conf/configuration.xsl; then rm -f %D/%%HADOOP_ETC%%/configuration.xsl; fi +%%EXAMPLESDIR%%/conf/configuration.xsl +@exec [ -f %D/%%HADOOP_ETC%%/configuration.xsl ] || cp %D/%%EXAMPLESDIR%%/conf/configuration.xsl %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/core-site.xml %D/%%EXAMPLESDIR%%/conf/core-site.xml; then rm -f %D/%%HADOOP_ETC%%/core-site.xml; fi +%%EXAMPLESDIR%%/conf/core-site.xml +@exec [ -f %D/%%HADOOP_ETC%%/core-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/core-site.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-env.sh %D/%%EXAMPLESDIR%%/conf/hadoop-env.sh; then rm -f %D/%%HADOOP_ETC%%/hadoop-env.sh; fi +%%EXAMPLESDIR%%/conf/hadoop-env.sh +@exec [ -f %D/%%HADOOP_ETC%%/hadoop-env.sh ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-env.sh %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-metrics2.properties %D/%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties; then rm -f %D/%%HADOOP_ETC%%/hadoop-metrics2.properties; fi +%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties +@exec [ -f %D/%%HADOOP_ETC%%/hadoop-metrics2.properties ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-metrics2.properties %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hadoop-policy.xml %D/%%EXAMPLESDIR%%/conf/hadoop-policy.xml; then rm -f %D/%%HADOOP_ETC%%/hadoop-policy.xml; fi +%%EXAMPLESDIR%%/conf/hadoop-policy.xml +@exec [ -f %D/%%HADOOP_ETC%%/hadoop-policy.xml ] || cp %D/%%EXAMPLESDIR%%/conf/hadoop-policy.xml 
%D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/hdfs-site.xml %D/%%EXAMPLESDIR%%/conf/hdfs-site.xml; then rm -f %D/%%HADOOP_ETC%%/hdfs-site.xml; fi +%%EXAMPLESDIR%%/conf/hdfs-site.xml +@exec [ -f %D/%%HADOOP_ETC%%/hdfs-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/hdfs-site.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/log4j.properties %D/%%EXAMPLESDIR%%/conf/log4j.properties; then rm -f %D/%%HADOOP_ETC%%/log4j.properties; fi +%%EXAMPLESDIR%%/conf/log4j.properties +@exec [ -f %D/%%HADOOP_ETC%%/log4j.properties ] || cp %D/%%EXAMPLESDIR%%/conf/log4j.properties %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/mapred-queue-acls.xml %D/%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml; then rm -f %D/%%HADOOP_ETC%%/mapred-queue-acls.xml; fi +%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml +@exec [ -f %D/%%HADOOP_ETC%%/mapred-queue-acls.xml ] || cp %D/%%EXAMPLESDIR%%/conf/mapred-queue-acls.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/mapred-site.xml %D/%%EXAMPLESDIR%%/conf/mapred-site.xml; then rm -f %D/%%HADOOP_ETC%%/mapred-site.xml; fi +%%EXAMPLESDIR%%/conf/mapred-site.xml +@exec [ -f %D/%%HADOOP_ETC%%/mapred-site.xml ] || cp %D/%%EXAMPLESDIR%%/conf/mapred-site.xml %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/taskcontroller.cfg %D/%%EXAMPLESDIR%%/conf/taskcontroller.cfg; then rm -f %D/%%HADOOP_ETC%%/taskcontroller.cfg; fi +%%EXAMPLESDIR%%/conf/taskcontroller.cfg +@exec [ -f %D/%%HADOOP_ETC%%/taskcontroller.cfg ] || cp %D/%%EXAMPLESDIR%%/conf/taskcontroller.cfg %D/%%HADOOP_ETC%% +@unexec if cmp -s %D/%%HADOOP_ETC%%/fair-scheduler.xml %D/%%EXAMPLESDIR%%/conf/fair-scheduler.xml; then rm -f %D/%%HADOOP_ETC%%/fair-scheduler.xml; fi +%%EXAMPLESDIR%%/conf/fair-scheduler.xml +@exec [ -f %D/%%HADOOP_ETC%%/fair-scheduler.xml ] || cp %D/%%EXAMPLESDIR%%/conf/fair-scheduler.xml %D/%%HADOOP_ETC%% +include/hadoop/StringUtils.hh +include/hadoop/SerialUtils.hh +include/hadoop/Pipes.hh +include/hadoop/TemplateFactory.hh +lib/libhadooputils.a 
+lib/libhadooppipes.a +lib/libhdfs.so.0 +lib/libhdfs.so +lib/libhdfs.la +@exec mkdir -p %D/%%HADOOP_HOME%%/webapps/secondary/WEB-INF +@exec mkdir -p %D/%%HADOOP_HOME%%/contrib/hdfsproxy/logs +@dirrm %%EXAMPLESDIR%%/conf +@dirrm %%EXAMPLESDIR%% +@dirrm %%HADOOP_HOME%%/lib/native/Linux-i386-32 +@dirrm %%HADOOP_HOME%%/lib/native/Linux-amd64-64 +@dirrm %%HADOOP_HOME%%/lib/native/%%HADOOP_ARCH%% +@dirrm %%HADOOP_HOME%%/lib/native +@dirrm %%HADOOP_HOME%%/lib/jsp-2.1 +@dirrm %%HADOOP_HOME%%/lib/jdiff +@dirrm %%HADOOP_HOME%%/lib +@dirrm %%HADOOP_HOME%%/bin +@dirrm %%HADOOP_HOME%%/webapps/task/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/task +@dirrm %%HADOOP_HOME%%/webapps/static +@dirrm %%HADOOP_HOME%%/webapps/secondary/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/secondary +@dirrm %%HADOOP_HOME%%/webapps/job/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/job +@dirrm %%HADOOP_HOME%%/webapps/history/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/history +@dirrm %%HADOOP_HOME%%/webapps/hdfs/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/hdfs +@dirrm %%HADOOP_HOME%%/webapps/datanode/WEB-INF +@dirrm %%HADOOP_HOME%%/webapps/datanode +@dirrm %%HADOOP_HOME%%/webapps/ +@dirrm %%HADOOP_HOME%%/contrib/vaidya/conf +@dirrm %%HADOOP_HOME%%/contrib/vaidya/bin +@dirrm %%HADOOP_HOME%%/contrib/vaidya +@dirrm %%HADOOP_HOME%%/contrib/streaming +@dirrm %%HADOOP_HOME%%/contrib/index +@dirrm %%HADOOP_HOME%%/contrib/hod/testing +@dirrm %%HADOOP_HOME%%/contrib/hod/support +@dirrm %%HADOOP_HOME%%/contrib/hod/ivy +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/ServiceRegistry +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/ServiceProxy +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Schedulers +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/RingMaster +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/NodePools +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/HodRing +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Hod +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/GridServices +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib/Common +@dirrm 
%%HADOOP_HOME%%/contrib/hod/hodlib/AllocationManagers +@dirrm %%HADOOP_HOME%%/contrib/hod/hodlib +@dirrm %%HADOOP_HOME%%/contrib/hod/conf +@dirrm %%HADOOP_HOME%%/contrib/hod/bin +@dirrm %%HADOOP_HOME%%/contrib/hod +@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/logs +@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/conf +@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy/bin +@dirrm %%HADOOP_HOME%%/contrib/hdfsproxy +@dirrm %%HADOOP_HOME%%/contrib/gridmix +@dirrm %%HADOOP_HOME%%/contrib/failmon +@dirrm %%HADOOP_HOME%%/contrib/datajoin +@dirrm %%HADOOP_HOME%%/contrib/ +@dirrm %%HADOOP_HOME%% +@dirrm include/hadoop +@dirrmtry %%HADOOP_ETC%%/envvars.d +@dirrmtry %%HADOOP_ETC%% Index: ports/devel/hadoop/files/000.java_home.env.in =================================================================== RCS file: ports/devel/hadoop/files/000.java_home.env.in diff -N ports/devel/hadoop/files/000.java_home.env.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/000.java_home.env.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,2 @@ +export JAVA_HOME=${JAVA_HOME:-%%JAVA_HOME%%} + Index: ports/devel/hadoop/files/datanode.in =================================================================== RCS file: ports/devel/hadoop/files/datanode.in diff -N ports/devel/hadoop/files/datanode.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/datanode.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# + +# PROVIDE: datanode +# REQUIRE: NETWORKING SERVERS +# BEFORE: DAEMON +# KEYWORD: shutdown + +datanode_enable="${datanode_enable:-"NO"}" +datanode_user="${datanode_user:-%%HADOOP_USER%%}" + +. /etc/rc.subr + +name="datanode" +rcvar=`set_rcvar` + +load_rc_config "${name}" + +for i in `ls %%HADOOP_ETC%%/envvars.d/*.env` +do + . 
${i} +done + +export HADOOP_PID_DIR=/var/run/hadoop +export HADOOP_IDENT_STRING=ports +command="/usr/sbin/daemon" +flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start datanode" + +stop_cmd=hadoop_stop + +hadoop_stop () { + su -m ${datanode_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop datanode"' +} + +run_rc_command "$1" + Index: ports/devel/hadoop/files/hadoop.in =================================================================== RCS file: ports/devel/hadoop/files/hadoop.in diff -N ports/devel/hadoop/files/hadoop.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/hadoop.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +for i in `ls %%HADOOP_ETC%%/envvars.d/*.env` +do + . ${i} +done + +exec %%HADOOP_HOME%%/bin/hadoop "$@" Index: ports/devel/hadoop/files/jobtracker.in =================================================================== RCS file: ports/devel/hadoop/files/jobtracker.in diff -N ports/devel/hadoop/files/jobtracker.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/jobtracker.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# + +# PROVIDE: jobtracker +# REQUIRE: NETWORKING SERVERS +# BEFORE: DAEMON +# KEYWORD: shutdown + +jobtracker_enable="${jobtracker_enable:-"NO"}" +jobtracker_user="${jobtracker_user:-%%HADOOP_USER%%}" + +. /etc/rc.subr + +name="jobtracker" +rcvar=`set_rcvar` + +load_rc_config "${name}" + +for i in `ls %%HADOOP_ETC%%/envvars.d/*.env` +do + . 
${i} +done + +export HADOOP_PID_DIR=/var/run/hadoop +export HADOOP_IDENT_STRING=ports +command="/usr/sbin/daemon" +flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start jobtracker" + +stop_cmd=hadoop_stop + +hadoop_stop () { + su -m ${jobtracker_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop jobtracker"' +} + +run_rc_command "$1" + Index: ports/devel/hadoop/files/namenode.in =================================================================== RCS file: ports/devel/hadoop/files/namenode.in diff -N ports/devel/hadoop/files/namenode.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/namenode.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# + +# PROVIDE: namenode +# REQUIRE: NETWORKING SERVERS +# BEFORE: DAEMON +# KEYWORD: shutdown + +namenode_enable="${namenode_enable:-"NO"}" +namenode_user="${namenode_user:-%%HADOOP_USER%%}" + +. /etc/rc.subr + +name="namenode" +rcvar=`set_rcvar` + +load_rc_config "${name}" + +for i in `ls %%HADOOP_ETC%%/envvars.d/*.env` +do + . 
${i} +done + +export HADOOP_PID_DIR=/var/run/hadoop +export HADOOP_IDENT_STRING=ports +command="/usr/sbin/daemon" +flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start namenode" + +stop_cmd=hadoop_stop + +hadoop_stop () { + su -m ${namenode_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop namenode"' +} + +run_rc_command "$1" + Index: ports/devel/hadoop/files/patch-build.xml =================================================================== RCS file: ports/devel/hadoop/files/patch-build.xml diff -N ports/devel/hadoop/files/patch-build.xml --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-build.xml 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,80 @@ +--- build.xml.orig 2011-05-04 08:30:16.000000000 +0200 ++++ build.xml 2011-08-08 00:08:22.000000000 +0200 +@@ -372,7 +372,7 @@ + + + +- ++ + + + +- + + +@@ -1669,7 +1669,7 @@ + + + +- ++ + + + +@@ -1703,7 +1703,7 @@ + + + +- ++ + + +@@ -1726,7 +1726,7 @@ + + + +- ++ + + + +@@ -1747,7 +1747,7 @@ + + + +- ++ + + + +@@ -2326,5 +2326,23 @@ + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + ++ ++ ++ ++ ++ ++ ++ + Index: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c diff -N ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,11 @@ +--- src/c++/libhdfs/hdfs.c.orig 2011-08-07 16:38:59.000000000 +0200 ++++ src/c++/libhdfs/hdfs.c 2011-08-07 16:39:18.000000000 +0200 +@@ -252,7 +252,7 @@ + cURI = malloc(strlen(host)+16); + sprintf(cURI, "hdfs://%s:%d", host, (int)(port)); + if (cURI == NULL) { +- fprintf (stderr, "Couldn't allocate an object of size %d", ++ fprintf (stderr, "Couldn't allocate an object of size %llu", + strlen(host) + 16); + errno = EINTERNAL; + goto done; 
Index: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c diff -N ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,11 @@ +--- src/c++/libhdfs/hdfsJniHelper.c.orig 2011-05-04 08:30:16.000000000 +0200 ++++ src/c++/libhdfs/hdfsJniHelper.c 2011-08-07 16:40:54.000000000 +0200 +@@ -15,7 +15,7 @@ + */ + + #include +-#include ++//#include + #include "hdfsJniHelper.h" + + static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER; Index: ports/devel/hadoop/files/patch-src__native__Makefile.in =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__Makefile.in diff -N ports/devel/hadoop/files/patch-src__native__Makefile.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__Makefile.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,105 @@ +--- src/native/Makefile.in.orig 2011-05-04 08:30:16.000000000 +0200 ++++ src/native/Makefile.in 2011-12-09 10:38:40.000000000 +0100 +@@ -92,10 +92,7 @@ + libLTLIBRARIES_INSTALL = $(INSTALL) + LTLIBRARIES = $(lib_LTLIBRARIES) + libhadoop_la_DEPENDENCIES = +-am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo \ +- getGroup.lo JniBasedUnixGroupsMapping.lo \ +- JniBasedUnixGroupsNetgroupMapping.lo file_descriptor.lo \ +- errno_enum.lo NativeIO.lo ++am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo + libhadoop_la_OBJECTS = $(am_libhadoop_la_OBJECTS) + DEFAULT_INCLUDES = -I. -I$(srcdir) -I. 
+ depcomp = $(SHELL) $(top_srcdir)/config/depcomp +@@ -223,24 +220,15 @@ + sysconfdir = @sysconfdir@ + target_alias = @target_alias@ + AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \ +- -Isrc/org/apache/hadoop/io/compress/zlib \ +- -Isrc/org/apache/hadoop/io/nativeio \ +- -Isrc/org/apache/hadoop/security ++ -Isrc/org/apache/hadoop/io/compress/zlib + + AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL) + AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL) + lib_LTLIBRARIES = libhadoop.la + libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \ +- src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \ +- src/org/apache/hadoop/security/getGroup.c \ +- src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \ +- src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \ +- src/org/apache/hadoop/io/nativeio/file_descriptor.c \ +- src/org/apache/hadoop/io/nativeio/errno_enum.c \ +- src/org/apache/hadoop/io/nativeio/NativeIO.c +- ++ src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c + libhadoop_la_LDFLAGS = -version-info 1:0:0 +-libhadoop_la_LIBADD = -ldl -ljvm ++libhadoop_la_LIBADD = -ljvm + all: config.h + $(MAKE) $(AM_MAKEFLAGS) all-am + +@@ -332,14 +320,8 @@ + distclean-compile: + -rm -f *.tab.c + +-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsMapping.Plo@am__quote@ +-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Plo@am__quote@ +-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/NativeIO.Plo@am__quote@ + @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibCompressor.Plo@am__quote@ + @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibDecompressor.Plo@am__quote@ +-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/errno_enum.Plo@am__quote@ +-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/file_descriptor.Plo@am__quote@ +-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/getGroup.Plo@am__quote@ + + .c.o: + @am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP 
-MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \ +@@ -376,47 +358,6 @@ + @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ + @am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o ZlibDecompressor.lo `test -f 'src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c + +-getGroup.lo: src/org/apache/hadoop/security/getGroup.c +-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT getGroup.lo -MD -MP -MF "$(DEPDIR)/getGroup.Tpo" -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c; \ +-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/getGroup.Tpo" "$(DEPDIR)/getGroup.Plo"; else rm -f "$(DEPDIR)/getGroup.Tpo"; exit 1; fi +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/getGroup.c' object='getGroup.lo' libtool=yes @AMDEPBACKSLASH@ +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c +- +-JniBasedUnixGroupsMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c +-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT JniBasedUnixGroupsMapping.lo -MD -MP -MF "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo 
'$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c; \ +-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" "$(DEPDIR)/JniBasedUnixGroupsMapping.Plo"; else rm -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo"; exit 1; fi +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' object='JniBasedUnixGroupsMapping.lo' libtool=yes @AMDEPBACKSLASH@ +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c +- +-JniBasedUnixGroupsNetgroupMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c +-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT JniBasedUnixGroupsNetgroupMapping.lo -MD -MP -MF "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo" -c -o JniBasedUnixGroupsNetgroupMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c; \ +-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo" "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Plo"; else rm -f "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo"; exit 1; fi +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' object='JniBasedUnixGroupsNetgroupMapping.lo' libtool=yes @AMDEPBACKSLASH@ +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) 
$(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsNetgroupMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c +- +-file_descriptor.lo: src/org/apache/hadoop/io/nativeio/file_descriptor.c +-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT file_descriptor.lo -MD -MP -MF "$(DEPDIR)/file_descriptor.Tpo" -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c; \ +-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/file_descriptor.Tpo" "$(DEPDIR)/file_descriptor.Plo"; else rm -f "$(DEPDIR)/file_descriptor.Tpo"; exit 1; fi +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/file_descriptor.c' object='file_descriptor.lo' libtool=yes @AMDEPBACKSLASH@ +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c +- +-errno_enum.lo: src/org/apache/hadoop/io/nativeio/errno_enum.c +-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT errno_enum.lo -MD -MP -MF "$(DEPDIR)/errno_enum.Tpo" -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c; \ +-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/errno_enum.Tpo" 
"$(DEPDIR)/errno_enum.Plo"; else rm -f "$(DEPDIR)/errno_enum.Tpo"; exit 1; fi +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/errno_enum.c' object='errno_enum.lo' libtool=yes @AMDEPBACKSLASH@ +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c +- +-NativeIO.lo: src/org/apache/hadoop/io/nativeio/NativeIO.c +-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT NativeIO.lo -MD -MP -MF "$(DEPDIR)/NativeIO.Tpo" -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c; \ +-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/NativeIO.Tpo" "$(DEPDIR)/NativeIO.Plo"; else rm -f "$(DEPDIR)/NativeIO.Tpo"; exit 1; fi +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/NativeIO.c' object='NativeIO.lo' libtool=yes @AMDEPBACKSLASH@ +-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c + + mostlyclean-libtool: + -rm -f *.lo Index: ports/devel/hadoop/files/patch-src__native__configure =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__configure diff -N ports/devel/hadoop/files/patch-src__native__configure --- 
/dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__configure 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,11 @@ +--- src/native/configure.orig 2011-12-08 23:11:17.000000000 +0100 ++++ src/native/configure 2011-12-08 23:11:50.000000000 +0100 +@@ -20504,7 +20504,7 @@ + echo 'int main(int argc, char **argv){return 0;}' > conftest.c + if test -z "`${CC} ${LDFLAGS} -o conftest conftest.c -lz 2>&1`"; then + if test ! -z "`which objdump | grep -v 'no objdump'`"; then +- ac_cv_libname_z="`objdump -p conftest | grep NEEDED | grep z | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`" ++ ac_cv_libname_z="`objdump -p conftest | grep NEEDED | grep z | gsed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`" + elif test ! -z "`which ldd | grep -v 'no ldd'`"; then + ac_cv_libname_z="`ldd conftest | grep z | sed 's/^[^A-Za-z0-9]*\([A-Za-z0-9\.]*\)[^A-Za-z0-9]*=>.*$/\"\1\"/'`" + else Index: ports/devel/hadoop/files/patch-src__native__configure.ac =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__configure.ac diff -N ports/devel/hadoop/files/patch-src__native__configure.ac --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__configure.ac 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,13 @@ +--- src/native/configure.ac.orig 2011-05-04 08:30:16.000000000 +0200 ++++ src/native/configure.ac 2011-08-07 16:17:58.000000000 +0200 +@@ -46,10 +46,6 @@ + AC_PROG_CC + AC_PROG_LIBTOOL + +-# Checks for libraries. 
+-dnl Check for '-ldl' +-AC_CHECK_LIB([dl], [dlopen]) +- + dnl Check for '-ljvm' + JNI_LDFLAGS="" + if test $JAVA_HOME != "" Index: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c diff -N ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,11 @@ +--- src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.orig 2011-08-10 13:43:50.000000000 +0200 ++++ src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c 2011-08-10 13:44:32.000000000 +0200 +@@ -188,7 +188,7 @@ + if (flags & O_CREAT) { + fd = open(path, flags, mode); + } else { +- fd = open(path, flags); ++ fd = open(path, flags | O_CREAT, 0666); + } + + if (fd == -1) { Index: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c =================================================================== RCS file: ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c diff -N ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,12 @@ +--- src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.orig 2011-08-07 16:43:00.000000000 +0200 ++++ src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c 2011-08-07 16:43:30.000000000 +0200 +@@ -73,7 +73,8 @@ + // was successfull or not (as
long as it was called we need to call + // endnetgrent) + setnetgrentCalledFlag = 1; +- if(setnetgrent(cgroup) == 1) { ++ setnetgrent(cgroup); ++ if(1 == 1) { + UserList *current = NULL; + // three pointers are for host, user, domain, we only care + // about user now Index: ports/devel/hadoop/files/pkg-deinstall.in =================================================================== RCS file: ports/devel/hadoop/files/pkg-deinstall.in diff -N ports/devel/hadoop/files/pkg-deinstall.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/pkg-deinstall.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,14 @@ +#!/bin/sh +# $FreeBSD$ + +PATH="/bin:/sbin:/usr/bin:/usr/sbin" + +RUNDIR=%%HADOOP_RUNDIR%% +LOGDIR=%%HADOOP_LOGDIR%% + +if [ "$2" = "POST-DEINSTALL" ]; then + echo "=> Deleting ${RUNDIR} if empty..." + rm -d ${RUNDIR} 2>/dev/null || true + echo "=> Deleting ${LOGDIR} if empty..." + rm -d ${LOGDIR} 2>/dev/null || true +fi Index: ports/devel/hadoop/files/pkg-install.in =================================================================== RCS file: ports/devel/hadoop/files/pkg-install.in diff -N ports/devel/hadoop/files/pkg-install.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/pkg-install.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,55 @@ +#!/bin/sh +# $FreeBSD$ +PATH="/bin:/sbin:/usr/bin:/usr/sbin" + +HADOOP_USER=%%HADOOP_USER%% +HADOOP_GROUP=%%HADOOP_GROUP%% +UID=%%HADOOP_UID%% +GID=%%HADOOP_GID%% + +RUNDIR=%%HADOOP_RUNDIR%% +LOGDIR=%%HADOOP_LOGDIR%% + +PW="pw" +CHOWN="chown" +INSTALL_DIR="install -d -o ${HADOOP_USER} -g ${HADOOP_GROUP} -m 0755" + +if [ "$2" = "PRE-INSTALL" ]; then + +# if ! ${PW} groupshow ${HADOOP_GROUP} 2>/dev/null 1>&2; then +# if ${PW} groupadd ${HADOOP_GROUP} -g $GID; then +# echo "=> Added group \"${HADOOP_GROUP}\"." +# else +# echo "=> Adding group \"${HADOOP_GROUP}\" failed..." +# exit 1 +# fi +# fi + +# if ! 
${PW} usershow ${HADOOP_USER} 2>/dev/null 1>&2; then +# if ${PW} useradd ${HADOOP_USER} -u $UID -g ${HADOOP_GROUP} -h - \ +# -s "/sbin/nologin" -d "/nonexistent" \ +# -c "Hadoop Daemons"; then +# echo "=> Added user \"${HADOOP_USER}\"." +# else +# echo "=> Adding user \"${HADOOP_USER}\" failed..." +# exit 1 +# fi +# fi + sleep 1 + +elif [ "$2" = "POST-INSTALL" ]; then + if [ -d ${RUNDIR} ]; then + echo "=> ${RUNDIR} already exists." + else + echo -n "=> Creating RUNDIR ${RUNDIR}... " + ${INSTALL_DIR} ${RUNDIR} || echo "failed" + fi + if [ -d ${LOGDIR} ]; then + echo "=> ${LOGDIR} already exists." + else + echo -n "=> Creating LOGDIR ${LOGDIR}... " + ${INSTALL_DIR} ${LOGDIR} || echo "failed" + fi +fi + +exit 0 Index: ports/devel/hadoop/files/secondarynamenode.in =================================================================== RCS file: ports/devel/hadoop/files/secondarynamenode.in diff -N ports/devel/hadoop/files/secondarynamenode.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/secondarynamenode.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# + +# PROVIDE: secondarynamenode +# REQUIRE: NETWORKING SERVERS +# BEFORE: DAEMON +# KEYWORD: shutdown + +secondarynamenode_enable="${secondarynamenode_enable:-"NO"}" +secondarynamenode_user="${secondarynamenode_user:-%%HADOOP_USER%%}" + +. /etc/rc.subr + +name="secondarynamenode" +rcvar=`set_rcvar` + +load_rc_config "${name}" + +for i in `ls %%HADOOP_ETC%%/envvars.d/*.env` +do + .
${i} +done + +export HADOOP_PID_DIR=/var/run/hadoop +export HADOOP_IDENT_STRING=ports +command="/usr/sbin/daemon" +flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start secondarynamenode" + +stop_cmd=hadoop_stop + +hadoop_stop () { + su -m ${secondarynamenode_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop secondarynamenode"' +} + +run_rc_command "$1" + Index: ports/devel/hadoop/files/tasktracker.in =================================================================== RCS file: ports/devel/hadoop/files/tasktracker.in diff -N ports/devel/hadoop/files/tasktracker.in --- /dev/null 1 Jan 1970 00:00:00 -0000 +++ ports/devel/hadoop/files/tasktracker.in 1 Feb 2012 16:25:01 -0000 @@ -0,0 +1,38 @@ +#!/bin/sh +# +# $FreeBSD$ +# + +# PROVIDE: tasktracker +# REQUIRE: NETWORKING SERVERS +# BEFORE: DAEMON +# KEYWORD: shutdown + +tasktracker_enable="${tasktracker_enable:-"NO"}" +tasktracker_user="${tasktracker_user:-%%HADOOP_USER%%}" + +. /etc/rc.subr + +name="tasktracker" +rcvar=`set_rcvar` + +load_rc_config "${name}" + +for i in `ls %%HADOOP_ETC%%/envvars.d/*.env` +do + . ${i} +done + +export HADOOP_PID_DIR=/var/run/hadoop + +command="/usr/sbin/daemon" +flags="-c %%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start tasktracker" + +stop_cmd=hadoop_stop + +hadoop_stop () { + su -m ${tasktracker_user} -c 'sh -c "%%HADOOP_HOME%%/bin/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} stop tasktracker"' +} + +run_rc_command "$1" +