# Created by: Clement Laforet <clement@FreeBSD.org>
# $FreeBSD$

PORTNAME=	hadoop
PORTVERSION=	1.2.1
PORTREVISION=	1
CATEGORIES=	devel java
MASTER_SITES=	${MASTER_SITE_APACHE}
MASTER_SITE_SUBDIR=	${PORTNAME}/core/${PORTNAME}-${PORTVERSION}
PKGNAMEPREFIX=	apache-
DIST_SUBDIR=	hadoop

MAINTAINER=	demon@FreeBSD.org
COMMENT=	Apache Map/Reduce framework

LICENSE=	APACHE20

BUILD_DEPENDS=	gmake:${PORTSDIR}/devel/gmake \
		gsed:${PORTSDIR}/textproc/gsed
RUN_DEPENDS=	bash:${PORTSDIR}/shells/bash

USE_JAVA=	yes
JAVA_VERSION=	1.7
USE_ANT=	yes
ONLY_FOR_ARCHS=	amd64 i386
USE_LDCONFIG=	yes

OPTIONS_DEFINE=	DOCS

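# Log and PID directories for the Hadoop daemons (exported below as
# HADOOP_LOG_DIR and HADOOP_PID_DIR and created at install time)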
HADOOP_LOGDIR=	/var/log/${PORTNAME}
HADOOP_RUNDIR=	/var/run/${PORTNAME}

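# Unprivileged user and group created for the Hadoop daemons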
HADOOP_USER=	hadoop
HADOOP_GROUP=	hadoop
USERS=		${HADOOP_USER}
GROUPS=		${HADOOP_GROUP}

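# rc.d scripts for the five daemons, plus a hadoop wrapper script and a
# JAVA_HOME environment snippet generated from SUB_FILES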
SUB_FILES=	hadoop 000.java_home.env
USE_RC_SUBR=	tasktracker jobtracker datanode namenode secondarynamenode

PLIST_SUB=	PORTVERSION="${PORTVERSION}" \
		HADOOP_LOGDIR="${HADOOP_LOGDIR}" \
		HADOOP_RUNDIR="${HADOOP_RUNDIR}" \
		HADOOP_USER="${HADOOP_USER}" \
		HADOOP_GROUP="${HADOOP_GROUP}"
SUB_LIST=	HADOOP_USER="${HADOOP_USER}" \
		HADOOP_GROUP="${HADOOP_GROUP}" \
		JAVA_HOME="${JAVA_HOME}"

PORTDOCS=	*

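# Files touched up in post-patch: configure scripts that should use
# $LTCC instead of a hard-coded gcc, install scripts lacking the
# execute bit, and doc directories that need a placeholder file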
FIX_GCC=	src/c++/libhdfs/configure src/c++/pipes/configure \
		src/c++/task-controller/configure src/c++/utils/configure \
		src/examples/pipes/configure src/native/configure
FIX_PERMS=	src/c++/pipes/install-sh src/c++/utils/install-sh \
		src/c++/libhdfs/install-sh src/c++/libhdfs/tests/test-libhdfs.sh
FIX_DOCS=	docs/jdiff docs/skin/css docs/skin/scripts docs/skin/translations

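# Top-level documentation, sample default configs, the runtime
# distribution tree and the stock configuration files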
DOC=		CHANGES.txt LICENSE.txt NOTICE.txt README.txt

DEFAULTS=	src/core/core-default.xml src/hdfs/hdfs-default.xml src/mapred/mapred-default.xml hadoop-examples-${PORTVERSION}.jar

DIST=		bin contrib hadoop-ant-${PORTVERSION}.jar hadoop-core-${PORTVERSION}.jar \
		hadoop-test-${PORTVERSION}.jar hadoop-tools-${PORTVERSION}.jar lib webapps
CONF=		capacity-scheduler.xml configuration.xsl core-site.xml hadoop-env.sh hadoop-metrics2.properties \
		hadoop-policy.xml hdfs-site.xml log4j.properties mapred-queue-acls.xml mapred-site.xml taskcontroller.cfg

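# Build the native (JNI) library, libhdfs and the C++ pipes/utils code
# through ant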
MAKE_ARGS=	-Dbuild.classes=${WRKSRC}/hadoop-core-${PORTVERSION}.jar -Dversion=${PORTVERSION} -Dcompile.native=true \
		-Dcompile.c++=true -Dmake.cmd=gmake -Dlibhdfs=1 -Dlibrecordio=true  -Dskip.record-parser=true
ALL_TARGET=	compile-core-native compile-c++ compile-c++-libhdfs compile-c++-pipes compile-c++-utils

.include <bsd.port.options.mk>

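# Hadoop names its native build output after the platform and word size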
.if ${ARCH} == "amd64"
_HADOOP_ARCH=	FreeBSD-amd64-64
.else
_HADOOP_ARCH=	FreeBSD-${ARCH}-32
.endif
PLIST_SUB+=	HADOOP_ARCH=${_HADOOP_ARCH}

post-patch:
	@${REINPLACE_CMD} -e 's/ -DCPU=\\\\\\".*\\\\\\"//' \
		${WRKSRC}/src/c++/libhdfs/configure
.for f in ${FIX_GCC}
	@${REINPLACE_CMD} -e 's/`gcc/`$$LTCC/' ${WRKSRC}/${f}
.endfor
.for f in ${FIX_PERMS}
	@${CHMOD} +x ${WRKSRC}/${f}
.endfor
.for d in ${FIX_DOCS}
	@${TOUCH} ${WRKSRC}/${d}/.empty
.endfor

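# Collect the native libraries and headers built by ant into
# ${WRKSRC}/c++ so do-install can stage them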
post-build:
	@cd ${WRKSRC} && ${ANT} FreeBSD-dist
	@${TAR} -cf - -C ${WRKSRC}/build/c++/ ${_HADOOP_ARCH}/lib ${_HADOOP_ARCH}/include | ${TAR} -xf - -C ${WRKSRC}/c++/

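# Install the distribution under ${DATADIR}, environment snippets under
# ${ETCDIR}/envvars.d, native libs and hdfs.h under ${PREFIX}, sample
# configs under ${EXAMPLESDIR}, and create the log/run directories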
do-install:
	${MKDIR} ${STAGEDIR}${DATADIR}
	${TAR} -cf - -C ${WRKSRC} ${DIST} | ${TAR} -xf - -C ${STAGEDIR}${DATADIR}

	${MKDIR} ${STAGEDIR}${ETCDIR}/envvars.d
	${INSTALL_DATA} ${WRKDIR}/000.java_home.env ${STAGEDIR}${ETCDIR}/envvars.d
	${ECHO} "export HADOOP_PREFIX=${DATADIR}" > ${STAGEDIR}${ETCDIR}/envvars.d/001.hadoop_home.env
	${ECHO} "export HADOOP_CONF_DIR=${ETCDIR}" > ${STAGEDIR}${ETCDIR}/envvars.d/002.hadoop_conf.env
	${ECHO} "export HADOOP_LOG_DIR=${HADOOP_LOGDIR}" > ${STAGEDIR}${ETCDIR}/envvars.d/003.hadoop_log.env
	${ECHO} "export HADOOP_PID_DIR=${HADOOP_RUNDIR}" > ${STAGEDIR}${ETCDIR}/envvars.d/004.hadoop_run.env
	${MKDIR} ${STAGEDIR}${EXAMPLESDIR}
	${TAR} -cf - -C ${WRKSRC}/c++/${_HADOOP_ARCH} include lib | ${TAR} -xf - -C ${STAGEDIR}${PREFIX}
	${INSTALL_DATA} ${WRKSRC}/src/c++/libhdfs/hdfs.h ${STAGEDIR}${PREFIX}/include/hadoop/

.for f in ${DEFAULTS}
	${INSTALL_DATA} ${WRKSRC}/${f} ${STAGEDIR}${EXAMPLESDIR}
.endfor

.if ${PORT_OPTIONS:MDOCS}
	${MKDIR} ${STAGEDIR}${DOCSDIR}
	cd ${WRKSRC} && ${COPYTREE_SHARE} docs ${STAGEDIR}${DOCSDIR}
.for f in ${DOC}
	${INSTALL_DATA} ${WRKSRC}/${f} ${STAGEDIR}${DOCSDIR}
.endfor
.endif
	${INSTALL_SCRIPT} ${WRKDIR}/hadoop ${STAGEDIR}${PREFIX}/bin
	cd ${WRKSRC} && ${COPYTREE_SHARE} conf ${STAGEDIR}${EXAMPLESDIR}
	${MKDIR} ${STAGEDIR}${HADOOP_LOGDIR}
	${MKDIR} ${STAGEDIR}${HADOOP_RUNDIR}

.include <bsd.port.mk>