CentOS 6
network
$ system-config-network
passwordless SSH
[root@0a90a489]
rm -rf /root/.ssh;mkdir /root/.ssh;chmod 700 /root/.ssh;ssh-keygen -P '' -f /root/.ssh/id_rsa;chmod 600 /root/.ssh/*
ls -ald /root/.ssh;ls -al /root/.ssh/*
|
[root@0a90a48a]
rm -rf /root/.ssh;mkdir /root/.ssh; chmod 700 /root/.ssh
scp master:/root/.ssh/id_rsa.pub /root/.ssh/authorized_keys
chmod 600 /root/.ssh/authorized_keys;restorecon -R -v /root/.ssh
ls -ald /root/.ssh;ls -al /root/.ssh/*
|
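Once the hosts file below is in place, a quick sanity check that key-based login works from the master (no password prompt; the remote hostname is printed):
[root@0a90a489] ssh 0a90a48a hostname
|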
hosts file
[root@0a90a489] cat /etc/hosts
127.0.0.1 localhost
10.144.164.134 kdc.cht.local
10.144.164.137 0a90a489.cht.local 0a90a489
10.144.164.138 0a90a48a.cht.local 0a90a48a
10.144.164.139 0a90a48b.cht.local 0a90a48b
|
[root@0a90a489] vim scpfile.sh
#!/bin/sh
# push a file to every slave at the same path, then list it there to confirm
hosts="0a90a48a 0a90a48b"
for i in $hosts
do
  scp $1 $i:$1
  ssh $i "ls -l $1"
done
|
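With the script in place, any file can be pushed to every slave and verified in one shot, e.g.:
[root@0a90a489] sh scpfile.sh /etc/hosts
|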
NTP
set up the NTP client on all hosts
[root@0a90a489] vim /etc/ntp.conf
driftfile /var/lib/ntp/drift
restrict default kod nomodify notrap nopeer noquery
restrict -6 default kod nomodify notrap nopeer noquery
restrict 127.0.0.1
restrict -6 ::1
restrict kdc.cht.local
server kdc.cht.local prefer
#server 0.centos.pool.ntp.org
#server 1.centos.pool.ntp.org
#server 2.centos.pool.ntp.org
includefile /etc/ntp/crypto/pw
keys /etc/ntp/keys
|
[root@0a90a489] service ntpd restart
ntpd (pid 2228) is running...
[root@0a90a489] chkconfig ntpd on
[root@0a90a489] while true; do clear; ntpstat; sleep 5; done
synchronised to NTP server (192.168.15.105) at stratum 12
time correct to within 955 ms
polling server every 64 s
|
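ntpstat only reports the sync state; peer details can be checked with the standard ntpq tool (the kdc host should be flagged with * once selected as the sync source):
[root@0a90a489] ntpq -p
|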
[root@0a90a489] sh scpfile.sh /etc/ntp.conf
[root@0a90a489] ssh 0a90a48a "service ntpd restart"
[root@0a90a489] ssh 0a90a48a "chkconfig ntpd on"
[root@0a90a489] ssh 0a90a48b "service ntpd restart"
[root@0a90a489] ssh 0a90a48b "chkconfig ntpd on"
Kerberos
set up the Kerberos client on all hosts
[root@0a90a489] scp kdc.cht.local:/etc/krb5.conf /etc/krb5.conf
[root@0a90a489] vim /etc/krb5.conf
[logging]
default = FILE:/var/log/krb5libs.log
kdc = FILE:/var/log/krb5kdc.log
admin_server = FILE:/var/log/kadmind.log
[libdefaults]
default_realm = CHT.COM.TW
dns_lookup_realm = false
dns_lookup_kdc = false
ticket_lifetime = 24h
renew_lifetime = 1d
forwardable = true
[realms]
CHT.COM.TW = {
kdc = kdc.cht.local:88
admin_server = kdc.cht.local
default_domain = cht.local
}
[domain_realm]
.cht.local = CHT.COM.TW
cht.local = CHT.COM.TW
|
[root@0a90a489] yum install krb5-workstation
Installed:
krb5-workstation.x86_64 0:1.9-33.el6_3.3
Dependency Updated:
krb5-libs.x86_64 0:1.9-33.el6_3.3
[root@master] yum info krb5-workstation
Name : krb5-workstation
Arch : x86_64
Version : 1.9
Release : 33.el6_3.3
Size : 631 k
Repo : installed
[root@0a90a4ae] kinit leorick/admin
[root@0a90a4ae] klist -e
[root@0a90a4ae] kdestroy
|
[root@0a90a489] sh scpfile.sh /etc/krb5.conf
Step 3: If you are Using AES-256 Encryption, install the JCE Policy File on all hosts
[root@0a90a489] unzip jce_policy-6.zip
[root@0a90a489] cd $JAVA_HOME/jre/lib/security
[root@0a90a489] ls -l
-rw-r--r-- 1 root root 2481 May 8 15:15 local_policy.jar
-rw-r--r-- 1 root root 2465 May 8 15:15 US_export_policy.jar
[root@0a90a489] mv local_policy.jar local_policy.jar.bak
[root@0a90a489] mv US_export_policy.jar US_export_policy.jar.bak
[root@0a90a489] cp ~/jce/local_policy.jar ./
[root@0a90a489] cp ~/jce/US_export_policy.jar ./
[root@0a90a489] ls -l
-rw-r--r-- 1 root root 2481 Jul 18 16:59 local_policy.jar
-rw-r--r-- 1 root root 2465 Jul 18 16:59 US_export_policy.jar
|
[root@0a90a489] sh scpfile.sh /usr/java/jdk1.6.0_33/jre/lib/security/local_policy.jar
[root@0a90a489] sh scpfile.sh /usr/java/jdk1.6.0_33/jre/lib/security/US_export_policy.jar
HDFS
Step 2: Verify User Accounts and Groups in CDH4 Due to Security
[root@0a90a4ae] ls -l /opt/hadoop/hdfs/dfs/name/
drwxr-xr-x 3 hdfs hdfs 4096 Jul 18 10:41 /opt/hadoop/hdfs/dfs/name/
[root@0a90a4ae] ssh slave1 ls -l /opt/hadoop/hdfs/dfs/data
drwx------ 3 hdfs hdfs 4096 Jul 18 10:41 /opt/hadoop/hdfs/dfs/data
[root@0a90a4ae] ls -ld /var/log/hadoop-hdfs
drwxrwxr-x 2 hdfs hadoop 4096 Jul 18 10:27 /var/log/hadoop-hdfs
[root@0a90a4ae] ssh slave1 ls -ld /var/log/hadoop-hdfs
drwxrwxr-x 2 hdfs hadoop 4096 Jul 18 10:27 /var/log/hadoop-hdfs
|
[root@0a90a4ae] ls -ld /opt/hadoop/mapred/local
drwxr-xr-x 3 mapred mapred 4096 Jul 18 10:27 /opt/hadoop/mapred/local
[root@0a90a4ae] ssh slave1 ls -ld /opt/hadoop/mapred/local
drwxr-xr-x 7 mapred mapred 4096 Jul 18 10:27 /opt/hadoop/mapred/local
[root@0a90a4ae] ls -ld /var/log/hadoop-0.20-mapreduce
drwxrwxr-x 3 root hadoop 4096 Jul 18 10:27 /var/log/hadoop-0.20-mapreduce
[root@0a90a4ae] ssh 0a90a48a ls -ld /var/log/hadoop-0.20-mapreduce
drwxrwxr-x 3 root hadoop 4096 Jul 18 10:27 /var/log/hadoop-0.20-mapreduce
|
Step 3: If you are Using AES-256 Encryption, install the JCE Policy File on all hosts (already done above)
Step 4: Create and Deploy the Kerberos Principals and Keytab Files
[root@kdc] sh princ_hadoop_add.sh
domain="CHT.COM.TW"
instance="0a90a489.cht.local 0a90a48a.cht.local 0a90a48b.cht.local"
principals="HTTP hdfs mapred"
echo "you should authenticate with KDC first..."
/usr/kerberos/bin/klist
# one random-key principal per service (HTTP, hdfs, mapred) per host
for i in ${instance}
do
  echo "### HOST $i"
  for j in ${principals}
  do
    /usr/sbin/kadmin.local -q "addprinc -randkey ${j}/${i}@${domain}"
  done
done
/usr/sbin/kadmin.local -q "list_principals *"
|
HTTP/0a90a489.cht.local@CHT.COM.TW
HTTP/0a90a48a.cht.local@CHT.COM.TW
HTTP/0a90a48b.cht.local@CHT.COM.TW
hdfs/0a90a489.cht.local@CHT.COM.TW
hdfs/0a90a48a.cht.local@CHT.COM.TW
hdfs/0a90a48b.cht.local@CHT.COM.TW
mapred/0a90a489.cht.local@CHT.COM.TW
mapred/0a90a48a.cht.local@CHT.COM.TW
mapred/0a90a48b.cht.local@CHT.COM.TW
|
[root@kdc] sh princ_hadoop_del.sh
domain="CHT.COM.TW"
instance="0a90a489.cht.local 0a90a48a.cht.local 0a90a48b.cht.local"
principals="HTTP hdfs mapred"
echo "you should authenticate with KDC first..."
/usr/kerberos/bin/klist
# remove the per-host service principals created by princ_hadoop_add.sh
for i in ${instance}
do
  echo "### HOST $i"
  for j in ${principals}
  do
    /usr/sbin/kadmin.local -q "delprinc -force ${j}/${i}@${domain}"
  done
done
/usr/sbin/kadmin.local -q "list_principals *"
|
[root@0a90a489] kdestroy; kinit leorick/admin@CHT.COM.TW
[root@0a90a489] sh key_hadoop_dist.sh
domain="CHT.COM.TW"
instance="0a90a489.cht.local 0a90a48a.cht.local 0a90a48b.cht.local"
echo "you should authenticate with KDC first..."
/usr/bin/klist
for i in ${instance}
do
  echo "### HOST $i"
  KEYTAB="hadoop.keytab"
  TMPFILE="/root/${KEYTAB}.${i}"
  DESFILE="/etc/hadoop/conf/${KEYTAB}"
  PRINCIPAL="HTTP/${i}@${domain} mapred/${i}@${domain} hdfs/${i}@${domain}"
  # export fresh keys for all three principals into one keytab (note: ktadd bumps the KVNO)
  rm -f ${TMPFILE}
  /usr/bin/kadmin -q "ktadd -k ${TMPFILE} ${PRINCIPAL}"
  # ship the keytab to the host and lock down ownership and mode
  scp ${TMPFILE} ${i}:${DESFILE}
  ssh ${i} "chown hdfs:hadoop ${DESFILE}"
  ssh ${i} "chmod 440 ${DESFILE}"
  ssh ${i} "ls -l ${DESFILE}"
done
|
[root@0a90a489] vim key_hadoop_list.sh
# show permissions and key enctypes for every keytab under /etc/hadoop/conf
target=`ls /etc/hadoop/conf/*.keytab`
for i in ${target}
do
  ls -l ${i}
  sudo /usr/bin/ktutil << EOF
rkt ${i}
list -e
q
EOF
done
|
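For a one-off check, klist can read a keytab directly, without the ktutil heredoc:
[root@0a90a489] klist -k -e /etc/hadoop/conf/hadoop.keytab
|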
[root@0a90a489] sh scpfile.sh /root/key_hadoop_list.sh
[root@0a90a489] sh key_hadoop_list.sh
-r--r----- 1 hdfs hadoop 1352 Jul 31 15:23 /etc/hadoop/conf/hadoop.keytab
ktutil: rkt /etc/hadoop/conf/hadoop.keytab
ktutil: list -e
slot KVNO Principal
---- ---- ---------------------------------------------------------------------
1 2 HTTP/0a90a489.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
2 2 HTTP/0a90a489.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
3 2 HTTP/0a90a489.cht.local@CHT.COM.TW (des3-cbc-sha1)
4 2 HTTP/0a90a489.cht.local@CHT.COM.TW (arcfour-hmac)
5 2 HTTP/0a90a489.cht.local@CHT.COM.TW (des-hmac-sha1)
6 2 HTTP/0a90a489.cht.local@CHT.COM.TW (des-cbc-md5)
7 2 mapred/0a90a489.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
8 2 mapred/0a90a489.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
9 2 mapred/0a90a489.cht.local@CHT.COM.TW (des3-cbc-sha1)
10 2 mapred/0a90a489.cht.local@CHT.COM.TW (arcfour-hmac)
11 2 mapred/0a90a489.cht.local@CHT.COM.TW (des-hmac-sha1)
12 2 mapred/0a90a489.cht.local@CHT.COM.TW (des-cbc-md5)
13 2 hdfs/0a90a489.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
14 2 hdfs/0a90a489.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
15 2 hdfs/0a90a489.cht.local@CHT.COM.TW (des3-cbc-sha1)
16 2 hdfs/0a90a489.cht.local@CHT.COM.TW (arcfour-hmac)
17 2 hdfs/0a90a489.cht.local@CHT.COM.TW (des-hmac-sha1)
18 2 hdfs/0a90a489.cht.local@CHT.COM.TW (des-cbc-md5)
|
[root@0a90a489] ssh 0a90a48a "sh /root/key_hadoop_list.sh"
-r--r----- 1 hdfs hadoop 1352 Jul 31 15:22 /etc/hadoop/conf/hadoop.keytab
ktutil: rkt /etc/hadoop/conf/hadoop.keytab
ktutil: list -e
slot KVNO Principal
---- ---- ---------------------------------------------------------------------
1 2 HTTP/0a90a48a.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
2 2 HTTP/0a90a48a.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
3 2 HTTP/0a90a48a.cht.local@CHT.COM.TW (des3-cbc-sha1)
4 2 HTTP/0a90a48a.cht.local@CHT.COM.TW (arcfour-hmac)
5 2 HTTP/0a90a48a.cht.local@CHT.COM.TW (des-hmac-sha1)
6 2 HTTP/0a90a48a.cht.local@CHT.COM.TW (des-cbc-md5)
7 2 mapred/0a90a48a.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
8 2 mapred/0a90a48a.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
9 2 mapred/0a90a48a.cht.local@CHT.COM.TW (des3-cbc-sha1)
10 2 mapred/0a90a48a.cht.local@CHT.COM.TW (arcfour-hmac)
11 2 mapred/0a90a48a.cht.local@CHT.COM.TW (des-hmac-sha1)
12 2 mapred/0a90a48a.cht.local@CHT.COM.TW (des-cbc-md5)
13 2 hdfs/0a90a48a.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
14 2 hdfs/0a90a48a.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
15 2 hdfs/0a90a48a.cht.local@CHT.COM.TW (des3-cbc-sha1)
16 2 hdfs/0a90a48a.cht.local@CHT.COM.TW (arcfour-hmac)
17 2 hdfs/0a90a48a.cht.local@CHT.COM.TW (des-hmac-sha1)
18 2 hdfs/0a90a48a.cht.local@CHT.COM.TW (des-cbc-md5)
|
[root@0a90a489] ssh 0a90a48b "sh /root/key_hadoop_list.sh"
-r--r----- 1 hdfs hadoop 1352 Jul 31 15:30 /etc/hadoop/conf/hadoop.keytab
ktutil: rkt /etc/hadoop/conf/hadoop.keytab
ktutil: list -e
slot KVNO Principal
---- ---- ---------------------------------------------------------------------
1 2 HTTP/0a90a48b.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
2 2 HTTP/0a90a48b.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
3 2 HTTP/0a90a48b.cht.local@CHT.COM.TW (des3-cbc-sha1)
4 2 HTTP/0a90a48b.cht.local@CHT.COM.TW (arcfour-hmac)
5 2 HTTP/0a90a48b.cht.local@CHT.COM.TW (des-hmac-sha1)
6 2 HTTP/0a90a48b.cht.local@CHT.COM.TW (des-cbc-md5)
7 2 mapred/0a90a48b.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
8 2 mapred/0a90a48b.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
9 2 mapred/0a90a48b.cht.local@CHT.COM.TW (des3-cbc-sha1)
10 2 mapred/0a90a48b.cht.local@CHT.COM.TW (arcfour-hmac)
11 2 mapred/0a90a48b.cht.local@CHT.COM.TW (des-hmac-sha1)
12 2 mapred/0a90a48b.cht.local@CHT.COM.TW (des-cbc-md5)
13 2 hdfs/0a90a48b.cht.local@CHT.COM.TW (aes256-cts-hmac-sha1-96)
14 2 hdfs/0a90a48b.cht.local@CHT.COM.TW (aes128-cts-hmac-sha1-96)
15 2 hdfs/0a90a48b.cht.local@CHT.COM.TW (des3-cbc-sha1)
16 2 hdfs/0a90a48b.cht.local@CHT.COM.TW (arcfour-hmac)
17 2 hdfs/0a90a48b.cht.local@CHT.COM.TW (des-hmac-sha1)
18 2 hdfs/0a90a48b.cht.local@CHT.COM.TW (des-cbc-md5)
|
Step 5: Shut Down the Cluster
service hbase-regionserver stop
service zookeeper-server stop
service hbase-master stop
for x in `cd /etc/init.d ; ls hadoop-*` ; do sudo service $x stop ; done
service hbase-stargate stop
service oozie stop
service hue stop
Step 6.1: Enable Hadoop Security
[root@0a90a489] vim /etc/hadoop/conf/core-site.xml
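The edit itself is not reproduced here; the two standard Hadoop security switches CDH4 expects in core-site.xml are:
<property><name>hadoop.security.authentication</name><value>kerberos</value></property>
<property><name>hadoop.security.authorization</name><value>true</value></property>
|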
[root@0a90a489] sh scpfile.sh /etc/hadoop/conf/core-site.xml
Step 7: Configure Secure HDFS
[root@0a90a489] vim /etc/hadoop/conf/hdfs-site.xml
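A minimal secure-HDFS property set for CDH4, assuming the hadoop.keytab path and CHT.COM.TW realm used throughout these notes, would be roughly:
<property><name>dfs.block.access.token.enable</name><value>true</value></property>
<property><name>dfs.namenode.keytab.file</name><value>/etc/hadoop/conf/hadoop.keytab</value></property>
<property><name>dfs.namenode.kerberos.principal</name><value>hdfs/_HOST@CHT.COM.TW</value></property>
<property><name>dfs.namenode.kerberos.internal.spnego.principal</name><value>HTTP/_HOST@CHT.COM.TW</value></property>
<property><name>dfs.datanode.keytab.file</name><value>/etc/hadoop/conf/hadoop.keytab</value></property>
<property><name>dfs.datanode.kerberos.principal</name><value>hdfs/_HOST@CHT.COM.TW</value></property>
<!-- secure DataNodes must bind privileged ports; jsvc (Step 10) performs the privileged bind -->
<property><name>dfs.datanode.address</name><value>0.0.0.0:1004</value></property>
<property><name>dfs.datanode.http.address</name><value>0.0.0.0:1006</value></property>
|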
[root@0a90a489] sh scpfile.sh /etc/hadoop/conf/hdfs-site.xml
Step 10: Set Variables for Secure DataNodes
[root@0a90a489] vim /etc/default/hadoop-hdfs-datanode
export HADOOP_SECURE_DN_USER=hdfs
export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop-hdfs
export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop-hdfs
export JSVC_HOME=/usr/libexec/bigtop-utils
|
[root@0a90a489] sh scpfile.sh /etc/default/hadoop-hdfs-datanode
Step 11.4: Start up the NameNode
[root@0a90a489] service hadoop-hdfs-namenode start
[root@0a90a489] vim /var/log/hadoop-hdfs/hadoop-hdfs-namenode-0a90a489.log
...
2013-07-31 16:15:54,732 INFO org.apache.hadoop.security.UserGroupInformation: Login successful for user hdfs/0a90a489.cht.local@CHT.COM.TW using keytab file /etc/hadoop/conf/hadoop.keytab
...
2013-07-31 16:15:56,615 INFO org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler: Login using keytab /etc/hadoop/conf/hadoop.keytab, for principal HTTP/0a90a489.cht.local@CHT.COM.TW
2013-07-31 16:15:56,634 INFO org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler: Initialized, principal [HTTP/0a90a489.cht.local@CHT.COM.TW] from keytab [/etc/hadoop/conf/hadoop.keytab]
|
[root@0a90a489] kdestroy
[root@0a90a489] hadoop fs -ls /
13/07/31 16:20:34 ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
|
[root@0a90a489] kinit leorick/admin
Password for leorick/admin@CHT.COM.TW:
[root@0a90a489] hadoop fs -ls /
Found 6 items
drwxr-xr-x - mapred hadoop 0 2013-07-31 14:27 /benchmarks
drwxr-xr-x - hbase hbase 0 2013-07-29 16:58 /hbase
drwxr-xr-x - mapred mapred 0 2013-07-29 16:26 /mapred
drwxrwxrwt - hdfs hadoop 0 2013-07-31 14:06 /tmp
drwxr-xr-x - hdfs hadoop 0 2013-07-29 16:24 /user
drwxr-xr-x - hdfs hadoop 0 2013-07-29 16:25 /var
|
Step 12.1: Start up a DataNode
[root@0a90a48a] service hadoop-hdfs-datanode start
[root@0a90a48a] vim /var/log/hadoop-hdfs/hadoop-hdfs-datanode-0a90a48a.log
2013-07-31 16:22:47,662 INFO org.apache.hadoop.security.UserGroupInformation: Login successful for user hdfs/0a90a48a.cht.local@CHT.COM.TW using keytab file /etc/hadoop/conf/hadoop.keytab
|
[root@0a90a48b] sudo service hadoop-hdfs-datanode start
[root@0a90a48b] vim /var/log/hadoop-hdfs/hadoop-hdfs-datanode-0a90a48b.log
2013-07-31 16:32:26,031 INFO org.apache.hadoop.security.UserGroupInformation: Login successful for user hdfs/0a90a48b.cht.local@CHT.COM.TW using keytab file /etc/hadoop/conf/hadoop.keytab
|
Step 12.2: Test
[root@0a90a489] kdestroy;kinit -k -t /etc/hadoop/conf/hadoop.keytab hdfs/0a90a489.cht.local@CHT.COM.TW
[root@0a90a489] hdfs dfs -mkdir /user/leorick
[root@0a90a489] hdfs dfs -put install.log /user/leorick/
[root@0a90a489] hdfs dfs -chown -R leorick:leorick /user/leorick
[root@0a90a489] hdfs dfs -chmod -R 700 /user/leorick
[root@0a90a489] hdfs dfs -ls -R /user/leorick
-rw------- 2 leorick leorick 25233 2013-07-31 16:36 /user/leorick/install.log
|
[root@0a90a489] hdfs dfs -cat /user/leorick/install.log
Installing libgcc-4.4.7-3.el6.x86_64
warning: libgcc-4.4.7-3.el6.x86_64: Header V3 RSA/SHA1 Signature, key ID c105b9de: NOKEY
...
|
[root@0a90a489] kdestroy;kinit leorick/admin@CHT.COM.TW
[root@0a90a489] hdfs dfs -cat /user/leorick/install.log
Installing libgcc-4.4.7-3.el6.x86_64
warning: libgcc-4.4.7-3.el6.x86_64: Header V3 RSA/SHA1 Signature, key ID c105b9de: NOKEY
...
|
Step 13: Set the Sticky Bit on HDFS Directories
[root@0a90a489] sudo -u hdfs kinit -k -t /etc/hadoop/conf/hadoop.keytab hdfs/0a90a489.cht.local@CHT.COM.TW
[root@0a90a489] sudo -u hdfs hadoop fs -chmod 1777 /tmp
[root@0a90a489] sudo -u hdfs hadoop fs -ls /
drwxrwxrwt - hdfs hadoop 0 2013-07-31 14:06 /tmp
drwxr-xr-x - hdfs hadoop 0 2013-07-31 16:34 /user
|
MapReduce 0.20
Step 1: Configure Secure MRv1
[root@0a90a489] vim /etc/hadoop/conf/mapred-site.xml
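The MRv1 security properties are likewise omitted; the usual CDH4 set, again assuming the shared keytab and realm, is roughly:
<property><name>mapreduce.jobtracker.kerberos.principal</name><value>mapred/_HOST@CHT.COM.TW</value></property>
<property><name>mapreduce.jobtracker.keytab.file</name><value>/etc/hadoop/conf/hadoop.keytab</value></property>
<property><name>mapreduce.tasktracker.kerberos.principal</name><value>mapred/_HOST@CHT.COM.TW</value></property>
<property><name>mapreduce.tasktracker.keytab.file</name><value>/etc/hadoop/conf/hadoop.keytab</value></property>
<!-- the setuid LinuxTaskController lets tasks run as the submitting user, per taskcontroller.cfg below -->
<property><name>mapred.task.tracker.task-controller</name><value>org.apache.hadoop.mapred.LinuxTaskController</value></property>
<property><name>mapreduce.tasktracker.group</name><value>mapred</value></property>
|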
[root@0a90a489] vim /etc/hadoop/conf/taskcontroller.cfg
hadoop.log.dir=/var/log/hadoop-0.20-mapreduce/
mapreduce.tasktracker.group=mapred
banned.users=bin
min.user.id=100
|
[root@0a90a489] sh scpfile.sh /etc/hadoop/conf/mapred-site.xml
[root@0a90a489] sh scpfile.sh /etc/hadoop/conf/taskcontroller.cfg
Step 2: Start up the JobTracker
[root@0a90a489] service hadoop-0.20-mapreduce-jobtracker start
[root@0a90a489] vim /var/log/hadoop-0.20-mapreduce/hadoop-hadoop-jobtracker-0a90a489.log
2013-07-31 17:03:31,296 INFO org.apache.hadoop.security.UserGroupInformation: Login successful for user mapred/0a90a489.cht.local@CHT.COM.TW using keytab file /etc/hadoop/conf/hadoop.keytab
|
Step 3.1: Start up a TaskTracker
[root@0a90a48a] service hadoop-0.20-mapreduce-tasktracker start
[root@0a90a48a] vim /var/log/hadoop-0.20-mapreduce/hadoop-hadoop-tasktracker-0a90a48a.log
2013-07-31 17:12:38,879 INFO org.apache.hadoop.security.UserGroupInformation: Login successful for user mapred/0a90a48a.cht.local@CHT.COM.TW using keytab file /etc/hadoop/conf/hadoop.keytab
|
[root@0a90a48b] service hadoop-0.20-mapreduce-tasktracker start
[root@0a90a48b] vim /var/log/hadoop-0.20-mapreduce/hadoop-hadoop-tasktracker-0a90a48b.log
2013-07-31 17:11:15,282 INFO org.apache.hadoop.security.UserGroupInformation: Login successful for user mapred/0a90a48b.cht.local@CHT.COM.TW using keytab file /etc/hadoop/conf/hadoop.keytab
|
Step 3.2: Testing
3.2.1 Run a job without a Kerberos principal
[root@0a90a489] kdestroy
[root@0a90a489] hadoop jar /usr/lib/hadoop-0.20-mapreduce/hadoop-examples.jar pi 4 1000
Number of Maps = 4
Samples per Map = 1000
13/07/31 17:14:26 ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
|
3.2.2 Create the OS account on every host
[root@0a90a489] cat /etc/group
hadoop:x:498:
[root@0a90a489] useradd -b /home -k /etc/skel -m -s /bin/bash -g 498 leorick
[root@0a90a489] ssh 0a90a48a "useradd -b /home -k /etc/skel -m -s /bin/bash -g 498 leorick"
[root@0a90a489] ssh 0a90a48b "useradd -b /home -k /etc/skel -m -s /bin/bash -g 498 leorick"
[root@0a90a489] id leorick
uid=500(leorick) gid=498(hadoop) groups=498(hadoop)
[root@0a90a489] ssh 0a90a48a "id leorick"
uid=500(leorick) gid=498(hadoop) groups=498(hadoop)
[root@0a90a489] ssh 0a90a48b "id leorick"
uid=500(leorick) gid=498(hadoop) groups=498(hadoop)
3.2.3 Run the job with a Kerberos principal
[root@0a90a489] kinit leorick/admin
Password for leorick/admin@CHT.COM.TW:
[root@0a90a489] hadoop jar /usr/lib/hadoop-0.20-mapreduce/hadoop-examples.jar pi 4 1000
…
13/07/31 17:15:27 ERROR security.UserGroupInformation: PriviledgedActionException as:leorick/admin@CHT.COM.TW (auth:KERBEROS) cause:org.apache.hadoop.security.AccessControlException: Permission denied: user=leorick, access=EXECUTE, inode="/tmp/hadoop-mapred":mapred:hadoop:drwx------
...
|
3.2.4 Fix permissions on the shared HDFS directory and run the job again
[root@0a90a48a] kdestroy;kinit -k -t /etc/hadoop/conf/hadoop.keytab hdfs/0a90a48a.cht.local@CHT.COM.TW
[root@0a90a48a] hdfs dfs -chmod -R 777 /tmp/hadoop-mapred
[root@0a90a489] hadoop jar /usr/lib/hadoop-0.20-mapreduce/hadoop-examples.jar pi 4 1000
Starting Job
13/07/31 17:28:40 WARN mapred.JobClient: Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
13/07/31 17:28:40 INFO hdfs.DFSClient: Created HDFS_DELEGATION_TOKEN token 2 for leorick on 10.144.164.137:8020
13/07/31 17:28:40 INFO security.TokenCache: Got dt for hdfs://0a90a489.cht.local; Kind: HDFS_DELEGATION_TOKEN, Service: 10.144.164.137:8020, Ident: (HDFS_DELEGATION_TOKEN token 2 for leorick)
…
Job Finished in 42.958 seconds
Estimated value of Pi is 3.14000000000000000000
|