- Copy /usr/lib/hadoop/example-confs/conf.secure/log4j.properties to /etc/hadoop/conf/log4j.properties and modify its contents as follows:
# Define some default values that can be overridden by system properties
hadoop.root.logger=INFO,console
hadoop.log.dir=.
hadoop.log.file=hadoop.log

# Define the root logger to the system property "hadoop.root.logger".
log4j.rootLogger=${hadoop.root.logger}, EventCounter

# Logging Threshold
log4j.threshhold=ALL

#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd

# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout

# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n

# Debugging Pattern format
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n

#
# console
# Add "console" to rootlogger above if you want to use this
#
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n

#
# TaskLog Appender
#
#Default values
hadoop.tasklog.taskid=null
hadoop.tasklog.noKeepSplits=4
hadoop.tasklog.totalLogFileSize=100
hadoop.tasklog.purgeLogSplits=true
hadoop.tasklog.logsRetainHours=12
hadoop.tasklog.iscleanup=false

log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n

#
# Event Counter Appender
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
#
log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter

#=======
# security audit logging
security.audit.logger=INFO,DRFAS
log4j.category.SecurityLogger=${security.audit.logger}
log4j.additivity.SecurityLogger=false
hadoop.security.log.file=SecurityAuth-${user.name}.audit
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.id.str}-auth.log
log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd

# hdfs audit logging
hdfs.audit.logger=INFO,DRFAAUDIT
log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
log4j.appender.DRFAAUDIT=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
log4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd

# mapred audit logging
mapred.audit.logger=INFO,MRAUDIT
log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
log4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender
log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd

# Mapred job summary
mapred.jobsummary.logger=INFO,console
log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${mapred.jobsummary.logger}
log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.JSA.File=${hadoop.log.dir}/mapred-jobsummary.log
log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
log4j.appender.JSA.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
log4j.appender.JSA.DatePattern=.yyyy-MM-dd
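The hdfs.audit.logger and mapred.audit.logger values above are ordinary log4j properties, so they can also be overridden per daemon with -D flags instead of editing the shared file. A minimal hadoop-env.sh sketch, assuming the init scripts source /etc/hadoop/conf/hadoop-env.sh and that this Hadoop version honors these *_OPTS variables:

# /etc/hadoop/conf/hadoop-env.sh
# Override the audit loggers per daemon; when these flags are absent the
# defaults defined in log4j.properties still apply.
export HADOOP_NAMENODE_OPTS="-Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
export HADOOP_JOBTRACKER_OPTS="-Dmapred.audit.logger=INFO,MRAUDIT ${HADOOP_JOBTRACKER_OPTS}"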
- Two audit files are generated under /var/log/hadoop:
-rw-r--r-- 1 hdfs hdfs 0 Jul 15 15:12 /var/log/hadoop/hdfs-audit.log
-rw-r--r-- 1 hdfs hdfs 0 Jul 15 15:12 /var/log/hadoop/mapred-audit.log
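These zero-byte files appear once the daemons are restarted with the new configuration, because log4j opens every appender file listed in log4j.properties at daemon startup. A minimal restart sketch (the init-script names are assumptions and vary by distribution and version):

# Restart the master daemons so the new log4j.properties takes effect
# (service names below are assumptions; adjust for your distribution)
service hadoop-namenode restart
service hadoop-jobtracker restart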
- Run hdfs commands from another client, then inspect the audit log contents:
[leo@slave1]# hadoop fs -ls /user/leo
drwxr-xr-x   - leo leo          0 2013-05-30 14:34 /user/leo/warehouse
[leo@slave1]# hadoop fs -put boot.log /user/leo
[leo@slave1]# hadoop fs -chmod 777 /user/leo/boot.log
[leo@slave1]# hadoop fs -cat /user/leo/boot.log
[leo@slave1]# hadoop fs -rm /user/leo/boot.log
[root@master]# vim /var/log/hadoop/hdfs-audit.log
2013-07-15 15:14:49,637 INFO FSNamesystem.audit: ugi=leo (auth:SIMPLE) ip=/10.144.129.138 cmd=listStatus src=/user/leo dst=null perm=null
2013-07-15 15:20:35,159 INFO FSNamesystem.audit: ugi=leo (auth:SIMPLE) ip=/10.144.129.138 cmd=create src=/user/leo/boot.log dst=null perm=leo:leo:rw-r--r--
2013-07-15 15:21:13,695 INFO FSNamesystem.audit: ugi=leo (auth:SIMPLE) ip=/10.144.129.138 cmd=setPermission src=/user/leo/boot.log dst=null perm=leo:leo:rw-rw-rw-
2013-07-15 15:21:27,975 INFO FSNamesystem.audit: ugi=leo (auth:SIMPLE) ip=/10.144.129.138 cmd=open src=/user/leo/boot.log dst=null perm=null
2013-07-15 15:21:54,751 INFO FSNamesystem.audit: ugi=leo (auth:SIMPLE) ip=/10.144.129.138 cmd=delete src=/user/leo/boot.log dst=null perm=null
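Each audit line is a flat set of key=value fields (ugi, ip, cmd, src, dst, perm), so plain shell tools are enough to query it. For example, a small sketch that summarizes operations and traces a single user:

# Count HDFS audit events per command type
grep -o 'cmd=[a-zA-Z]*' /var/log/hadoop/hdfs-audit.log | sort | uniq -c
# Show everything user leo did to boot.log
grep 'ugi=leo' /var/log/hadoop/hdfs-audit.log | grep 'boot.log'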
- Run a hadoop job from another client, then inspect the audit log contents:
[leo@slave1]# hadoop jar /usr/lib/hadoop/hadoop-examples.jar pi 6 50000
[root@master]# ls -l /var/log/hadoop/mapred-auth.log
-rw-r--r-- 1 mapred mapred 0 Jul 15 15:25 /var/log/hadoop/mapred-auth.log
- Since the mapreduce audit log on the master has no content, change the owner and permissions of the hdfs & mapreduce audit files so that both daemons can write to them through the shared hadoop group, then run hadoop job commands from the client:
[root@master]# chown hdfs:hadoop /var/log/hadoop/hdfs-audit.log
[root@master]# chmod 774 /var/log/hadoop/hdfs-audit.log
[root@master]# chown hdfs:hadoop /var/log/hadoop/mapred-audit.log
[root@master]# chmod 774 /var/log/hadoop/mapred-audit.log
[leo@slave1]# hadoop jar /usr/lib/hadoop/hadoop-examples.jar pi 6 50000
[leo@slave1]# hadoop job -status job_201307151623_0007
[leo@slave1]# hadoop job -set-priority job_201307151623_0007 LOW
[leo@slave1]# hadoop job -kill job_201307151623_0007
[root@master]# ls -l /var/log/hadoop/*au*
-rwxrwxr-- 1 hdfs hadoop 209864 Jul 15 16:33 /var/log/hadoop/hdfs-audit.log
-rwxrwxr-- 1 hdfs hadoop    987 Jul 15 16:33 /var/log/hadoop/mapred-audit.log
[root@master]# vim /var/log/hadoop/mapred-audit.log
2013-07-15 16:33:25,875 INFO mapred.AuditLogger: USER=leo IP=10.144.129.138 OPERATION=SUBMIT_JOB TARGET=job_201307151623_0007 RESULT=SUCCESS
# none of the other commands produced any log entries
- Although the file attributes were fixed by hand, the daily log rotation recreates the audit files with the default owner and permissions, so the mapreduce audit log becomes unwritable again. One workaround is sketched below.
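One pragmatic workaround is to re-apply the owner and mode after each rollover, since DailyRollingFileAppender recreates the file with the writing daemon's default attributes. A minimal cron sketch (the file name and schedule are assumptions; note that the appender only rolls on the first write after midnight, so a single early-morning run can still race with the rollover):

# /etc/cron.d/hadoop-audit-perms (hypothetical file name)
# Re-apply group ownership and group write shortly after the daily rollover
30 0 * * * root chown hdfs:hadoop /var/log/hadoop/*-audit.log && chmod 774 /var/log/hadoop/*-audit.log

A cleaner long-term fix is to give each daemon its own log4j configuration, so that every audit file is only ever created by the daemon that owns it.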