

If an error like the ones below occurs when submitting a MapReduce job:

Set the mapreduce.jobtracker.staging.root.dir property in mapred-site.xml to /user. The exception is thrown because the job client tries to create its staging directory under a path the submitting user cannot write to; pointing the staging root at /user places the staging directory under each user's own HDFS home directory instead.
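For reference, a minimal mapred-site.xml entry for this setting might look like the snippet below (the property name applies to Hadoop 1.x / MRv1; the JobTracker should be restarted after the change so the new staging root takes effect):

<configuration>
  <!-- root under which per-user job staging directories are created -->
  <property>
    <name>mapreduce.jobtracker.staging.root.dir</name>
    <value>/user</value>
  </property>
</configuration>

With this in place, each job's staging files are created under /user/<submitting user>/.staging. The submitting user still needs an HDFS home directory they own; if it is missing, something like "hadoop fs -mkdir /user/<username>" followed by "hadoop fs -chown <username> /user/<username>" (run as the HDFS superuser) should create it. Here <username> is just a placeholder for the actual account name.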


------------------------------------------------case 1------------------------------

org.apache.hadoop.security.AccessControlException: Permission denied: user=emily.ragland, access=WRITE, inode="staging":ubuntu:supergroup:rwxr-xr-x

Exception in thread "main" org.apache.hadoop.security.AccessControlException: org.apache.hadoop.security.AccessControlException: Permission denied: user=emily.ragland, access=WRITE, inode="staging":ubuntu:supergroup:rwxr-xr-x

at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)

at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)

at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)

at java.lang.reflect.Constructor.newInstance(Constructor.java:526)

at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:95)

at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:57)

at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:1459)

at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:362)

at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:126)

at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:942)

at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:936)

at java.security.AccessController.doPrivileged(Native Method)

at javax.security.auth.Subject.doAs(Subject.java:415)

at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)

at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:936)

at org.apache.hadoop.mapreduce.Job.submit(Job.java:550)

at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:580)

at com.jerry.WordCount.main(WordCount.java:61)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:606)

at org.apache.hadoop.util.RunJar.main(RunJar.java:160)


------------------------------------------------case 2-----------------------------------------------------------

hadoop@master:~/working$ hadoop jar hadoop-examples-1.2.1.jar wordcount work/input/out.txt work/output/test1

14/08/23 16:22:32 ERROR security.UserGroupInformation: PriviledgedActionException as:hadoop cause:org.apache.hadoop.security.AccessControlException: org.apache.hadoop.security.AccessControlException: Permission denied: user=hadoop, access=WRITE, inode="hadoop":root:supergroup:rwxr-xr-x

org.apache.hadoop.security.AccessControlException: org.apache.hadoop.security.AccessControlException: Permission denied: user=hadoop, access=WRITE, inode="hadoop":root:supergroup:rwxr-xr-x

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)

        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)

        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)

        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)

        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:95)

        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:57)

        at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:1459)

        at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:362)

        at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:126)

        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:942)

        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:936)

        at java.security.AccessController.doPrivileged(Native Method)

        at javax.security.auth.Subject.doAs(Subject.java:415)

        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)

        at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:936)

        at org.apache.hadoop.mapreduce.Job.submit(Job.java:550)

        at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:580)

        at org.apache.hadoop.examples.WordCount.main(WordCount.java:82)

        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

        at java.lang.reflect.Method.invoke(Method.java:606)

        at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:68)

        at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:139)

        at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:64)

        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

        at java.lang.reflect.Method.invoke(Method.java:606)

        at org.apache.hadoop.util.RunJar.main(RunJar.java:160)

Caused by: org.apache.hadoop.ipc.RemoteException: org.apache.hadoop.security.AccessControlException: Permission denied: user=hadoop, access=WRITE, inode="hadoop":root:supergroup:rwxr-xr-x

        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:217)

        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:197)

        at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:141)

        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5758)

        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkAncestorAccess(FSNamesystem.java:5731)

        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:2502)

        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:2469)

        at org.apache.hadoop.hdfs.server.namenode.NameNode.mkdirs(NameNode.java:911)

        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

        at java.lang.reflect.Method.invoke(Method.java:606)

        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:587)

        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1432)

        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1428)

        at java.security.AccessController.doPrivileged(Native Method)

        at javax.security.auth.Subject.doAs(Subject.java:415)

        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)

        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1426)


        at org.apache.hadoop.ipc.Client.call(Client.java:1113)

        at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)

        at com.sun.proxy.$Proxy1.mkdirs(Unknown Source)

        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

        at java.lang.reflect.Method.invoke(Method.java:606)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:85)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:62)

        at com.sun.proxy.$Proxy1.mkdirs(Unknown Source)

        at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:1457)

        ... 23 more
