当我运行 Hive 查询时出现以下错误。请帮我解决这个问题。
HIVE> insert overwrite table bucket_emp1 select * from emp;
Query ID = hduser_20160426213038_58cbf1dc-a345-40f8-ab3d-a3258046b279
Total jobs = 3
Launching Job 1 out of 3
Number of reduce tasks is set to 0 since there's no reduce operator
org.apache.hadoop.util.DiskChecker$DiskErrorException: No space available in any of the local directories.
	at org.apache.hadoop.fs.LocalDirAllocator$AllocatorPerContext.getLocalPathForWrite(LocalDirAllocator.java:366)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:150)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:131)
	at org.apache.hadoop.fs.LocalDirAllocator.getLocalPathForWrite(LocalDirAllocator.java:115)
	at org.apache.hadoop.mapred.LocalDistributedCacheManager.setup(LocalDistributedCacheManager.java:131)
	at org.apache.hadoop.mapred.LocalJobRunner$Job.<init>(LocalJobRunner.java:163)
	at org.apache.hadoop.mapred.LocalJobRunner.submitJob(LocalJobRunner.java:731)
	at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:536)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
	at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
	at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:562)
	at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:557)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
	at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:557)
	at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:548)
	at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:431)
	at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:137)
	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)
	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1653)
	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1412)
	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
	at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:213)
	at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:165)
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
	at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:736)
	at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
	at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Job Submission failed with exception 'org.apache.hadoop.util.DiskChecker$DiskErrorException(No space available in any of the local directories.)'
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
错误信息已经说明了原因:"No space available in any of the local directories"(任何本地目录中都没有可用空间)。你还需要什么其他信息? – gudok