java - NullPointerException - Eclipse remote Hadoop

Tags: java eclipse hadoop hdfs

I have been stuck on this for several days now; please help.

When running a simple WordCount job on Hadoop, I get a NullPointerException (I have set up a cluster of Ubuntu 14 VMs with Hadoop installed, and I run Eclipse locally):

java.lang.NullPointerException
    at java.lang.ProcessBuilder.start(Unknown Source)
    at org.apache.hadoop.util.Shell.runCommand(Shell.java:482)
    at org.apache.hadoop.util.Shell.run(Shell.java:455)
    at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:808)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
    at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:656)
    at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:444)
    at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java:293)
    at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:133)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:437)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
    at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:562)
    at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:557)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:557)
    at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:548)
    at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:833)
    at WordCount.main(WordCount.java:76)

Here is my code:
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
//import org.apache.log4j.Logger;


public class WordCount {

  public static void main(String[] args) throws IOException {


    JobConf conf = new JobConf(WordCount.class);

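    // Act as the remote cluster's hduser account when talking to HDFS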
    System.setProperty("HADOOP_USER_NAME", "hduser");
    // specify output types
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);
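    // Remote cluster settings: user (legacy ugi property), HDFS NameNode (fs.defaultFS), and MRv1 JobTracker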
    conf.set("hadoop.job.ugi", "hduser");
    conf.set("fs.defaultFS", "hdfs://10.99.99.2:54310/user/hduser");
    //log4j.logger.org.apache.hadoop = DEBUG
    conf.set("mapred.job.tracker", "10.99.99.2:54311");




    // specify input and output dirs
    FileInputFormat.addInputPath(conf, new Path("input"));  //hdfs://10.99.99.2:54310/user/hduser/input/Good.txt
    FileOutputFormat.setOutputPath(conf, new Path("output"));  

    // specify a mapper
    conf.setMapperClass(WordCountMapper.class);

    // specify a reducer
    conf.setReducerClass(WordCountReducer.class);
    conf.setCombinerClass(WordCountReducer.class);



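    // Get a handle to the remote HDFS referenced by fs.defaultFS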
    FileSystem fs = FileSystem.get(conf);


    // CREATE FILE AND PRINT PATHS TO CHECK IF EVERYTHING IS OK
    fs.createNewFile(new Path("/user/hduser/test"));

    FileStatus[] status = fs.listStatus(new Path("/user/hduser"));
    for (int i = 0; i < status.length; i++) {
        System.out.println(status[i].getPath());
    }


    // READ TXT TO BE SURE THERE IS NO PERMISSION PROBLEM
    Path pt = new Path("hdfs://10.99.99.2:54310/user/hduser/input/Good.txt");
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(pt)));
    String line = br.readLine();
    while (line != null) {
        System.out.println(line);
        line = br.readLine();
    }
    br.close();

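    // Submit the job to the remote JobTracker configured above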
    JobClient client = new JobClient(conf);
    client.setConf(conf);


    try {
      JobClient.runJob(conf);
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}

Best answer

The code itself looks fine! Make sure the command you use to submit the JAR file is exactly right, and add a few small tests and debug them to find out where the NullPointerException is coming from.
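To make "add a few small tests" concrete, here is a minimal sketch of such a test; it is not part of the original answer. It exercises only the step at the top of the failing path in the stack trace: according to the trace, the staging directory is created on the local file system (RawLocalFileSystem.mkdirs / setPermission), which shells out to chmod on the machine running Eclipse, not on the cluster. The class name LocalStagingTest and the temp-directory name are made up for illustration, and whether this reproduces the exact NullPointerException depends on the local environment.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class LocalStagingTest {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // In the stack trace above, the failing frames
    // (RawLocalFileSystem.mkdirs -> setPermission -> Shell.runCommand)
    // operate on the LOCAL file system of the machine running Eclipse.
    FileSystem local = FileSystem.getLocal(conf);

    // Hypothetical test directory; any writable local path will do.
    Path dir = new Path(System.getProperty("java.io.tmpdir"), "hadoop-staging-test");

    // mkdirs(Path, FsPermission) goes through the same setPermission/chmod
    // code path that throws in the stack trace above.
    boolean ok = local.mkdirs(dir, new FsPermission((short) 0700));
    System.out.println("Local mkdirs with explicit permission succeeded: " + ok);
  }
}

If this small program fails with the same NullPointerException at ProcessBuilder.start, the problem is in the local Hadoop client environment rather than in the WordCount code or the cluster; if it passes, the next thing to check is the job-submission configuration and how the JAR is passed to the cluster.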

Regarding java - NullPointerException - Eclipse remote Hadoop, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/31360013/
