Null Pointer Exception - Eclipse remote Hadoop


Mehdi SFAR

Jul 12, 2015, 10:36:37 AM
to hadoop-learner-tutori...@googlegroups.com


Hi,

I don't know if this is the right place to ask this kind of question; if not, please let me know and I will delete the post.

I have been searching for days now. Please help!

I get a NullPointerException when executing a simple WordCount job on Hadoop (I have installed a Hadoop cluster on Ubuntu 14 VMs, and I run Eclipse locally):

java.lang.NullPointerException
    at java.lang.ProcessBuilder.start(Unknown Source)
    at org.apache.hadoop.util.Shell.runCommand(Shell.java:482)
    at org.apache.hadoop.util.Shell.run(Shell.java:455)
    at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:808)
    at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
    at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:656)
    at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:444)
    at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java:293)
    at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:133)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:437)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
    at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:562)
    at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:557)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Unknown Source)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:557)
    at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:548)
    at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:833)
    at WordCount.main(WordCount.java:76)
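
From what I have found while searching, a NullPointerException at ProcessBuilder.start during Shell.runCommand often means that the Hadoop client cannot locate its native shell utilities (winutils.exe when the client machine runs Windows). One workaround I have seen suggested, which may or may not apply to my setup, is to set hadoop.home.dir before any Hadoop class is loaded, e.g. with a static block at the top of WordCount (the C:\hadoop path below is only an example):

  static {
      // Workaround from other posts (untested here): Shell.java resolves
      // hadoop.home.dir when it first loads, and a missing value can
      // surface later as this NPE when the job is submitted from Windows.
      // "C:\\hadoop" is an example path to a local Hadoop distribution
      // whose bin folder contains winutils.exe.
      System.setProperty("hadoop.home.dir", "C:\\hadoop");
  }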

This is my code:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
//import org.apache.log4j.Logger;


public class WordCount {

  public static void main(String[] args) throws IOException {

    JobConf conf = new JobConf(WordCount.class);

    System.setProperty("HADOOP_USER_NAME", "hduser");
    // specify output types
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);
    conf.set("hadoop.job.ugi", "hduser");
    conf.set("fs.defaultFS", "hdfs://10.99.99.2:54310/user/hduser");
    //log4j.logger.org.apache.hadoop = DEBUG
    conf.set("mapred.job.tracker", "10.99.99.2:54311");




    // specify input and output dirs
    FileInputFormat.addInputPath(conf, new Path("input"));  //hdfs://10.99.99.2:54310/user/hduser/input/Good.txt
    FileOutputFormat.setOutputPath(conf, new Path("output"));  

    // specify a mapper
    conf.setMapperClass(WordCountMapper.class);

    // specify a reducer
    conf.setReducerClass(WordCountReducer.class);
    conf.setCombinerClass(WordCountReducer.class);

    FileSystem fs = FileSystem.get(conf);

    // CREATE A FILE AND PRINT PATHS TO CHECK IF EVERYTHING IS OK
    fs.createNewFile(new Path("/user/hduser/test"));

    FileStatus[] status = fs.listStatus(new Path("/user/hduser"));
    for(int i=0;i<status.length;i++){
        System.out.println(status[i].getPath());
    }

    // READ THE TXT FILE TO BE SURE THERE IS NO PERMISSION PROBLEM
    Path pt = new Path("hdfs://10.99.99.2:54310/user/hduser/input/Good.txt");
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(pt)));
    String line = br.readLine();
    while (line != null) {
        System.out.println(line);
        line = br.readLine();
    }
    br.close();

    JobClient client = new JobClient(conf);
    client.setConf(conf);

    try {
      JobClient.runJob(conf);
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}
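
For debugging, one could also print where the relative "input"/"output" paths actually resolve against the configured default file system (a small sketch reusing the fs object above; the expected result is an assumption, not verified output):

    // Debug sketch: print the fully qualified form of the relative paths.
    // With HADOOP_USER_NAME=hduser the HDFS working directory should be
    // /user/hduser, so "input" is expected to resolve to
    // hdfs://10.99.99.2:54310/user/hduser/input (assumption).
    Path in = fs.makeQualified(new Path("input"));
    Path out = fs.makeQualified(new Path("output"));
    System.out.println("input  -> " + in);
    System.out.println("output -> " + out);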

P Kumar

Jul 20, 2015, 10:01:01 AM
to hadoop-learner-tutori...@googlegroups.com
Can you change the for-loop condition to <=?

for(int i=0;i<status.length;i++){  to for(int i=0;i<=status.length;i++){

Also, can you give the command you use for execution?
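
For reference, an enhanced for loop over the same FileStatus[] array sidesteps the index bound question entirely (a minimal sketch):

    // Equivalent listing without manual indexing; this iterates exactly
    // status.length times, so there is no bound to get wrong.
    for (FileStatus s : status) {
        System.out.println(s.getPath());
    }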