Je cherche depuis plusieurs jours, s'il vous plaît aidez-moi : exception de pointeur nul (NullPointerException) — Hadoop à distance depuis Eclipse
J'ai une NullPointerException lors de l'exécution d'un simple WordCount pour Hadoop (j'ai installé un cluster Hadoop de machines virtuelles Ubuntu 14 et Eclipse localement)
java.lang.NullPointerException
at java.lang.ProcessBuilder.start(Unknown Source)
at org.apache.hadoop.util.Shell.runCommand(Shell.java:482)
at org.apache.hadoop.util.Shell.run(Shell.java:455)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:715)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:808)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:791)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:656)
at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:444)
at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java:293)
at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:133)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:437)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Unknown Source)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:562)
at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:557)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Unknown Source)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:557)
at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:548)
at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:833)
at WordCount.main(WordCount.java:76)
Ceci est mon code:
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
//import org.apache.log4j.Logger;
public class WordCount {
public static void main(String[] args) throws IOException {
JobConf conf = new JobConf(WordCount.class);
System.setProperty("HADOOP_USER_NAME", "hduser");
// specify output types
conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(IntWritable.class);
conf.set("hadoop.job.ugi", "hduser");
conf.set("fs.defaultFS", "hdfs://10.99.99.2:54310/user/hduser");
//log4j.logger.org.apache.hadoop = DEBUG
conf.set("mapred.job.tracker", "10.99.99.2:54311");
// specify input and output dirs
FileInputFormat.addInputPath(conf, new Path("input")); //hdfs://10.99.99.2:54310/user/hduser/input/Good.txt
FileOutputFormat.setOutputPath(conf, new Path("output"));
// specify a mapper
conf.setMapperClass(WordCountMapper.class);
// specify a reducer
conf.setReducerClass(WordCountReducer.class);
conf.setCombinerClass(WordCountReducer.class);
FileSystem fs = FileSystem.get(conf);
// CREATE FILE AND PRINT PATH TO CHECK IS EVERYTHING IS OK
fs.createNewFile(new Path("/user/hduser/test"));
FileStatus[] status = fs.listStatus(new Path("/user/hduser"));
for(int i=0;i<status.length;i++){
System.out.println(status[i].getPath());
}
// READ TXT TO BE SURE THERE IS NOT PERMISSION PROBLEM
Path pt= new Path("hdfs://10.99.99.2:54310/user/hduser/input/Good.txt");
BufferedReader br=new BufferedReader(new InputStreamReader(fs.open(pt)));
String line;
line=br.readLine();
while (line != null){
System.out.println(line);
line=br.readLine(); }
JobClient client = new JobClient(conf);
client.setConf(conf);
try {
JobClient.runJob(conf);
} catch (Exception e) {
e.printStackTrace();
}
}
}
Cher Rachit, que voulez-vous dire par "commande précise pour répondre au fichier JAR"? – NoNull
Je voulais dire : déployez le fichier JAR sur le cluster. Je suggère d'écrire des tests pour le code et de le déboguer avec quelques entrées. –