jazz 寫:
我猜,錯誤訊息應該是 output 目錄已經存在。不信您跑一次,貼錯誤訊息上來看看

- Jazz
JAZZ,我並沒有錯誤訊息,程式直接就跑完了,但是沒有進入迴圈。
我必須把程式改寫成實作下面的 run 方法:
/**
 * Configures and launches one MapReduce pass of the BFS job.
 *
 * @param args command-line arguments (unused here; consumed by ToolRunner)
 * @return 0 if the job completed successfully, 1 otherwise
 * @throws Exception if HDFS access or job submission fails
 */
public int run(String[] args) throws Exception {
    // Point the client at the pseudo-distributed cluster endpoints.
    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://localhost:9000");
    conf.set("mapred.job.tracker", "localhost:9001");
    FileSystem hdfs = FileSystem.get(conf);

    // The original created the job as "aa" and then renamed it with
    // setJobName("BFStest"); set the final name once instead.
    Job job = new Job(conf, "BFStest");
    job.setJarByClass(BFStest.class);
    job.setNumReduceTasks(1);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setMapperClass(Map.class);
    // job.setCombinerClass(Combine.class); // fails in pseudo-distributed mode -- cause unresolved
    job.setReducerClass(Reduce.class);

    // The job fails at submission if the output directory already exists,
    // so delete it up front.
    Path outPath = new Path(outPutMst);
    if (hdfs.exists(outPath)) {
        hdfs.delete(outPath, true);
        System.out.println("檔案已經存在");
    } else {
        System.out.println("檔案不存在");
    }

    job.setInputFormatClass(KeyValueTextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    FileInputFormat.addInputPath(job, new Path(inputMst));
    FileOutputFormat.setOutputPath(job, outPath);

    // Block until the pass finishes and map its result to an exit code.
    boolean success = job.waitForCompletion(true);
    return success ? 0 : 1;
}
/**
 * Repeatedly launches BFS passes until {@code stop} remains true.
 *
 * NOTE(review): {@code stop} is a static field of THIS JVM. In
 * pseudo-distributed mode the map/reduce tasks run in separate JVMs,
 * so they cannot reset it -- presumably why the loop only runs once
 * there; confirm against the task-side code.
 */
public static void main(String[] args) throws Exception {
    int i = 0;
    while (!stop) {
        i++;
        // Assume this is the last pass; a task would have to clear the flag.
        stop = true;
        int ret = ToolRunner.run(new BFStest(), args);
        if (ret != 0) {
            // The original discarded the job's status; fail fast instead of
            // chaining further passes onto a failed pass's output.
            System.exit(ret);
        }
        // Rotate paths for the next pass: next input = this pass's output;
        // next output = same path with the trailing character replaced by
        // the iteration counter.
        inputMst = outPutMst;
        outPutMst = outPutMst.substring(0, outPutMst.length() - 1) + i;
    }
    System.out.println("wordcount@@");
    System.exit(i);
}
這樣它才會跑迴圈……
可是之前的例子在單機上是沒問題的,會跑完迴圈;可是在偽分散式下就跑不出迴圈……
以下是原程式(單機下沒問題的):
package org.windoop;
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.aspectj.weaver.IUnwovenClassFile;
// BFS driver using the old `mapred` API -- the single-machine version the
// poster reports as working.
// NOTE(review): the braces in this paste are unbalanced (the Map class is
// never closed, so Reduce and main end up nested inside it); presumably
// closing braces were lost when the code was posted -- confirm against the
// real source before compiling.
public class BFS {
// Absolute HDFS paths for the first pass; `output` is rewritten each iteration.
static String input = "hdfs://localhost:9000/user/root/input/inputBFS";
static String output="hdfs://localhost:9000/user/root/output/";
// Loop flag: main keeps launching jobs until this stays true.
public static boolean stop = false;
public static int number = 0;// caps the number of passes so a missing path does not loop forever
public static class Map extends MapReduceBase implements
Mapper<Text, Text, Text, Text> {
// Mapper body is empty in this excerpt.
public void map(Text key, Text value,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {
}
public static class Reduce extends MapReduceBase implements
Reducer<Text, Text, Text, Text> {
// Reducer body is empty in this excerpt.
public void reduce(Text key, Iterator<Text> values,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {}
// Chains MapReduce passes: each pass reads the previous pass's output.
public static void main(String[] args) throws Exception {
int i = 0;
while (!stop) {
System.out.println("count="+i);
i++;
// Assume this pass is the last; something would have to reset `stop`.
stop = true;
JobConf conf = new JobConf();
conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(Text.class);
conf.setMapperClass(Map.class);
conf.setReducerClass(Reduce.class);
conf.setInputFormat(KeyValueTextInputFormat.class);
conf.setOutputFormat(TextOutputFormat.class);
FileInputFormat.setInputPaths(conf, new Path(input));
FileOutputFormat.setOutputPath(conf, new Path(output));
// Rotate paths for the NEXT pass (conf already holds this pass's paths):
// next input = this output; next output = same path with the trailing
// character replaced by the counter.
input=output.toString();
output=output.substring(0,output.length()-1)+i;
JobClient.runJob(conf);
}
}
}
偽分散式下的版本只跑出一個 pass;紅色是有修改的地方,只改變了路徑,卻無法跑出迴圈。
package org.windoop;
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.aspectj.weaver.IUnwovenClassFile;
// BFS driver (old `mapred` API), pseudo-distributed variant: only the paths
// and the two conf.set(...) cluster settings differ from the single-machine
// version, yet the loop runs only once here.
// NOTE(review): this paste is missing a closing brace for the outer class
// (main sits inside Reduce and BFS is never closed) -- presumably lost when
// the code was posted; confirm against the real source before compiling.
public class BFS {
// Relative paths, resolved against the HDFS default filesystem set below.
static String input = "inputBFS";
static String output="output"; public static boolean stop = false;
public static int number = 0;// caps the number of passes so a missing path does not loop forever
public static class Map extends MapReduceBase implements
Mapper<Text, Text, Text, Text> {
// Mapper body is empty in this excerpt.
public void map(Text key, Text value,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {
}
}
public static class Reduce extends MapReduceBase implements
Reducer<Text, Text, Text, Text> {
// Reducer body is empty in this excerpt.
public void reduce(Text key, Iterator<Text> values,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {
}
// Chains MapReduce passes: each pass reads the previous pass's output.
// NOTE(review): `stop` is a static field of this client JVM; in
// pseudo-distributed mode the tasks run in separate JVMs and cannot
// reset it, so the loop exits after one pass -- likely the reported bug.
public static void main(String[] args) throws Exception {
int i = 0;
while (!stop) {
System.out.println("count="+i);
i++;
// Assume this pass is the last; something would have to reset `stop`.
stop = true;
JobConf conf = new JobConf();
// Point the client at the pseudo-distributed cluster endpoints.
conf.set("fs.default.name", "hdfs://localhost:9000");
conf.set("mapred.job.tracker", "localhost:9001"); conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(Text.class);
conf.setMapperClass(Map.class);
conf.setReducerClass(Reduce.class);
conf.setInputFormat(KeyValueTextInputFormat.class);
conf.setOutputFormat(TextOutputFormat.class);
FileInputFormat.setInputPaths(conf, new Path(input));
FileOutputFormat.setOutputPath(conf, new Path(output));
// Rotate paths for the NEXT pass (conf already holds this pass's paths).
input=output.toString();
output=output.substring(0,output.length()-1)+i;
JobClient.runJob(conf);
}
}
}