A .txt file opened under Linux shows up as mojibake (garbled characters): the file is GBK-encoded, while the system expects UTF-8.
Solution 1: use the iconv command on Linux. If the garbled file is named zhongwen.txt, run the following in a terminal:
iconv -f gbk -t utf8 zhongwen.txt > zhongwen.txt.utf8
This writes the re-encoded text to a new file, zhongwen.txt.utf8, rather than converting in place. If the file still appears garbled when opened in Eclipse, go to Preferences (General > Workspace) and change the default text file encoding to UTF-8.
Solution 2: if you need to batch-convert GBK files to UTF-8, you can write a small Java program for the conversion; a sketch is given below.
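The conversion itself is just "decode the bytes as GBK, re-encode the text as UTF-8". Here is a minimal sketch, assuming Java 7+ NIO and a flat directory of .txt files; the GbkToUtf8 class name, the srcDir/dstDir arguments and the *.txt glob are illustrative choices, not part of the original post.

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

/** Batch-converts every .txt file in a directory from GBK to UTF-8. */
public class GbkToUtf8 {

    private static final Charset GBK = Charset.forName("GBK");

    public static void main(String[] args) throws IOException {
        if (args.length != 2) {
            System.err.println("usage: GbkToUtf8 <srcDir> <dstDir>");
            System.exit(1);
        }
        Path srcDir = Paths.get(args[0]);
        Path dstDir = Paths.get(args[1]);
        Files.createDirectories(dstDir);

        try (DirectoryStream<Path> files = Files.newDirectoryStream(srcDir, "*.txt")) {
            for (Path src : files) {
                // decode the bytes as GBK, then re-encode the string as UTF-8
                String content = new String(Files.readAllBytes(src), GBK);
                Files.write(dstDir.resolve(src.getFileName()),
                            content.getBytes(StandardCharsets.UTF_8));
                System.out.println("converted " + src.getFileName());
            }
        }
    }
}

With the files re-encoded as UTF-8, they can be fed to the Hadoop job below: the driver for a MapReduce job that runs the Mahout Bayes classifier.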
package classifier;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.mahout.classifier.bayes.BayesParameters;

public class ClassifierDriver {
    public static void main(String[] args) throws Exception {
        // set Bayes parameters: model base path, classifier type,
        // smoothing weight, fallback category and n-gram size
        BayesParameters params = new BayesParameters();
        params.setBasePath(args[2]);
        params.set("classifierType", args[3]);
        params.set("alpha_i", "1.0");
        params.set("defaultCat", "unknown");
        params.setGramSize(1);

        // pass the serialized parameters to the job via the configuration
        Configuration conf = new Configuration();
        conf.set("bayes.parameters", params.toString());

        // create the job
        Job job = new Job(conf, "Classifier");
        job.setJarByClass(ClassifierDriver.class);

        // each input line is split at the first tab into a key/value pair
        job.setInputFormatClass(KeyValueTextInputFormat.class);

        // specify mapper & reducer
        job.setMapperClass(ClassifierMapper.class);
        job.setReducerClass(ClassifierReducer.class);

        // specify output types of the job (reducer) and of the mapper
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Text.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // specify input and output directories
        Path inPath = new Path(args[0]);
        Path outPath = new Path(args[1]);
        FileInputFormat.addInputPath(job, inPath);
        FileOutputFormat.setOutputPath(job, outPath);
        // delete the output directory if it already exists so the job can be rerun;
        // note: do not close the FileSystem here, because getFileSystem() returns a
        // shared, cached instance that the job itself still needs
        try {
            FileSystem hdfs = outPath.getFileSystem(conf);
            if (hdfs.exists(outPath)) {
                hdfs.delete(outPath, true); // true = delete recursively
            }
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }
        // run the job and exit with its status
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
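The driver takes four arguments: args[0] is the HDFS input directory, args[1] the output directory (deleted first if it already exists), args[2] the base path of the trained Bayes model, and args[3] the classifier type, which in Mahout's Bayes implementation is typically "bayes" or "cbayes". An invocation would look something like hadoop jar <your-jar> classifier.ClassifierDriver <input> <output> <model-path> bayes, where the jar name and paths are placeholders.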