package com.Main;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class Main {
    /**
     * Copies a file from the local file system into HDFS.
     *
     * <p>Usage: {@code Main <localSrc> <hdfsDst>} where {@code localSrc} is a path on the
     * local file system and {@code hdfsDst} is the destination URI/path in HDFS.
     *
     * @param args args[0] = local source file path, args[1] = HDFS destination path
     * @throws IOException if the local file cannot be read or the HDFS write fails
     */
    public static void main(String[] args) throws IOException {
        // Guard against missing arguments — without this, args[0]/args[1] throws
        // ArrayIndexOutOfBoundsException when the jar is run with no parameters.
        if (args.length < 2) {
            System.err.println("Usage: Main <localSrc> <hdfsDst>");
            System.exit(1);
        }
        // Source file in the local file system
        String localSrc = args[0];
        // Destination file in HDFS
        String dst = args[1];

        // Get configuration of the Hadoop system (reads core-site.xml etc. from the classpath)
        Configuration conf = new Configuration();
        System.out.println("Connecting to -- " + conf.get("fs.defaultFS"));
        FileSystem fs = FileSystem.get(URI.create(dst), conf);

        // try-with-resources ensures the local stream is closed even if fs.create() throws
        // before the copy starts (the original leaked the stream in that case).
        try (InputStream in = new BufferedInputStream(new FileInputStream(localSrc))) {
            OutputStream out = fs.create(new Path(dst));
            // closeStream=true: IOUtils closes both streams once the copy completes;
            // the redundant close from try-with-resources is a harmless no-op.
            IOUtils.copyBytes(in, out, 4096, true);
        }
        System.out.println(dst + " copied to HDFS");
    }
}
I am getting the following error message while copying a JSON flat file from the local file system to HDFS: Exception in thread "main" java.lang.ArrayIndexOutOfBoundsException: 0 at com.Main.Main.main(Main.java:22)
I have a JSON file on my local machine that must be moved to HDFS. Example content: {"remove":"Ef77xvP","time":1509073785106}, {"remove":"2YXsF7r","time":1509073795109}
How are you running this? Are you providing arguments to the JAR at runtime? – philantrovert
'hdfs dfs -put file.json' ... it's not that hard –