I have a file containing data that needs to be loaded into a Hive table. I wrote a custom SerDe (essentially a modification of Hive's existing RegexSerDe) to help me load it. When I try to create a Hive table that uses the custom SerDe, I get an error.
Here is what I wrote:
package my.hive.customserde;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.MissingFormatArgumentException;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class FIASC2 extends AbstractSerDe {

    public static final Log LOG = LogFactory.getLog(FIASC2.class.getName());

    // Fixed widths of the 18 fields in each input record.
    int[] colwidths = {1, 10, 6, 12, 8, 14, 16, 6, 6, 2, 10, 10, 19, 2, 2, 6, 8, 1};
    String outputformat = "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s %11$s %12$s %13$s %14$s %15$s "
            + "%16$s %17$s %18$s";
    // Column indexes whose values need date/datetime reformatting. Declared as
    // Integer[] so that Arrays.asList(...).contains(i) checks elements; with an
    // int[], Arrays.asList wraps the whole array as a single element and
    // contains(i) is always false.
    Integer[] datetimecols = {5};
    Integer[] datecols = {17};

    int numColumns;
    int totalcolwidth = 0;

    List<String> columnNames;
    List<TypeInfo> columnTypes;

    ArrayList<String> row;
    StructObjectInspector rowOI;

    Object[] outputFields;
    Text outputRowText;

    @Override
    public void initialize(Configuration conf, Properties tbl) throws SerDeException {
        LOG.debug("Initializing SerDe");

        // Get column names and types from the table properties.
        String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
        String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
        LOG.debug("Columns : " + columnNameProperty + " Types : " + columnTypeProperty);

        if (columnNameProperty.length() == 0) {
            columnNames = new ArrayList<String>();
        } else {
            columnNames = Arrays.asList(columnNameProperty.split(","));
        }
        columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
        assert columnNames.size() == columnTypes.size();
        assert colwidths.length == columnNames.size();

        numColumns = columnNames.size();

        // Sum the fixed field widths; deserialize() uses the total to skip
        // records that are too short.
        for (int i = 0; i < numColumns; i++) {
            totalcolwidth += colwidths[i];
        }

        // Every field is exposed to Hive as a string.
        List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
        for (int i = 0; i < numColumns; i++) {
            columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        }
        rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs);

        row = new ArrayList<String>(numColumns);
        for (int i = 0; i < numColumns; i++) {
            row.add(null);
        }
        outputFields = new Object[numColumns];
        outputRowText = new Text();
    }

    @Override
    public Object deserialize(Writable blob) throws SerDeException {
        String line = ((Text) blob).toString();
        int index = 0;

        // Skip records that are shorter than the combined field widths.
        if (line.length() < totalcolwidth) {
            return null;
        }
        // Skip header ("H") and trailer ("T") records.
        if (line.startsWith("H") || line.startsWith("T")) {
            return null;
        }

        for (int i = 0; i < numColumns; i++) {
            int len = colwidths[i];
            String col = line.substring(index, index + len);
            // Convert the datetime string into the correct format so that it
            // can be loaded into the Hive table.
            if (Arrays.asList(datetimecols).contains(i)) {
                DateTimeFormatConverter dtc = new DateTimeFormatConverter();
                try {
                    col = dtc.convertCurrToNew(col);
                } catch (ParseException e) {
                    LOG.error("Unable to parse Date Time string : " + col);
                    e.printStackTrace();
                }
            }
            if (Arrays.asList(datecols).contains(i)) {
                DateFormatConverter dtc = new DateFormatConverter();
                try {
                    col = dtc.convertCurrToNew(col);
                } catch (ParseException e) {
                    LOG.error("Unable to parse Date string : " + col);
                    e.printStackTrace();
                }
            }
            row.set(i, col);
            index += len;
        }
        return row;
    }

    @Override
    public ObjectInspector getObjectInspector() throws SerDeException {
        return rowOI;
    }

    @Override
    public SerDeStats getSerDeStats() {
        return null;
    }

    @Override
    public Class<? extends Writable> getSerializedClass() {
        return Text.class;
    }

    @Override
    public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
        if (outputformat == null) {
            throw new SerDeException("Cannot write into table because no output format was specified");
        }
        StructObjectInspector outputRowOI = (StructObjectInspector) objInspector;
        List<? extends StructField> outputFieldRefs = outputRowOI.getAllStructFieldRefs();
        if (outputFieldRefs.size() != numColumns) {
            throw new SerDeException("Output format does not have the same number of fields as the number of columns");
        }

        for (int i = 0; i < numColumns; i++) {
            Object field = outputRowOI.getStructFieldData(obj, outputFieldRefs.get(i));
            ObjectInspector fieldOI = outputFieldRefs.get(i).getFieldObjectInspector();
            StringObjectInspector fieldStringOI = (StringObjectInspector) fieldOI;
            outputFields[i] = fieldStringOI.getPrimitiveJavaObject(field);
        }

        String outputRowString;
        try {
            outputRowString = String.format(outputformat, outputFields);
        } catch (MissingFormatArgumentException e) {
            throw new SerDeException("The table contains " + numColumns + " columns but the output format requires more", e);
        }
        outputRowText.set(outputRowString);
        return outputRowText;
    }
}
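DateTimeFormatConverter and DateFormatConverter are small helper classes of my own, not Hive classes. Roughly, the datetime one looks like this (the input and output patterns shown here are illustrative; DateFormatConverter is analogous with date-only patterns):

package my.hive.customserde;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

// Converts a fixed-width datetime field into the layout Hive's timestamp
// type expects. The 14-character input pattern matches the field width;
// the exact patterns here are placeholders.
public class DateTimeFormatConverter {
    private final SimpleDateFormat curr = new SimpleDateFormat("yyyyMMddHHmmss");
    private final SimpleDateFormat target = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    public String convertCurrToNew(String value) throws ParseException {
        Date parsed = curr.parse(value);
        return target.format(parsed);
    }
}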
You can rest assured that I have imported every class the SerDe needs.
When I try to create the table, I get an error saying "Unable to get field from serde: my.hive.customserde.FIASC2".
Here is the stack trace:
2015-08-25 15:57:51,995 ERROR [HiveServer2-Background-Pool: Thread-57]: metadata.Table (Table.java:getCols(608)) - Unable to get field from serde: my.hive.customserde.FIASC2
java.lang.NullPointerException
at org.apache.hadoop.hive.metastore.MetaStoreUtils.getFieldsFromDeserializer(MetaStoreUtils.java:1257)
at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:605)
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:694)
at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4135)
at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:306)
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1653)
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1412)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1054)
at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:154)
at org.apache.hive.service.cli.operation.SQLOperation.access$100(SQLOperation.java:71)
at org.apache.hive.service.cli.operation.SQLOperation$1$1.run(SQLOperation.java:206)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hive.service.cli.operation.SQLOperation$1.run(SQLOperation.java:218)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
2015-08-25 15:57:51,996 ERROR [HiveServer2-Background-Pool: Thread-57]: exec.DDLTask (DDLTask.java:failed(520)) - org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.NullPointerException
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:720)
at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4135)
at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:306)
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1653)
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1412)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1054)
at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:154)
at org.apache.hive.service.cli.operation.SQLOperation.access$100(SQLOperation.java:71)
at org.apache.hive.service.cli.operation.SQLOperation$1$1.run(SQLOperation.java:206)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hive.service.cli.operation.SQLOperation$1.run(SQLOperation.java:218)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.NullPointerException
at org.apache.hadoop.hive.metastore.MetaStoreUtils.getFieldsFromDeserializer(MetaStoreUtils.java:1257)
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:695)
... 21 more
I understand that the table creation is failing, but does anyone know why I am getting this error? I tried googling it but did not get much help.
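To narrow the problem down outside of Hive, a small standalone harness can drive initialize() and getObjectInspector() by hand, passing the same column properties Hive would. This is just a sketch I put together; FIASC2Test is my own scaffolding, and the column lists are copied from the CREATE TABLE script below:

package my.hive.customserde;

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;

// Initializes the SerDe the same way the metastore would and prints the
// ObjectInspector it returns, to check whether the NPE is reproducible
// outside of HiveServer2.
public class FIASC2Test {
    public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty(serdeConstants.LIST_COLUMNS,
                "record_type,fin_id,corp_id,merc_id,term_id,tran_time,cashcard_number,"
                + "ttc,tcc,tran_type,tran_amount,deposit_amount,pan,account_type,"
                + "response_code,card_balance,settlement_date,tran_mode");
        tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES,
                "varchar(1),varchar(16),varchar(8),varchar(16),varchar(8),timestamp,"
                + "varchar(16),varchar(8),varchar(8),varchar(2),varchar(16),varchar(16),"
                + "varchar(32),varchar(2),varchar(2),varchar(8),date,varchar(1)");

        FIASC2 serde = new FIASC2();
        serde.initialize(new Configuration(), tbl);
        System.out.println(serde.getObjectInspector());
    }
}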
In case it helps, here is the CREATE TABLE script I am using:
create table if not exists fiasc2(
record_type varchar(1),
fin_id varchar(16),
corp_id varchar(8),
merc_id varchar(16),
term_id varchar(8),
tran_time timestamp,
cashcard_number varchar(16),
ttc varchar(8),
tcc varchar(8),
tran_type varchar(2),
tran_amount varchar(16),
deposit_amount varchar(16),
pan varchar(32),
account_type varchar(2),
response_code varchar(2),
card_balance varchar(8),
settlement_date date,
tran_mode varchar(1))
row format serde 'my.hive.customserde.FIASC2'
location '/user/hive/fiasc2_test';
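For reference, the script can also be submitted to HiveServer2 over JDBC, with the SerDe jar registered in the same session before the DDL runs. This is a sketch only; the jar path and connection URL are placeholders for my setup:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Registers the SerDe jar with the session, then creates the table.
public class CreateFiasc2Table {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default", "hive", "");
             Statement stmt = conn.createStatement()) {
            // Placeholder path: the jar containing my.hive.customserde.FIASC2.
            stmt.execute("add jar /path/to/fiasc2-serde.jar");
            stmt.execute("create table if not exists fiasc2("
                    + "record_type varchar(1), fin_id varchar(16), corp_id varchar(8), "
                    + "merc_id varchar(16), term_id varchar(8), tran_time timestamp, "
                    + "cashcard_number varchar(16), ttc varchar(8), tcc varchar(8), "
                    + "tran_type varchar(2), tran_amount varchar(16), deposit_amount varchar(16), "
                    + "pan varchar(32), account_type varchar(2), response_code varchar(2), "
                    + "card_balance varchar(8), settlement_date date, tran_mode varchar(1)) "
                    + "row format serde 'my.hive.customserde.FIASC2' "
                    + "location '/user/hive/fiasc2_test'");
        }
    }
}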