gpt4 book ai didi

java.lang.ClassCastException : cannot be cast to java. lang.Object

转载 作者:可可西里 更新时间:2023-11-01 14:22:29 29 4
gpt4 key购买 nike

我正在尝试根据自己的需要编写自定义 serde,但卡在了获取类转换异常的位置。

输入数据是:

john,miller

我想将此数据作为 fname string,lname string 插入到配置单元中,为此我编写了一个 customserde。

我只实现了SerDe接口(interface)的反序列化方法如下:

    package com.datametica.serde;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class CustomSerde implements SerDe {

int numColumns;
StructObjectInspector rowOI;
List<String> columnNames;
List<Object> rows;
List<TypeInfo> columnTypes;

@Override
public void initialize(Configuration conf, Properties tblProps)
throws SerDeException {
String columnNameProperty = tblProps
.getProperty(Constants.LIST_COLUMNS);
columnNames = Arrays.asList(columnNameProperty.split(","));

String columnTypeProperty = tblProps
.getProperty(Constants.LIST_COLUMN_TYPES);
columnTypes = TypeInfoUtils
.getTypeInfosFromTypeString(columnTypeProperty);
numColumns = columnNames.size();

List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(
columnNames.size());
ObjectInspector oi;

for (int c = 0; c < numColumns; c++) {
oi = TypeInfoUtils
.getStandardJavaObjectInspectorFromTypeInfo(columnTypes
.get(c));
columnOIs.add(oi);
}

/*
* for (int c = 0; c < numColumns; c++) { rows.add(); }
*/

rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
columnNames, columnOIs);

}

@Override
public CustomDataFormat deserialize(Writable record) throws SerDeException {
Text text = (Text) record;
String[] valArray = text.toString().split(",");
System.out.println("----------------------------\n");
System.out.println("yo yo yo "+text.toString() + "\n");
System.out.println("----------------------------\n");
CustomDataFormat dataObject = new CustomDataFormat();
dataObject.setFname(valArray[0]);
dataObject.setLname(valArray[1]);
return dataObject;
}

@Override
public ObjectInspector getObjectInspector() throws SerDeException {
return rowOI;
}

@Override
public SerDeStats getSerDeStats() {
return null;
}

@Override
public Class<? extends Writable> getSerializedClass() {
return null;
}

@Override
public Writable serialize(Object arg0, ObjectInspector arg1)
throws SerDeException {
return null;
}

}

保存数据的类

package com.datametica.serde;

import java.util.ArrayList;
import java.util.List;

public class CustomDataFormat {
String fname;

String lname;

/*List<LevelOneStruct> arrayOfLevelTwoStruct = new ArrayList<LevelOneStruct>();

public List<LevelOneStruct> getArrayOfLevelTwoStruct() {
return arrayOfLevelTwoStruct;
}

public void setArrayOfLevelTwoStruct(
List<LevelOneStruct> arrayOfLevelTwoStruct) {
this.arrayOfLevelTwoStruct = arrayOfLevelTwoStruct;
}*/

public String getFname() {
return fname;
}

public void setFname(String fname) {
this.fname = fname;
}

public String getLname() {
return lname;
}

public void setLname(String lname) {
this.lname = lname;
}

}

CustomDataFormat 类的 ObjectInspector

package com.datametica.serde;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;

public class CustomStructObjectInspector extends StandardStructObjectInspector {

@Override
public Object getStructFieldData(Object data, StructField fieldRef) {
Object dataToReturn = new Object();
CustomDataFormat customSerde = (CustomDataFormat) data;
switch (fieldRef.getFieldName()) {
case "fname":
dataToReturn = customSerde.getFname();
break;
/*
* case "arrayOfLevelTwoStruct": dataToReturn =
* customSerde.getArrayOfLevelTwoStruct(); break;
*/
case "lname":
dataToReturn = customSerde.getLname();
break;
default:
dataToReturn = null;
}
return dataToReturn;
}

@Override
public List<Object> getStructFieldsDataAsList(Object data) {
List<Object> listOfData = new ArrayList<Object>();
CustomDataFormat customSerde = (CustomDataFormat) data;
listOfData.add(customSerde.getFname());
listOfData.add(customSerde.getLname());
return listOfData;
}

}

创建 jar 后,我正在创建配置单元表作为

create table customserde (fname string,lname string) row format serde 'com.datametica.serde.CustomSerde';

并将数据加载到表中

load data inpath '/user/dm3/tables_data/customserde' into table customserde;

到目前为止一切都很好但是当我在表上选择操作时

select * from customserde;

获取异常

Caused by: java.lang.ClassCastException: com.datametica.serde.CustomDataFormat cannot be cast to [Ljava.lang.Object;

感谢任何帮助我完全陷入了这一点

提前致谢。

最佳答案

我发现我的错误 deserialize() 方法没有返回 customdataformat.class 的对象,但是 row 的对象表示 arrayList,如下所示

public Object deserialize(Writable record) throws SerDeException {
Text text = (Text) record;
String[] valArray = text.toString().split(",");
CustomDataFormat dataObject = new CustomDataFormat();
dataObject.setFname(valArray[0]);
dataObject.setLname(valArray[1]);

rows.set(0, dataObject.getFname());
rows.set(1, dataObject.getLname());
return rows;
}

关于java.lang.ClassCastException : cannot be cast to java. lang.Object,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/28276525/

29 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com