我在 Hive 中传入的参数类型是 Hive 的数组类型
解决方案 »
- openstack安装后部署虚拟机
- 云计算
- spark as a service :混入SparkJob 特质 SparkJobValidation报错
- spark-streaming配置的小问题
- 我windows 2012 服务器上装有一个虚拟机.但是鼠标不听使唤.有什么解决办法
- Spark运行卡住,求助一下各位大神
- glance同步数据库时
- Docker 里怎么配置虚拟域名
- spark org.apache.spark.SparkException: Task not serializable 报错求助
- VMware 6.7 内存分配的问题
- 请问spark submit 怎么指定jar包路径?
- 关于docker启动容器的问题,求解答,给你跪下了!
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;

import org.apache.hive.pdk.HivePdkUnitTest;
import org.apache.hive.pdk.HivePdkUnitTests;

/**
* GenericUDF Class for SQL construct "greatest(value1, value2, value3, ....)".
 * Oracle's GREATEST returns the greatest of the list of one or more expressions.
*
*/
@Description(name = "greatest", value = "_FUNC_(value1, value2, value3, ....) " +
"- Returns the greatest value in the list.",
extended = "Example:\n" + " > SELECT _FUNC_(2, 5, 12, 3) FROM src;\n 12")
@HivePdkUnitTests(setup = "create table dual_data (i int); "
+ "insert overwrite table dual_data select 1 from dual limit 1;",
cleanup = "drop table if exists dual_data;",
cases = {
@HivePdkUnitTest(query = "SELECT nexr_greatest(2, 5, 12, 3) " +
"FROM dual_data;", result = "12"),
@HivePdkUnitTest(query = "SELECT nexr_greatest('2', '5', '12', '3') " +
"FROM dual_data;", result = "5"),
@HivePdkUnitTest(query = "SELECT nexr_greatest('apples', 'oranges', 'bananas') " +
"FROM dual_data;", result = "oranges") })
public class GenericUDFGreatest extends GenericUDF {

  /** ObjectInspectors for each call-site argument, captured in initialize(). */
  private ObjectInspector[] argumentOIs;

  /** Resolves a single common return type across all arguments (conversion allowed). */
  private GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;

  /**
   * Validates the arguments and determines the common return ObjectInspector.
   *
   * @param arguments inspectors for each argument at the call site
   * @return the resolved common ObjectInspector for the return value
   * @throws UDFArgumentException if any argument is non-primitive, or the
   *         argument types cannot be unified into one return type
   */
  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Only primitive types are comparable here; reject structs/arrays/maps up front.
    for (int i = 0; i < arguments.length; i++) {
      if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(i, "Only primitive type arguments are accepted but "
            + arguments[i].getTypeName() + " is passed.");
      }
    }
    argumentOIs = arguments;
    // "true" allows implicit type conversion when unifying argument types.
    returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
    for (int i = 0; i < arguments.length; i++) {
      if (!returnOIResolver.update(arguments[i])) {
        throw new UDFArgumentTypeException(i, "The value of return should have the same type: \""
            + returnOIResolver.get().getTypeName() + "\" is expected but \""
            + arguments[i].getTypeName() + "\" is found");
      }
    }
    return returnOIResolver.get();
  }

  /**
   * Returns the greatest of the argument values, compared with
   * {@link ObjectInspectorUtils#compare} and converted to the resolved return type.
   *
   * NOTE(review): when the running maximum is still null (e.g. a leading NULL
   * argument), the next value simply replaces it, so NULLs are effectively
   * skipped rather than propagated — confirm this matches the intended
   * NULL semantics for this function.
   *
   * @param fields deferred argument values
   * @return the greatest value, converted to the common return type
   * @throws HiveException if evaluating a deferred argument fails
   */
  @Override
  public Object evaluate(DeferredObject[] fields) throws HiveException {
    Object greatestObject = null;
    ObjectInspector greatestOI = null;
    for (int i = 0; i < fields.length; i++) {
      Object fieldObject = fields[i].get();
      if (greatestObject == null) {
        greatestObject = fieldObject;
        greatestOI = argumentOIs[i];
        continue;
      }
      // "<=" keeps the later of two equal values; for equal keys the choice
      // is observationally identical after conversion.
      if (ObjectInspectorUtils.compare(greatestObject, greatestOI, fieldObject, argumentOIs[i]) <= 0) {
        greatestObject = fieldObject;
        greatestOI = argumentOIs[i];
      }
    }
    return returnOIResolver.convertIfNecessary(greatestObject, greatestOI);
  }

  /**
   * Renders the call for EXPLAIN output, e.g. {@code greatest (a, b, c)}.
   *
   * Fix: the original indexed {@code children[children.length - 1]} and threw
   * ArrayIndexOutOfBoundsException on an empty argument list; String.join
   * produces identical output for non-empty input and "greatest ()" for empty.
   */
  @Override
  public String getDisplayString(String[] children) {
    return "greatest (" + String.join(", ", children) + ")";
  }
}