2017-03-16 84 views
1

我想在 Apache Phoenix 中运行一个自定义 UDF，但遇到了错误。请帮我找出问题所在。Phoenix UDF 不工作。

以下是我的功能类:

package co.abc.phoenix.customudfs; 

import org.apache.hadoop.hbase.io.ImmutableBytesWritable; 
import org.apache.phoenix.expression.Expression; 
import org.apache.phoenix.expression.function.ScalarFunction; 
import org.apache.phoenix.parse.FunctionParseNode.Argument; 
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction; 
import org.apache.phoenix.schema.tuple.Tuple; 
import org.apache.phoenix.schema.types.PDataType; 
import org.apache.phoenix.schema.types.PVarchar; 
import org.joda.time.format.DateTimeFormatter; 

import java.nio.charset.StandardCharsets; 
import java.util.HashMap; 
import java.util.List; 
import java.util.Map; 
import java.util.concurrent.ConcurrentHashMap; 
import java.util.concurrent.ConcurrentMap; 

import static java.lang.Long.parseLong; 
import static org.joda.time.format.DateTimeFormat.forPattern; 

@BuiltInFunction(name = EpochToDateFunction.NAME, args = { 
    @Argument(allowedTypes = {PVarchar.class}), @Argument(allowedTypes = {PVarchar.class})}) 
public class EpochToDateFunction extends ScalarFunction { 

    public static final String NAME = "EpochToDate"; 
    private static final Map<String, DateTimeFormatter> DATE_FORMATTERS = new HashMap<>(); 

    public String getName() { 
    return NAME; 
    } 

    public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) { 
    Expression arg = getChildren().get(0); 
    if (!arg.evaluate(tuple, ptr)) return false; 
    String epochStr = new String(ptr.copyBytes()); 
    arg = getChildren().get(1); 
    if (!arg.evaluate(tuple, ptr)) return false; 
    String dfStr = new String(ptr.copyBytes()); 
    if (!DATE_FORMATTERS.containsKey(dfStr)) DATE_FORMATTERS.put(dfStr, forPattern(dfStr)); 
    String dateStr = DATE_FORMATTERS.get(dfStr).print(parseLong(epochStr)); 
    ptr.set(PVarchar.INSTANCE.toBytes(dateStr)); 
    return true; 
    } 

    public PDataType getDataType() { 
    return PVarchar.INSTANCE; 
    } 

} 

Maven 依赖

<dependency> 
    <groupId>org.apache.phoenix</groupId> 
    <artifactId>phoenix-core</artifactId> 
    <version>4.8.1-HBase-1.2</version> 
</dependency> 

hbase-site.xml

<configuration> 
    <property> 
    <name>phoenix.functions.allowUserDefinedFunctions</name> 
    <value>true</value> 
    </property> 
    <property> 
     <name>hbase.rootdir</name> 
     <value>hdfs://localhost:9000/hbase</value> 
    </property> 
    <property> 
      <name>hbase.dynamic.jars.dir</name> 
      <value>${hbase.rootdir}/lib</value> 
     </property> 
     <property> 
       <name>hbase.local.dir</name> 
       <value>${hbase.tmp.dir}/local/</value> 
     </property> 
</configuration> 

我已将自定义 jar 添加到 hbase.dynamic.jars.dir 目录中

$ ./bin/hadoop fs -ls /hbase/lib/ 
Found 1 items 
-rw-r--r-- 1 nj supergroup 79798208 2017-03-16 10:08 /hbase/lib/phoenix-custom-udfs-1.0-SNAPSHOT.jar 

创建和执行功能

0: jdbc:phoenix:localhost> CREATE FUNCTION EpochToDate(varchar, varchar) returns varchar as 'co.abc.phoenix.customudfs.EpochToDateFunction' using jar 'hdfs://localhost:9000/hbase/lib/phoenix-custom-udfs-1.0-SNAPSHOT.jar'; 
No rows affected (0.018 seconds) 

0: jdbc:phoenix:localhost> select epochtodate('1489637458000', 'yyyy'); 
Error: ERROR 6001 (42F01): Function undefined. functionName=EPOCHTODATE (state=42F01,code=6001) 
org.apache.phoenix.schema.FunctionNotFoundException: ERROR 6001 (42F01): Function undefined. functionName=EPOCHTODATE 
    at org.apache.phoenix.compile.FromCompiler$1.resolveFunction(FromCompiler.java:129) 
    at org.apache.phoenix.compile.ExpressionCompiler.visitLeave(ExpressionCompiler.java:313) 
    at org.apache.phoenix.compile.ProjectionCompiler$SelectClauseVisitor.visitLeave(ProjectionCompiler.java:688) 
    at org.apache.phoenix.compile.ProjectionCompiler$SelectClauseVisitor.visitLeave(ProjectionCompiler.java:584) 
    at org.apache.phoenix.parse.FunctionParseNode.accept(FunctionParseNode.java:86) 
    at org.apache.phoenix.compile.ProjectionCompiler.compile(ProjectionCompiler.java:416) 
    at org.apache.phoenix.compile.QueryCompiler.compileSingleFlatQuery(QueryCompiler.java:561) 
    at org.apache.phoenix.compile.QueryCompiler.compileSingleQuery(QueryCompiler.java:507) 
    at org.apache.phoenix.compile.QueryCompiler.compileSelect(QueryCompiler.java:202) 
    at org.apache.phoenix.compile.QueryCompiler.compile(QueryCompiler.java:157) 
    at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:406) 
    at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:380) 
    at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:271) 
    at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:266) 
    at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53) 
    at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:265) 
    at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1446) 
    at sqlline.Commands.execute(Commands.java:822) 
    at sqlline.Commands.sql(Commands.java:732) 
    at sqlline.SqlLine.dispatch(SqlLine.java:807) 
    at sqlline.SqlLine.begin(SqlLine.java:681) 
    at sqlline.SqlLine.start(SqlLine.java:398) 
    at sqlline.SqlLine.main(SqlLine.java:292) 
0: jdbc:phoenix:localhost> 

有人可以帮助我,让我知道我在哪里丢失任何配置。

回答

2

我以前有过这个问题。

基本上，UDF 需要在从表中选择行时才能正常工作（前提是你的 UDF 其余部分写得正确）。

所以像

`select udffunc(1, 1)` 将无法正常工作

`select udffunc(col1, 1) from table` 则可以

http://eyang3.github.io/2016/12/13/post/

+0

它的工作,谢谢 –

+0

嘿 Eric，你能帮我看看这个关于 JDBC 连接对象的问题吗：https://community.hortonworks.com/questions/91946/running-phoenix-udfs-from-java-service.html？ –

+0

你连接的是同一个集群吗？我看到如下错误消息：Caused by: java.lang.RuntimeException: java.lang.ClassNotFoundException: co.xxx.phoenix.customudfs.EpochPastDays —— 这看起来很可疑…… –

0
 url: sourceDatabaseConfiguration.url, 
     drivername: sourceDatabaseConfiguration.driverName, 
     maxpoolsize: sourceDatabaseConfiguration.maxpoolsize, 
     properties: { 
      'phoenix.functions.allowUserDefinedFunctions': 'true', 
      'phoenix.query.timeoutMs': '1800000', 
      'hbase.regionserver.lease.period': '1200000', 
      'hbase.rpc.timeout': '1200000', 
      'hbase.client.scanner.caching': '1000', 
      'hbase.client.scanner.timeout.period': '1200000' 

     } 

这是我使用的UDF的