hive4 运行查询命令时出错 原因:java.lang.ExceptionInInitializerError: Exception java.lang.RuntimeException: java.lang.reflect.InaccessibleObjectException

问题描述 投票:0回答:1

我使用的是 Hive 4、JDK 17 和 Hadoop 3.4。当我执行查询或插入数据时就会报错。以下参数已添加到 Hive 的 conf/hive-env.sh 中:

export HADOOP_OPTS="$HADOOP_OPTS --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED"

(原帖此处为错误截图,错误日志见下文)

 ERROR [HiveServer2-Handler-Pool: Thread-76] thrift.ThriftCLIService: Failed to execute statement [request: TExecuteStatementReq(sessionHandle:TSessionHandle(sessionId:THandleIdentifier(guid:09 01 E3 84 3E 43 4D BA AD 86 3A 5E 0F 93 CD 0A, secret:BE 4A F2 02 99 EE 44 8D 95 51 31 32 39 7A 1D 50)), statement:insert into employees values(1,'aa','bb',1.0), confOverlay:{}, runAsync:true, queryTimeout:0)]
org.apache.hive.service.cli.HiveSQLException: Error running query
    at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:220) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:270) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.operation.Operation.run(Operation.java:286) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:557) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:542) ~[hive-service-4.0.0.jar:4.0.0]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
    at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
    at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
    at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
    at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63) ~[hive-service-4.0.0.jar:4.0.0]
    at java.security.AccessController.doPrivileged(AccessController.java:712) ~[?:?]
    at javax.security.auth.Subject.doAs(Subject.java:439) ~[?:?]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953) ~[hadoop-common-3.4.0.jar:?]
    at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59) ~[hive-service-4.0.0.jar:4.0.0]
    at jdk.proxy2.$Proxy44.executeStatementAsync(Unknown Source) ~[?:?]
    at org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:316) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:652) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1670) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1650) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:38) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:56) ~[hive-service-4.0.0.jar:4.0.0]
    at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:250) ~[hive-exec-4.0.0.jar:4.0.0]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) ~[?:?]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) ~[?:?]
    at java.lang.Thread.run(Thread.java:842) ~[?:?]
Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hive.common.CopyOnFirstWriteProperties
    at org.apache.hadoop.hive.ql.plan.PartitionDesc.setProperties(PartitionDesc.java:225) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.plan.PartitionDesc.<init>(PartitionDesc.java:89) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.exec.Utilities.getPartitionDesc(Utilities.java:788) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.setMapWork(GenMapRedUtils.java:519) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.setTaskPlan(GenMapRedUtils.java:467) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.setTaskPlan(GenMapRedUtils.java:445) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1.processFS(GenMRFileSink1.java:208) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1.process(GenMRFileSink1.java:105) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:90) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatchAndReturn(DefaultGraphWalker.java:105) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:54) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:120) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.MapReduceCompiler.generateTaskTree(MapReduceCompiler.java:339) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.TaskCompiler.compile(TaskCompiler.java:301) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.compilePlan(SemanticAnalyzer.java:13089) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:13307) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:465) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:327) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Compiler.analyze(Compiler.java:224) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Compiler.compile(Compiler.java:107) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:519) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:471) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:436) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:430) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:121) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:207) ~[hive-service-4.0.0.jar:4.0.0]
    ... 27 more
Caused by: java.lang.ExceptionInInitializerError: Exception java.lang.RuntimeException: java.lang.reflect.InaccessibleObjectException: Unable to make field protected volatile java.util.Properties java.util.Properties.defaults accessible: module java.base does not "opens java.util" to unnamed module @342c38f8 [in thread "e8b1b6ff-56dc-4fbf-84e2-a0c0669c2cf3 HiveServer2-Handler-Pool: Thread-65"]
    at org.apache.hadoop.hive.common.CopyOnFirstWriteProperties.<clinit>(CopyOnFirstWriteProperties.java:60) ~[hive-common-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.plan.PartitionDesc.setProperties(PartitionDesc.java:225) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.plan.PartitionDesc.<init>(PartitionDesc.java:89) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.exec.Utilities.getPartitionDesc(Utilities.java:788) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.setMapWork(GenMapRedUtils.java:519) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.setTaskPlan(GenMapRedUtils.java:467) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils.setTaskPlan(GenMapRedUtils.java:445) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1.processFS(GenMRFileSink1.java:208) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1.process(GenMRFileSink1.java:105) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:90) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatchAndReturn(DefaultGraphWalker.java:105) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:54) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.GenMapRedWalker.walk(GenMapRedWalker.java:65) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:120) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.MapReduceCompiler.generateTaskTree(MapReduceCompiler.java:339) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.TaskCompiler.compile(TaskCompiler.java:301) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.compilePlan(SemanticAnalyzer.java:13089) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:13307) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:465) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:327) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Compiler.analyze(Compiler.java:224) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Compiler.compile(Compiler.java:107) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:519) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:471) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:436) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:430) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:121) ~[hive-exec-4.0.0.jar:4.0.0]
    at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:207) ~[hive-service-4.0.0.jar:4.0.0]
    ... 27 more

希望通过在hive-env文件中添加配置来解决问题,因为这台主机没有任何hadoop组件,是通过配置文件与hadoop集群关联的。

hive
1个回答
0
投票

需要修改配置文件 mapred-site.xml 和 yarn-site.xml(只需修改 NodeManager 和 ResourceManager 节点的配置),分别为它们添加以下配置。请注意,-Xmx1024m 是可选的,我会在配置示例中保留它。另外需要说明:从堆栈来看,该异常发生在 HiveServer2 编译查询的阶段(CopyOnFirstWriteProperties 类初始化失败),因此还应确认 hive-env.sh 中的 --add-opens 参数确实被 HiveServer2 进程的 JVM 加载(例如同时设置 HADOOP_CLIENT_OPTS,并在修改后重启 HiveServer2)——仅修改 MapReduce 任务的 JVM 参数可能不足以解决编译期的报错,此点请结合实际环境验证。

1 插入 yarn-site.xml(注:yarn.app.mapreduce.am.command-opts 实际上是 MapReduce 的配置项,通常定义在 mapred-default.xml 中,一般建议配置到 mapred-site.xml;放在 yarn-site.xml 中是否生效请自行验证)

<property>
  <name>yarn.app.mapreduce.am.command-opts</name>
  <value>--add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED --add-opens=java.base/java.math=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.sql/java.sql=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED</value>
</property>

2 插入mapred-site.xml

  <property>
  <name>mapreduce.map.java.opts</name>
  <value>-Xmx1024m --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED --add-opens=java.base/java.math=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.sql/java.sql=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED</value>
</property>

<property>
  <name>mapreduce.reduce.java.opts</name>
  <value>-Xmx1024m --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED --add-opens=java.base/java.math=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.sql/java.sql=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED</value>
</property>
© www.soinside.com 2019 - 2024. All rights reserved.