代码拉取完成,页面将自动刷新
openLooKeng version (source or binary):
openLooKeng1.2.0RC6
OS platform & distribution (eg., Linux Ubuntu 16.04):
Java version:
Actual behavior: during a CTAS creating a bucketed table, killing a worker node causes the CTAS SQL statement to fail.
Expected behavior: during a CTAS creating a bucketed table, killing a worker node should not fail the statement — the CTAS SQL statement should still complete via task (snapshot) recovery.
此处可能存在不合适展示的内容,页面不予展示。您可通过相关编辑功能自查并修改。
如您确认内容无涉及 不当用语 / 纯广告导流 / 暴力 / 低俗色情 / 侵权 / 盗版 / 虚假 / 无价值内容或违法国家有关法律法规的内容,可点击提交进行申诉,我们将尽快为您处理。
Please provide more details on the error. Steps to reproduce:
1. Create the source table:
create table test_recovery_automate.customer_textfile with(format='RCBINARY') as select * from tpcds_bin_partitioned_orc_1000.customer;
2.CTAS
create table test_recovery_automate.tb_23 with(bucket_count=16,bucketed_by=array['c_last_review_date_sk']) as select * from test_recovery_automate.customer_textfile;
3.After snapshot 1 is generated, kill a worker.
io.prestosql.spi.PrestoException: Error committing write to Hive
at io.prestosql.plugin.hive.OrcFileWriter.commit(OrcFileWriter.java:404)
at io.prestosql.plugin.hive.HiveWriter.commit(HiveWriter.java:105)
at io.prestosql.plugin.hive.HivePageSink.doFinish(HivePageSink.java:291)
at io.prestosql.plugin.hive.HivePageSink.mergeFiles(HivePageSink.java:344)
at io.prestosql.plugin.hive.authentication.NoHdfsAuthentication.doAs(NoHdfsAuthentication.java:23)
at io.prestosql.plugin.hive.HdfsEnvironment.doAs(HdfsEnvironment.java:78)
at io.prestosql.plugin.hive.HivePageSink.finish(HivePageSink.java:278)
at io.prestosql.spi.connector.classloader.ClassLoaderSafeConnectorPageSink.finish(ClassLoaderSafeConnectorPageSink.java:90)
at io.prestosql.operator.TableWriterOperator.finish(TableWriterOperator.java:226)
at io.prestosql.operator.Driver.processInternal(Driver.java:462)
at io.prestosql.operator.Driver.lambda$processFor$9(Driver.java:315)
at io.prestosql.operator.Driver.tryWithLock(Driver.java:785)
at io.prestosql.operator.Driver.processFor(Driver.java:308)
at io.prestosql.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:1261)
at io.prestosql.execution.executor.PrioritizedSplitRunner.process(PrioritizedSplitRunner.java:163)
at io.prestosql.execution.executor.TaskExecutor$TaskRunner.run(TaskExecutor.java:484)
at io.prestosql.$gen.Presto_100_RC4_1544_gba66bcf____20211221_025421_1.run(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.UncheckedIOException: java.io.FileNotFoundException: File does not exist: /user/hive/warehouse/test_recovery_automate.db/.staging-20211221_090207_00338_zm7h7-ec004170-ef93-47bf-a5c6-0f3100ebd85b/000001_0_20211221_090207_00338_zm7h7_snapshot_20211221_090207_00338_zm7h7 (inode 841086752) Holder DFSClient_NONMAPREDUCE_-133843704_198 does not have any open files.
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2890)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:652)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:178)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2757)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:899)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:602)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:528)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1036)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:985)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:913)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1761)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2876)
at io.airlift.slice.OutputStreamSliceOutput.writeToOutputStream(OutputStreamSliceOutput.java:362)
at io.airlift.slice.OutputStreamSliceOutput.writeBytes(OutputStreamSliceOutput.java:185)
at io.airlift.slice.OutputStreamSliceOutput.writeBytes(OutputStreamSliceOutput.java:176)
at io.prestosql.orc.OrcOutputBuffer.writeDataTo(OrcOutputBuffer.java:124)
at io.prestosql.orc.stream.StreamDataOutput.writeData(StreamDataOutput.java:67)
at io.prestosql.orc.OutputStreamOrcDataSink.lambda$write$0(OutputStreamOrcDataSink.java:53)
at java.util.ArrayList.forEach(ArrayList.java:1257)
at io.prestosql.orc.OutputStreamOrcDataSink.write(OutputStreamOrcDataSink.java:53)
at io.prestosql.orc.OrcWriter.flushStripe(OrcWriter.java:356)
at io.prestosql.orc.OrcWriter.close(OrcWriter.java:461)
at io.prestosql.plugin.hive.OrcFileWriter.commit(OrcFileWriter.java:395)
... 19 more
Caused by: java.io.FileNotFoundException: File does not exist: /user/hive/warehouse/test_recovery_automate.db/.staging-20211221_090207_00338_zm7h7-ec004170-ef93-47bf-a5c6-0f3100ebd85b/000001_0_20211221_090207_00338_zm7h7_snapshot_20211221_090207_00338_zm7h7 (inode 841086752) Holder DFSClient_NONMAPREDUCE_-133843704_198 does not have any open files.
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2890)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:652)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:178)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2757)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:899)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:602)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:528)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1036)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:985)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:913)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1761)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2876)
at sun.reflect.GeneratedConstructorAccessor209.newInstance(Unknown Source)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1084)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1865)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1668)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:716)
Caused by: org.apache.hadoop.ipc.RemoteException: File does not exist: /user/hive/warehouse/test_recovery_automate.db/.staging-20211221_090207_00338_zm7h7-ec004170-ef93-47bf-a5c6-0f3100ebd85b/000001_0_20211221_090207_00338_zm7h7_snapshot_20211221_090207_00338_zm7h7 (inode 841086752) Holder DFSClient_NONMAPREDUCE_-133843704_198 does not have any open files.
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2890)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:652)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:178)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2757)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:899)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:602)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:528)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1036)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:985)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:913)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1761)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2876)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1511)
at org.apache.hadoop.ipc.Client.call(Client.java:1457)
at org.apache.hadoop.ipc.Client.call(Client.java:1367)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
at com.sun.proxy.$Proxy352.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:513)
at sun.reflect.GeneratedMethodAccessor896.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
at com.sun.proxy.$Proxy353.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DFSOutputStream.addBlock(DFSOutputStream.java:1081)
... 3 more
登录后才可以发表评论