[ https://issues.apache.org/jira/browse/AMBARI-24399?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Andrew Onischuk updated AMBARI-24399:
-------------------------------------
    Attachment: AMBARI-24399.patch

> Components start failing with 'Holder DFSClient_NONMAPREDUCE does not have any open files' while adding Namespace
> ------------------------------------------------------------------------------------------------------------------
>
>                 Key: AMBARI-24399
>                 URL: https://issues.apache.org/jira/browse/AMBARI-24399
>             Project: Ambari
>          Issue Type: Bug
>    Affects Versions: 2.7.1
>            Reporter: Vivek Rathod
>            Priority: Major
>             Fix For: 2.7.1
>
>         Attachments: AMBARI-24399.patch
>
>
> STR:
> Add a namespace from the UI. In the last step, restart the required services: the HiveServer2 restart fails while uploading hive.tar.gz to HDFS, although it comes back up on retry.
> {code}
> Traceback (most recent call last):
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 982, in restart
>     self.status(env)
>   File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_server.py", line 79, in status
>     check_process_status(status_params.hive_pid)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/functions/check_process_status.py", line 43, in check_process_status
>     raise ComponentIsNotRunning()
> ComponentIsNotRunning
>
> The above exception was the cause of the following exception:
>
> Traceback (most recent call last):
>   File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_server.py", line 137, in <module>
>     HiveServer().execute()
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 353, in execute
>     method(env)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/script/script.py", line 993, in restart
>     self.start(env, upgrade_type=upgrade_type)
>   File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_server.py", line 50, in start
>     self.configure(env) # FOR SECURITY
>   File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive_server.py", line 45, in configure
>     hive(name='hiveserver2')
>   File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py", line 119, in hive
>     setup_hiveserver2()
>   File "/var/lib/ambari-agent/cache/stacks/HDP/3.0/services/HIVE/package/scripts/hive.py", line 167, in setup_hiveserver2
>     skip=params.sysprep_skip_copy_tarballs_hdfs)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/functions/copy_tarball.py", line 516, in copy_to_hdfs
>     replace_existing_files=replace_existing_files,
>   File "/usr/lib/ambari-agent/lib/resource_management/core/base.py", line 166, in __init__
>     self.env.run()
>   File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 160, in run
>     self.run_action(resource, action)
>   File "/usr/lib/ambari-agent/lib/resource_management/core/environment.py", line 124, in run_action
>     provider_action()
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 654, in action_create_on_execute
>     self.action_delayed("create")
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 651, in action_delayed
>     self.get_hdfs_resource_executor().action_delayed(action_name, self)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 354, in action_delayed
>     self.action_delayed_for_nameservice(nameservice, action_name, main_resource)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 380, in action_delayed_for_nameservice
>     self._create_resource()
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 396, in _create_resource
>     self._create_file(self.main_resource.resource.target, source=self.main_resource.resource.source, mode=self.mode)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 511, in _create_file
>     self.util.run_command(target, 'CREATE', method='PUT', overwrite=True, assertable_result=False, file_to_put=source, **kwargs)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 199, in run_command
>     return self._run_command(*args, **kwargs)
>   File "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", line 272, in _run_command
>     raise WebHDFSCallException(err_msg, result_dict)
> resource_management.libraries.providers.hdfs_resource.WebHDFSCallException: Execution of 'curl -sS -L -w '%{http_code}' -X PUT --data-binary @/usr/hdp/3.0.1.0-30/hive/hive.tar.gz -H 'Content-Type: application/octet-stream' --negotiate -u : -k 'https://<HOST-FQDN>:50470/webhdfs/v1/hdp/apps/3.0.1.0-30/hive/hive.tar.gz?op=CREATE&overwrite=True&permission=444'' returned status_code=404.
> {
>   "RemoteException": {
>     "exception": "FileNotFoundException",
>     "javaClassName": "java.io.FileNotFoundException",
>     "message": "File does not exist: /hdp/apps/3.0.1.0-30/hive/hive.tar.gz (inode 16450) Holder DFSClient_NONMAPREDUCE_-1764810327_120 does not have any open files.\n\tat org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)\n\tat org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)\n\tat org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)\n\tat org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)\n\tat org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)\n\tat org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)\n\tat org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)\n\tat org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)\n\tat org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)\n\tat org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)\n\tat org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)\n\tat java.security.AccessController.doPrivileged(Native Method)\n\tat javax.security.auth.Subject.doAs(Subject.java:422)\n\tat org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)\n\tat org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)\n"
>   }
> }
> {code}
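> The 404 comes from the WebHDFS CREATE itself: the NameNode revoked the client's lease mid-upload (plausibly while the freshly added nameservice was settling), cleaned up the half-written file, and the next addBlock found no open file for that holder. Since a manual retry succeeds, re-issuing the whole PUT appears to be enough. Below is a minimal sketch of that retry idea; the helper name put_with_retries is hypothetical, the run_command signature is taken from the trace above, and this is not necessarily what AMBARI-24399.patch actually does.
> {code}
> import time
>
> class WebHDFSCallException(Exception):
>     """Stand-in for resource_management's WebHDFSCallException."""
>
> def put_with_retries(run_command, target, source, tries=3, sleep_secs=10):
>     """Re-issue the WebHDFS CREATE a few times before giving up.
>
>     'Holder ... does not have any open files' means the NameNode dropped
>     the client's lease mid-write; overwrite=True lets a fresh attempt
>     recreate the file from scratch instead of resuming the dead upload.
>     """
>     for attempt in range(1, tries + 1):
>         try:
>             return run_command(target, 'CREATE', method='PUT',
>                                overwrite=True, assertable_result=False,
>                                file_to_put=source)
>         except WebHDFSCallException:
>             if attempt == tries:
>                 raise
>             time.sleep(sleep_secs)
> {code}
> Wrapping the call in copy_to_hdfs this way would make it survive the transient lease loss the same way the manual retry in the STR does.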
"/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", > line 380, in action_delayed_for_nameservice > self._create_resource() > File > "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", > line 396, in _create_resource > self._create_file(self.main_resource.resource.target, > source=self.main_resource.resource.source, mode=self.mode) > File > "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", > line 511, in _create_file > self.util.run_command(target, 'CREATE', method='PUT', overwrite=True, > assertable_result=False, file_to_put=source, **kwargs) > File > "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", > line 199, in run_command > return self._run_command(*args, **kwargs) > File > "/usr/lib/ambari-agent/lib/resource_management/libraries/providers/hdfs_resource.py", > line 272, in _run_command > raise WebHDFSCallException(err_msg, result_dict) > resource_management.libraries.providers.hdfs_resource.WebHDFSCallException: > Execution of 'curl -sS -L -w '%\{http_code}' -X PUT --data-binary > @/usr/hdp/3.0.1.0-30/hive/hive.tar.gz -H 'Content-Type: > application/octet-stream' --negotiate -u : -k > 'https://<HOST-FQDN>:50470/webhdfs/v1/hdp/apps/3.0.1.0-30/hive/hive.tar.gz?op=CREATE&overwrite=True&permission=444'' > returned status_code=404. > { > "RemoteException": { > "exception": "FileNotFoundException", > "javaClassName": "java.io.FileNotFoundException", > "message": "File does not exist: /hdp/apps/3.0.1.0-30/hive/hive.tar.gz > (inode 16450) Holder DFSClient_NONMAPREDUCE_-1764810327_120 does not have any > open files.\n\tat > org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkLease(FSNamesystem.java:2800)\n\tat > > org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.analyzeFileState(FSDirWriteFileOp.java:597)\n\tat > > org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.validateAddBlock(FSDirWriteFileOp.java:172)\n\tat > > org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2679)\n\tat > > org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:875)\n\tat > > org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:561)\n\tat > > org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)\n\tat > > org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:523)\n\tat > org.apache.hadoop.ipc.RPC$Server.call(RPC.java:991)\n\tat > org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:872)\n\tat > org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:818)\n\tat > java.security.AccessController.doPrivileged(Native Method)\n\tat > javax.security.auth.Subject.doAs(Subject.java:422)\n\tat > org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1729)\n\tat > org.apache.hadoop.ipc.Server$Handler.run(Server.java:2678)\n" > } > } > {code} > -- This message was sent by Atlassian JIRA (v7.6.3#76005)