Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version
 Thanks, guys — I found the root cause and was able to solve the issue
  (it actually took me two days to debug).
  I hope this information helps anyone who is facing a similar problem.


Please find the detailed steps and debug information below.


[root@controller-node ~]# 
[root@controller-node ~]# nova --debug boot --flavor m1.tiny --image cirros-0.3.4-x86_64 --nic netid=3e1f50b7-4f07-4479-b4fd-2db23854f3bc --security-group default --key-name demo-key demo-instance1 
DEBUG (session:195) REQ: curl -g -i -X GET http://controller-node:5000/v3 -H "Accept: application/json" -H "User-Agent: python-keystoneclient"
INFO (connectionpool:213) Starting new HTTP connection (1): controller-node
DEBUG (connectionpool:395) "GET /v3 HTTP/1.1" 200 253
DEBUG (session:224) RESP: [200] Date: Wed, 05 Jan 2000 06:46:34 GMT Server: Apache/2.4.6 (CentOS) mod_wsgi/3.4 Python/2.7.5 Vary: X-Auth-Token x-openstack-request-id: req-3b76f610-d971-46e4-b90c-dc971d57ee90 Content-Length: 253 Keep-Alive: timeout=5, max=100 Connection: Keep-Alive Content-Type: application/json 
RESP BODY: {"version": {"status": "stable", "updated": "2015-03-30T00:00:00Z", "media-types": [{"base": "application/json", "type": "application/vnd.openstack.identity-v3+json"}], "id": "v3.4", "links": [{"href": "http://controller-node:5000/v3/", "rel": "self"}]}}

DEBUG (base:171) Making authentication request to http://controller-node:5000/v3/auth/tokens
DEBUG (connectionpool:395) "POST /v3/auth/tokens HTTP/1.1" 201 4177
DEBUG (session:195) REQ: curl -g -i -X GET http://controller-node:8774/v2/3cd53bc659334d1f84a72002a9f9159a/images -H "User-Agent: python-novaclient" -H "Accept: application/json" -H "X-Auth-Token: {SHA1}81f4ad5ac15259cd379cee7909d3c50b880a6fc1"
INFO (connectionpool:213) Starting new HTTP connection (1): controller-node
DEBUG (connectionpool:395) "GET /v2/3cd53bc659334d1f84a72002a9f9159a/images HTTP/1.1" 200 520
DEBUG (session:224) RESP: [200] Content-Type: application/json Content-Length: 520 X-Compute-Request-Id: req-d7e22a4c-d5f0-461c-8c61-08efb8525a83 Date: Wed, 05 Jan 2000 06:46:35 GMT Connection: keep-alive 
RESP BODY: {"images": [{"id": "128ddb4e-73b9-4f79-841c-ca327376594e", "links": [{"href": "http://controller-node:8774/v2/3cd53bc659334d1f84a72002a9f9159a/images/128ddb4e-73b9-4f79-841c-ca327376594e", "rel": "self"}, {"href": "http://controller-node:8774/3cd53bc659334d1f84a72002a9f9159a/images/128ddb4e-73b9-4f79-841c-ca327376594e", "rel": "bookmark"}, {"href": "http://controller-node:9292/images/128ddb4e-73b9-4f79-841c-ca327376594e", "type": "application/vnd.openstack.image", "rel": "alternate"}], "name": "cirros-0.3.4-x86_64"}]}

DEBUG (session:195) REQ: curl -g -i -X GET http://controller-node:8774/v2/3cd53bc659334d1f84a72002a9f9159a/images/128ddb4e-73b9-4f79-841c-ca327376594e -H "User-Agent: python-novaclient" -H "Accept: application/json" -H "X-Auth-Token: {SHA1}81f4ad5ac15259cd379cee7909d3c50b880a6fc1"
DEBUG (connectionpool:395) "GET /v2/3cd53bc659334d1f84a72002a9f9159a/images/128ddb4e-73b9-4f79-841c-ca327376594e HTTP/1.1" 500 128
DEBUG (session:224) RESP:
DEBUG (shell:914) The server has either erred or is incapable of performing the requested operation. (HTTP 500) (Request-ID: req-4562c62c-d003-4198-95d4-7a64239628f9)
Traceback (most recent call last):
  File "/usr/lib/python2.7/site-packages/novaclient/shell.py", line 911, in main
    OpenStackComputeShell().main(argv)
  File "/usr/lib/python2.7/site-packages/novaclient/shell.py", line 838, in main
    args.func(self.cs, args)
  File "/usr/lib/python2.7/site-packages/novaclient/v2/shell.py", line 495, in do_boot
    boot_args, boot_kwargs = _boot(cs, args)
  File "/usr/lib/python2.7/site-packages/novaclient/v2/shell.py", line 142, in _boot
    image = _find_image(cs, args.image)
  File "/usr/lib/python2.7/site-packages/novaclient/v2/shell.py", line 1894, in _find_image
    return utils.find_resource(cs.images, image)
  File "/usr/lib/python2.7/site-packages/novaclient/utils.py", line 216, in find_resource
    return manager.find(**kwargs)
  File "/usr/lib/python2.7/site-packages/novaclient/base.py", line 196, in find
    matches = self.findall(**kwargs)
  File "/usr/lib/python2.7/site-packages/novaclient/base.py", line 258, in findall
    found.append(self.get(obj.id))
  File "/usr/lib/python2.7/site-packages/novaclient/v2/images.py", line 53, in get
    return self._get("/images/%s" % base.getid(image), "image")
  File "/usr/lib/python2.7/site-packages/novaclient/base.py", line 156, in _get
    _resp, body = self.api.client.get(url)
  File "/usr/lib/python2.7/site-packages/keystoneclient/adapter.py", line 170, in get
    return self.request(url, 'GET', **kwargs)
  File "/usr/lib/python2.7/site-packages/novaclient/client.py", line 96, in request
    raise exceptions.from_response(resp, body, url, method)
ClientException: The server has either erred or is incapable of performing the requested operation. (HTTP 500) (Request-ID: req-4562c62c-d003-4198-95d4-7a64239628f9)
ERROR (ClientException): The server has either erred or is incapable of performing the requested operation. (HTTP 500) (Request-ID: req-4562c62c-d003-4198-95d4-7a64239628f9)
[root@controller-node ~]# 
[root@controller-node ~]# 
[root@controller-node ~]# 
[root@controller-node ~]# 
[root@controller-node ~]# 
[root@controller-node ~]# 
[root@controller-node ~]# mkdir liberty
[root@controller-node ~]# cd liberty
[root@controller-node liberty]# wget http://mirror.centos.org/centos/7/cloud/x86_64/openstack-liberty/python-glanceclient-1.1.0-1.el7.noarch.rpm
--2000-01-05 06:47:50--  http://mirror.centos.org/centos/7/cloud/x86_64/openstack-liberty/python-glanceclient-1.1.0-1.el7.noarch.rpm
Resolving mirror.centos.org (mirror.centos.org)... 202.76.233.2, 2605:9000:401:102::2
Connecting to mirror.centos.org (mirror.centos.org)|202.76.233.2|:80... connected.
HTTP request sent, awaiting response... 200 OK
Length: 253652 (248K) [application/x-rpm]
Saving to: 'python-glanceclient-1.1.0-1.el7.noarch.rpm'

100%[=================================================================================================>] 253,652      291KB/s   in 0.9s   

2000-01-05 06:47:52 (291 KB/s) - 'python-glanceclient-1.1.0-1.el7.noarch.rpm' saved [253652/253652]

[root@controller-node liberty]# ls
python-glanceclient-1.1.0-1.el7.noarch.rpm
[root@controller-node liberty]# 
[root@controller-node liberty]# 
[root@controller-node liberty]# rpm -qa | grep python-glanceclient
python-glanceclient-0.17.0-2.el7.noarch
[root@controller-node liberty]# rpm -Uvh python-glanceclient-1.1.0-1.el7.noarch.rpm
Preparing...                          ################################# [100%]
Updating / installing...
   1:python-glanceclient-1:1.1.0-1.el7################################# [ 50%]
Cleaning up / removing...
   2:python-glanceclient-1:0.17.0-2.el################################# [100%]
[root@controller-node liberty]# 
[root@controller-node liberty]# 
[root@controller-node liberty]# rpm -qa | grep python-glanceclient
python-glanceclient-1.1.0-1.el7.noarch
[root@controller-node liberty]#





[root@controller-node liberty]# systemctl restart memcached.service
[root@controller-node liberty]# systemctl restart openstack-glance-api.service openstack-glance-registry.service
[root@controller-node liberty]# systemctl restart openstack-nova-api.service openstack-nova-cert.service openstack-nova-consoleauth.service openstack-nova-scheduler.service openstack-nova-conductor.service openstack-nova-novncproxy.service
[root@controller-node liberty]#



[root@controller-node ~]# source /opt/openstack-kilo/scripts/admin-openrc.sh 
[root@controller-node ~]# nova service-list
ERROR (InternalServerError): An unexpected error prevented the server from fulfilling your request. (HTTP 500) (Request-ID: req-4948996a-de0b-4d5d-a506-f6f15f2d839b)
[root@controller-node ~]# 
[root@controller-node ~]# 
[root@controller-node ~]# nova service-list
ERROR (InternalServerError): An unexpected error prevented the server from fulfilling your request. (HTTP 500) (Request-ID: req-7690e96e-46d3-484c-a9e9-76ecdc7aa1ba)
[root@controller-node ~]# 
[root@controller-node ~]# 
[root@controller-node ~]# neutron agent-list
An unexpected error prevented the server from fulfilling your request. (HTTP 500) (Request-ID: req-c62758e9-5943-4a44-8abd-76b344aa0771)
[root@controller-node ~]# systemctl restart httpd.service memcached.service
Warning: httpd.service changed on disk. Run 'systemctl daemon-reload' to reload units.

[root@controller-node ~]# 
[root@controller-node ~]# neutron agent-list
+--------------------------------------+--------------------+----------------+-------+----------------+---------------------------+
| id                                   | agent_type         | host           | alive | admin_state_up | binary                    |
+--------------------------------------+--------------------+----------------+-------+----------------+---------------------------+
| 0389e133-4bb5-4092-b6d0-1dfaff01b391 | Metadata agent     | controller-node | :-)   | True           | neutron-metadata-agent    |
| 727a5bcd-dd8d-46fc-82db-08c775d1eaf5 | Open vSwitch agent | compute-node    | :-)   | True           | neutron-openvswitch-agent |
| 9b68fe23-ec11-41b7-b9c7-6d63ba34d1e5 | DHCP agent         | controller-node | :-)   | True           | neutron-dhcp-agent        |
| f63ec54c-e425-4f74-b27c-1fb7a2147424 | L3 agent           | controller-node | :-)   | True           | neutron-l3-agent          |
| fbf13e29-24ba-4c5e-af76-0577c94f47f0 | Open vSwitch agent | controller-node | :-)   | True           | neutron-openvswitch-agent |
+--------------------------------------+--------------------+----------------+-------+----------------+---------------------------+
[root@controller-node ~]# nova service-list
+----+------------------+----------------+----------+---------+-------+----------------------------+-----------------+
| Id | Binary           | Host           | Zone     | Status  | State | Updated_at                 | Disabled Reason |
+----+------------------+----------------+----------+---------+-------+----------------------------+-----------------+
| 1  | nova-cert        | controller-node | internal | enabled | up    | 2000-01-05T06:57:52.000000 | -               |
| 2  | nova-consoleauth | controller-node | internal | enabled | up    | 2000-01-05T06:57:52.000000 | -               |
| 3  | nova-scheduler   | controller-node | internal | enabled | up    | 2000-01-05T06:57:52.000000 | -               |
| 4  | nova-conductor   | controller-node | internal | enabled | up    | 2000-01-05T06:57:52.000000 | -               |
| 5  | nova-compute     | compute-node    | nova     | enabled | up    | 2000-01-05T06:57:51.000000 | -               |
+----+------------------+----------------+----------+---------+-------+----------------------------+-----------------+
[root@controller-node ~]#





After upgrading the glance client, the original Glance HTTP 500 error was resolved, but the launched instance remained stuck in the "spawning" state:

nova-compute.log

334d1f84a72002a9f9159a - - -] [instance: d08e5e44-a0cc-4824-8181-60273762195b] Claim successful
2000-01-05 06:59:22.660 1731 INFO nova.scheduler.client.report [req-801cdd51-84b9-42a6-93cb-7fd70ef6a641 8b989da594d34ab28a36839cee016f4f 3cd53bc659334d1f84a72002a9f9159a - - -] Compute_service record updated for ('compute-node', 'compute-node')
2000-01-05 06:59:23.308 1731 INFO nova.scheduler.client.report [req-801cdd51-84b9-42a6-93cb-7fd70ef6a641 8b989da594d34ab28a36839cee016f4f 3cd53bc659334d1f84a72002a9f9159a - - -] Compute_service record updated for ('compute-node', 'compute-node')
2000-01-05 06:59:23.322 1731 INFO oslo_messaging._drivers.impl_rabbit [-] Connecting to AMQP server on 10.130.98.99:5672
2000-01-05 06:59:23.341 1731 INFO oslo_messaging._drivers.impl_rabbit [-] Connected to AMQP server on 10.130.98.99:5672
2000-01-05 06:59:23.564 1731 INFO nova.virt.libvirt.driver [req-801cdd51-84b9-42a6-93cb-7fd70ef6a641 8b989da594d34ab28a36839cee016f4f 3cd53bc659334d1f84a72002a9f9159a - - -] [instance: d08e5e44-a0cc-4824-8181-60273762195b] Creating image
2000-01-05 06:59:23.567 1731 INFO oslo_concurrency.lockutils [req-801cdd51-84b9-42a6-93cb-7fd70ef6a641 8b989da594d34ab28a36839cee016f4f 3cd53bc659334d1f84a72002a9f9159a - - -] Created lock path: /var/lib/nova/instances/locks
2000-01-05 06:59:25.220 1731 INFO nova.compute.resource_tracker [req-d88450c6-8cc2-4c85-8a68-70b092c11c64 - - - - -] Auditing locally available compute resources for node compute-node
2000-01-05 06:59:26.357 1731 INFO nova.compute.resource_tracker [req-d88450c6-8cc2-4c85-8a68-70b092c11c64 - - - - -] Total usable vcpus: 16, total allocated vcpus: 0
2000-01-05 06:59:26.358 1731 INFO nova.compute.resource_tracker [req-d88450c6-8cc2-4c85-8a68-70b092c11c64 - - - - -] Final resource view: name=compute-node phys_ram=31862MB used_ram=1024MB phys_disk=58GB used_disk=1GB total_vcpus=16 used_vcpus=0 pci_stats=<nova.pci.stats.PciDeviceStats object at 0x5ef9f10>
2000-01-05 06:59:26.393 1731 INFO nova.scheduler.client.report [req-d88450c6-8cc2-4c85-8a68-70b092c11c64 - - - - -] Compute_service record updated for ('compute-node', 'compute-node')
2000-01-05 06:59:26.393 1731 INFO nova.compute.resource_tracker [req-d88450c6-8cc2-4c85-8a68-70b092c11c64 - - - - -] Compute_service record updated for compute-node:compute-node
2000-01-05 07:00:23.346 1731 ERROR nova.compute.manager [-] Instance failed network setup after 1 attempt(s)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager Traceback (most recent call last):
2000-01-05 06:59:26.393 1731 INFO nova.compute.resource_tracker [req-d88450c6-8cc2-4c85-8a68-70b092c11c64 - - - - -] Compute_service record updated for compute-node:compute-node
2000-01-05 07:00:23.346 1731 ERROR nova.compute.manager [-] Instance failed network setup after 1 attempt(s)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager Traceback (most recent call last):
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/nova/compute/manager.py", line 1795, in _allocate_network_async
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     dhcp_options=dhcp_options)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/nova/network/api.py", line 49, in wrapped
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     return func(self, context, *args, **kwargs)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/nova/network/base_api.py", line 64, in wrapper
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     res = f(self, context, *args, **kwargs)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/nova/network/api.py", line 281, in allocate_for_instance
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     nw_info = self.network_rpcapi.allocate_for_instance(context, **args)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/nova/network/rpcapi.py", line 152, in allocate_for_instance
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     macs=jsonutils.to_primitive(macs))
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/oslo_messaging/rpc/client.py", line 156, in call
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     retry=self.retry)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/oslo_messaging/transport.py", line 90, in _send
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     timeout=timeout, retry=retry)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 350, in send
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     retry=retry)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 339, in _send
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     result = self._waiter.wait(msg_id, timeout)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 243, in wait
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     message = self.waiters.get(msg_id, timeout=timeout)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager   File "/usr/lib/python2.7/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 149, in get
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager     'to message ID %s' % msg_id)
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manager MessagingTimeout: Timed out waiting for a reply to message ID ac4c1974d6824a8d9de4e894478bc4da
2000-01-05 07:00:23.346 1731 TRACE nova.compute.manag




On the controller node:

[root@controller-node ~]# source /opt/openstack-kilo/scripts/admin-openrc.sh 
[root@controller-node ~]# systemctl start neutron-server.service
[root@controller-node ~]# systemctl restart neutron-server.service
[root@controller-node ~]# systemctl restart openstack-nova-api.service
[root@controller-node ~]# systemctl restart openvswitch.service
[root@controller-node ~]# systemctl start neutron-openvswitch-agent.service neutron-l3-agent.service \
> neutron-dhcp-agent.service neutron-metadata-agent.service
[root@controller-node ~]# systemctl restart neutron-openvswitch-agent.service neutron-l3-agent.service neutron-dhcp-agent.service neutron-metadata-agent.service
[root@controller-node ~]#


On the compute node:

[root@compute-node ~]# 
[root@compute-node ~]# systemctl restart openvswitch.service
[root@compute-node ~]# systemctl restart openstack-nova-compute.service
[root@compute-node ~]# systemctl restart neutron-openvswitch-agent.service



After restarting these services, the instance launched successfully.