Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

No valid host was found.

Whenever I try to launch an instance, it fails with "No valid host was found." Could you please help me resolve this issue?

From nova-compute.log, it says total allocated vcpus=0 — or is that just an info log indicating that no instance has been launched yet? I don't understand this log entry, and the others as well.

2019-04-02 12:08:26.616 2132 INFO nova.compute.resource_tracker [req-d8f3c0e3-fa06-4d04-92e9-1455bfd7bc8d - - - - -] Compute_service record updated for compute1:compute1 2019-04-02 12:09:27.495 2132 INFO nova.compute.resource_tracker [req-d8f3c0e3-fa06-4d04-92e9-1455bfd7bc8d - - - - -] Auditing locally available compute resources for node compute1 2019-04-02 12:09:27.587 2132 INFO nova.compute.resource_tracker [req-d8f3c0e3-fa06-4d04-92e9-1455bfd7bc8d - - - - -] Total usable vcpus: 3, total allocated vcpus: 0 2019-04-02 12:09:27.588 2132 INFO nova.compute.resource_tracker [req-d8f3c0e3-fa06-4d04-92e9-1455bfd7bc8d - - - - -] Final resource view: name=compute1 phys_ram=5919MB used_ram=512MB phys_disk=47GB used_disk=0GB total_vcpus=3 used_vcpus=0 pci

root@controller:~# openstack compute service list +----+------------------+------------+----------+---------+-------+----------------------------+ | ID | Binary | Host | Zone | Status | State | Updated At | +----+------------------+------------+----------+---------+-------+----------------------------+ | 1 | nova-scheduler | controller | internal | enabled | up | 2019-04-02T10:03:16.000000 | | 5 | nova-consoleauth | controller | internal | enabled | up | 2019-04-02T10:03:16.000000 | | 6 | nova-conductor | controller | internal | enabled | up | 2019-04-02T10:03:15.000000 | | 8 | nova-compute | compute1 | nova | enabled | up | 2019-04-02T10:03:15.000000 | +----+------------------+------------+----------+---------+-------+----------------------------+

root@controller:~# openstack network agent list +--------------------------------------+--------------------+------------+-------------------+-------+-------+---------------------------+ | ID | Agent Type | Host | Availability Zone | Alive | State | Binary | +--------------------------------------+--------------------+------------+-------------------+-------+-------+---------------------------+ | 1eea37b4-7d8f-4c3d-99dd-d99535fed931 | Linux bridge agent | compute1 | None | :-) | UP | neutron-linuxbridge-agent | | 23c07fed-83ef-48af-9d9a-0f9d5396e1de | Metadata agent | controller | None | :-) | UP | neutron-metadata-agent | | 473be661-d3b9-4888-9dd6-a0da4045aea2 | Linux bridge agent | controller | None | :-) | UP | neutron-linuxbridge-agent | | 7193f0da-693c-444f-a5a3-90ed5bca4c6a | L3 agent | controller | nova | :-) | UP | neutron-l3-agent | | e882fe7b-7852-48b1-a307-e5b803d1d77f | DHCP agent | controller | nova | :-) | UP | neutron-dhcp-agent | +--------------------------------------+--------------------+------------+-------------------+-------+-------+---------------------------+

root@controller:~# openstack hypervisor list +----+---------------------+-----------------+-------------+-------+ | ID | Hypervisor Hostname | Hypervisor Type | Host IP | State | +----+---------------------+-----------------+-------------+-------+ | 1 | compute1 | QEMU | 10.10.10.53 | up | +----+---------------------+-----------------+-------------+-------+

root@controller:~# grep -v -e "^#" -e "^$" /etc/nova/nova.conf [DEFAULT] my_ip = 10.10.10.51 use_neutron = true firewall_driver = nova.virt.firewall.NoopFirewallDriver transport_url = rabbit://openstack:stack@controller lock_path = /var/lock/nova state_path = /var/lib/nova [api] auth_strategy = keystone [api_database] connection = mysql+pymysql://nova:stack@controller/nova_api [barbican] [cache] [cells] enable = False [cinder] [compute] [conductor] [console] [consoleauth] [cors] [crypto] [database] connection = mysql+pymysql://nova:stack@controller/nova [devices] [ephemeral_storage_encryption] [filter_scheduler] [glance] api_servers = http://controller:9292 [guestfs] [healthcheck] [hyperv] [ironic] [key_manager] [keystone] [placement_database] connection = mysql+pymysql://placement:stack@controller/placement [keystone_authtoken] auth_url = http://controller:5000/v3 memcached_servers = controller:11211 auth_type = password project_domain_name = default user_domain_name = default project_name = service username = nova password = stack [libvirt] [matchmaker_redis] [metrics] [mks] [neutron] url = http://controller:9696 auth_url = http://controller:5000 auth_type = password project_domain_name = default user_domain_name = default region_name = RegionOne project_name = service username = neutron password = stack service_metadata_proxy = true metadata_proxy_shared_secret = stack [notifications] [osapi_v21] [oslo_concurrency] lock_path = /var/lib/nova/tmp [oslo_messaging_amqp] [oslo_messaging_kafka] [oslo_messaging_notifications] [oslo_messaging_rabbit] [oslo_messaging_zmq] [oslo_middleware] [oslo_policy] [pci] [placement] region_name = RegionOne project_domain_name = Default project_name = service auth_type = password user_domain_name = Default auth_url = http://controller:5000/v3 username = placement password = stack [quota] [rdp] [remote_debug] [scheduler] discover_hosts_in_cells_interval = 300 [serial_console] [service_user] [spice] [upgrade_levels] [vault] 
[vendordata_dynamic_auth] [vmware] [vnc] enabled = true server_listen = $my_ip server_proxyclient_address = $my_ip [workarounds] [wsgi] [xenserver] [xvp]

root@controller:~# grep -v -e "^#" -e "^$" /etc/neutron/neutron.conf [DEFAULT] core_plugin = ml2 service_plugins = router allow_overlapping_ips = true transport_url = rabbit://openstack:stack@controller auth_strategy = keystone notify_nova_on_port_status_changes = true notify_nova_on_port_data_changes = true [agent] root_helper = "sudo /usr/bin/neutron-rootwrap /etc/neutron/rootwrap.conf" [cors] [database] connection = mysql+pymysql://neutron:stack@controller/neutron [keystone_authtoken] auth_uri = http://controller:5000 auth_url = http://controller:5000 memcached_servers = controller:11211 auth_type = password project_domain_name = default user_domain_name = default project_name = service username = neutron password = stack [matchmaker_redis] [nova] auth_url = http://controller:5000 auth_type = password project_domain_name = default user_domain_name = default region_name = RegionOne project_name = service username = nova password = stack [oslo_concurrency] lock_path = /var/lib/neutron/tmp [oslo_messaging_amqp] [oslo_messaging_kafka] [oslo_messaging_notifications] [oslo_messaging_rabbit] [oslo_messaging_zmq] [oslo_middleware] [oslo_policy] [quotas] [ssl]

nova-api.log

2019-04-02 10:33:43.269 2875 WARNING oslo_config.cfg [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] Option "enable" from group "cells" is deprecated for removal (Cells v1 is being replaced with Cells v2.). Its value may be silently ignored in the future. 2019-04-02 10:33:43.515 2875 WARNING oslo_config.cfg [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] Option "use_neutron" from group "DEFAULT" is deprecated for removal ( nova-network is deprecated, as are any related configuration options. ). Its value may be silently ignored in the future. 2019-04-02 10:33:44.202 2875 WARNING keystonemiddleware._common.config [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] The option "__file__" in conf is not known to auth_token 2019-04-02 10:33:44.203 2875 WARNING keystonemiddleware._common.config [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] The option "here" in conf is not known to auth_token 2019-04-02 10:33:44.204 2875 WARNING oslo_config.cfg [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] Option "firewall_driver" from group "DEFAULT" is deprecated for removal ( nova-network is deprecated, as are any related configuration options. ). Its value may be silently ignored in the future. 2019-04-02 10:33:44.206 2875 WARNING keystonemiddleware.auth_token [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] AuthToken middleware is set with keystone_authtoken.service_token_roles_required set to False. This is backwards compatible but deprecated behaviour. Please set this to True. 
2019-04-02 10:33:44.209 2875 WARNING keystonemiddleware.auth_token [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] Configuring www_authenticate_uri to point to the public identity endpoint is required; clients may not be able to authenticate against an admin endpoint 2019-04-02 10:33:44.877 2875 WARNING keystonemiddleware._common.config [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] The option "__file__" in conf is not known to auth_token 2019-04-02 10:33:44.877 2875 WARNING keystonemiddleware._common.config [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] The option "here" in conf is not known to auth_token 2019-04-02 10:33:44.882 2875 WARNING keystonemiddleware.auth_token [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] AuthToken middleware is set with keystone_authtoken.service_token_roles_required set to False. This is backwards compatible but deprecated behaviour. Please set this to True. 2019-04-02 10:33:44.884 2875 WARNING keystonemiddleware.auth_token [req-dbba5751-74f1-49a0-bcf0-8f564f0a214a - - - - -] Configuring www_authenticate_uri to point to the public identity endpoint is required; clients may not be able to authenticate against an admin endpoint

cat nova-novncproxy.log

2019-04-02 09:54:05.720 3131 WARNING oslo_config.cfg [req-a02335a2-385f-4be6-b2e7-c7029002889b - - - - -] Option "enable" from group "cells" is deprecated for removal (Cells v1 is being replaced with Cells v2.). Its value may be silently ignored in the future.

nova-conductor.log 2019-04-02 10:12:26.820 3484 ERROR nova.conductor.manager [req-9dc860bd-a446-4db8-8025-a993d6cef41d 3a067873f9604019862625c317b2f8bc 795dc91461a74adaa377f2d77176dafb - default default] Failed to schedule instances: NoValidHost_Remote: No valid host was found.

nova-consoleauth.log

2019-04-02 09:40:21.994 3570 WARNING oslo_config.cfg [req-12110410-976b-4693-9a28-578e87f2852a - - - - -] Option "url" from group "neutron" is deprecated for removal (Endpoint lookup uses the service catalog via common keystoneauth1 Adapter configuration options. In the current release, "url" will override this behavior, but will be ignored and/or removed in a future release. To achieve the same result, use the endpoint_override option instead.). Its value may be silently ignored in the future. 2019-04-02 09:40:22.007 3570 WARNING oslo_config.cfg [req-12110410-976b-4693-9a28-578e87f2852a - - - - -] Option "enable" from group "cells" is deprecated for removal (Cells v1 is being replaced with Cells v2.). Its value may be silently ignored in the future. 2019-04-02 09:40:22.010 3570 INFO nova.service [-] Starting consoleauth node (version 17.0.7) 2019-04-02 09:54:03.972 3142 WARNING oslo_config.cfg [req-88db6d74-e576-4bd5-99ae-fcc96c83e35c - - - - -] Option "firewall_driver" from group "DEFAULT" is deprecated for removal ( nova-network is deprecated, as are any related configuration options. ). Its value may be silently ignored in the future. 2019-04-02 09:54:04.475 3142 WARNING oslo_config.cfg [req-88db6d74-e576-4bd5-99ae-fcc96c83e35c - - - - -] Option "use_neutron" from group "DEFAULT" is deprecated for removal ( nova-network is deprecated, as are any related configuration options. ). Its value may be silently ignored in the future. 2019-04-02 09:54:04.481 3142 WARNING oslo_config.cfg [req-88db6d74-e576-4bd5-99ae-fcc96c83e35c - - - - -] Option "url" from group "neutron" is deprecated for removal (Endpoint lookup uses the service catalog via common keystoneauth1 Adapter configuration options. In the current release, "url" will override this behavior, but will be ignored and/or removed in a future release. To achieve the same result, use the endpoint_override option instead.). Its value may be silently ignored in the future. 
2019-04-02 09:54:04.502 3142 WARNING oslo_config.cfg [req-88db6d74-e576-4bd5-99ae-fcc96c83e35c - - - - -] Option "enable" from group "cells" is deprecated for removal (Cells v1 is being replaced with Cells v2.). Its value may be silently ignored in the future. 2019-04-02 09:54:04.504 3142 INFO nova.service [-] Starting consoleauth node (version 17.0.7)

nova-scheduler.log

2019-04-02 09:54:05.581 3131 WARNING oslo_config.cfg [req-a02335a2-385f-4be6-b2e7-c7029002889b - - - - -] Option "firewall_driver" from group "DEFAULT" is deprecated for removal ( nova-network is deprecated, as are any related configuration options. ). Its value may be silently ignored in the future. 2019-04-02 09:54:05.703 3131 WARNING oslo_config.cfg [req-a02335a2-385f-4be6-b2e7-c7029002889b - - - - -] Option "use_neutron" from group "DEFAULT" is deprecated for removal ( nova-network is deprecated, as are any related configuration options. ). Its value may be silently ignored in the future. 2019-04-02 09:54:05.712 3131 WARNING oslo_config.cfg [req-a02335a2-385f-4be6-b2e7-c7029002889b - - - - -] Option "url" from group "neutron" is deprecated for removal (Endpoint lookup uses the service catalog via common keystoneauth1 Adapter configuration options. In the current release, "url" will override this behavior, but will be ignored and/or removed in a future release. To achieve the same result, use the endpoint_override option instead.). Its value may be silently ignored in the future. 2019-04-02 09:54:05.720 3131 WARNING oslo_config.cfg [req-a02335a2-385f-4be6-b2e7-c7029002889b - - - - -] Option "enable" from group "cells" is deprecated for removal (Cells v1 is being replaced with Cells v2.). Its value may be silently ignored in the future.

rabbit@controller.log

=WARNING REPORT==== 2-Apr-2019::10:10:10 === closing AMQP connection <0.1462.0> (127.0.0.1:60004 -> 127.0.1.1:5672 - nova-conductor:5273:47ac3fb8-1365-4af4-90d8-544d1dc12558, vhost: '/', user: 'openstack'): client unexpectedly closed TCP connection

=WARNING REPORT==== 2-Apr-2019::10:10:10 === closing AMQP connection <0.1459.0> (127.0.0.1:60002 -> 127.0.1.1:5672 - nova-conductor:5272:e94cfc87-3ea5-47f3-b53f-ae2485f8eaef, vhost: '/', user: 'openstack'): client unexpectedly closed TCP connection

rabbit@controller-sasl.log

=SUPERVISOR REPORT==== 2-Apr-2019::09:52:32 === Supervisor: {<0.827.0>,rabbit_channel_sup_sup} Context: shutdown_error Reason: shutdown Offender: [{nb_children,1}, {name,channel_sup}, {mfargs,{rabbit_channel_sup,start_link,[]}}, {restart_type,temporary}, {shutdown,infinity}, {child_type,supervisor}]

neutron-dhcp-agent.log

2019-04-02 10:31:01.876 3135 ERROR neutron.agent.linux.external_process [-] dnsmasq for dhcp with uuid 3c2ce37a-0c41-4aed-89de-6d40acd4bb92 not found. The process should not have died 2019-04-02 10:31:01.877 3135 WARNING neutron.agent.linux.external_process [-] Respawning dnsmasq for uuid 3c2ce37a-0c41-4aed-89de-6d40acd4bb92 2019-04-02 10:31:02.053 3135 ERROR neutron.agent.linux.external_process [-] metadata-proxy for dhcp with uuid aea652ef-d66c-457d-bbd5-f61d29f1bd79 not found. The process should not have died 2019-04-02 10:31:02.054 3135 WARNING neutron.agent.linux.external_process [-] Respawning metadata-proxy for uuid aea652ef-d66c-457d-bbd5-f61d29f1bd79 2019-04-02 10:31:02.215 3135 ERROR neutron.agent.linux.external_process [-] dnsmasq for dhcp with uuid aea652ef-d66c-457d-bbd5-f61d29f1bd79 not found. The process should not have died 2019-04-02 10:31:02.215 3135 WARNING neutron.agent.linux.external_process [-] Respawning dnsmasq for uuid aea652ef-d66c-457d-bbd5-f61d29f1bd79 2019-04-02 10:31:02.381 3135 ERROR neutron.agent.linux.external_process [-] dnsmasq for dhcp with uuid 43307e64-b652-43c0-8b54-0056739bc7ce not found. The process should not have died 2019-04-02 10:31:02.381 3135 WARNING neutron.agent.linux.external_process [-] Respawning dnsmasq for uuid 43307e64-b652-43c0-8b54-0056739bc7ce 2019-04-02 10:31:02.540 3135 ERROR neutron.agent.linux.external_process [-] metadata-proxy for dhcp with uuid 43307e64-b652-43c0-8b54-0056739bc7ce not found. The process should not have died 2019-04-02 10:31:02.540 3135 WARNING neutron.agent.linux.external_process [-] Respawning metadata-proxy for uuid 43307e64-b652-43c0-8b54-0056739bc7ce 2019-04-02 10:31:02.681 3135 ERROR neutron.agent.linux.external_process [-] metadata-proxy for dhcp with uuid 3c2ce37a-0c41-4aed-89de-6d40acd4bb92 not found. 
The process should not have died 2019-04-02 10:31:02.681 3135 WARNING neutron.agent.linux.external_process [-] Respawning metadata-proxy for uuid 3c2ce37a-0c41-4aed-89de-6d40acd4bb92 2019-04-02 10:33:33.794 2867 INFO neutron.common.config [-] Logging enabled! 2019-04-02 10:33:33.880 2867 INFO neutron.common.config [-] /usr/bin/neutron-dhcp-agent version 12.0.5 2019-04-02 10:33:37.415 2867 INFO neutron.agent.dhcp.agent [-] Synchronizing state 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent [req-5eafc5cd-19ec-4507-9777-a9583d0f9e27 - - - - -] Failed reporting state!: MessagingTimeout: Timed out waiting for a reply to message ID 1fe11419358d4ef788e22192ecdbb35c 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent Traceback (most recent call last): 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/neutron/agent/dhcp/agent.py", line 762, in _report_state 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent ctx, self.agent_state, True) 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/neutron/agent/rpc.py", line 93, in report_state 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent return method(context, 'report_state', **kwargs) 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/rpc/client.py", line 174, in call 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent retry=self.retry) 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/transport.py", line 131, in _send 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent timeout=timeout, retry=retry) 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 559, in send 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent retry=retry) 2019-04-02 10:34:37.481 2867 
ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 548, in _send 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent result = self._waiter.wait(msg_id, timeout) 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 440, in wait 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent message = self.waiters.get(msg_id, timeout=timeout) 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 328, in get 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent 'to message ID %s' % msg_id) 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent MessagingTimeout: Timed out waiting for a reply to message ID 1fe11419358d4ef788e22192ecdbb35c 2019-04-02 10:34:37.481 2867 ERROR neutron.agent.dhcp.agent 2019-04-02 10:34:38.784 2867 WARNING oslo.service.loopingcall [req-5eafc5cd-19ec-4507-9777-a9583d0f9e27 - - - - -] Function 'neutron.agent.dhcp.agent.DhcpAgentWithStateReport._report_state' run outlasted interval by 31.57 sec 2019-04-02 10:34:38.785 2867 ERROR neutron.common.rpc [req-b571f0fc-bd50-40ad-905c-68d1e9dbe795 - - - - -] Timeout in RPC method get_active_networks_info. Waiting for 29 seconds before next attempt. If the server is not down, consider increasing the rpc_response_timeout option as Neutron server(s) may be overloaded and unable to respond quickly enough.: MessagingTimeout: Timed out waiting for a reply to message ID 7818b274383541b1b29b6f8cce1aa3f3 2019-04-02 10:34:38.786 2867 WARNING neutron.common.rpc [req-b571f0fc-bd50-40ad-905c-68d1e9dbe795 - - - - -] Increasing timeout for get_active_networks_info calls to 120 seconds. Restart the agent to restore it to the default value.: MessagingTimeout: Timed out waiting for a reply to message ID 7818b274383541b1b29b6f8cce1aa3f3

neutron-l3-agent.log

2019-04-02 10:34:37.815 2850 ERROR neutron.common.rpc [req-f7d324bd-71bf-4034-8383-ae39d91e9309 - - - - -] Timeout in RPC method get_service_plugin_list. Waiting for 50 seconds before next attempt. If the server is not down, consider increasing the rpc_response_timeout option as Neutron server(s) may be overloaded and unable to respond quickly enough.: MessagingTimeout: Timed out waiting for a reply to message ID f3ada601dc9e488e9630f0d58d1e6647 2019-04-02 10:34:37.817 2850 WARNING neutron.common.rpc [req-f7d324bd-71bf-4034-8383-ae39d91e9309 - - - - -] Increasing timeout for get_service_plugin_list calls to 120 seconds. Restart the agent to restore it to the default value.: MessagingTimeout: Timed out waiting for a reply to message ID f3ada601dc9e488e9630f0d58d1e6647 2019-04-02 10:35:27.831 2850 WARNING neutron.agent.l3.agent [req-f7d324bd-71bf-4034-8383-ae39d91e9309 - - - - -] l3-agent cannot contact neutron server to retrieve service plugins enabled. Check connectivity to neutron server. Retrying... Detailed message: Timed out waiting for a reply to message ID f3ada601dc9e488e9630f0d58d1e6647.: MessagingTimeout: Timed out waiting for a reply to message ID f3ada601dc9e488e9630f0d58d1e6647

neutron-linuxbridge-agent.log

2019-04-02 10:33:41.021 2944 ERROR neutron.agent.linux.utils [-] Exit code: 2; Stdin: ; Stdout: ; Stderr: vxlan: destination port not specified Will use Linux kernel default (non-standard value) Use 'dstport 4789' to get the IANA assigned value Use 'dstport 0' to get default and quiet this message RTNETLINK answers: File exists

2019-04-02 10:33:41.064 2944 ERROR neutron.plugins.ml2.drivers.linuxbridge.agent.linuxbridge_neutron_agent [-] Unable to create VXLAN interface for VNI 1 because it is in use by another interface.: ProcessExecutionError: Exit code: 2; Stdin: ; Stdout: ; Stderr: vxlan: destination port not specified 2019-04-02 10:33:41.263 2944 INFO neutron.plugins.ml2.drivers.linuxbridge.agent.linuxbridge_neutron_agent [-] Agent initialized successfully, now running... 2019-04-02 10:33:41.303 2944 INFO neutron.plugins.ml2.drivers.agent._common_agent [req-e3c7aa55-4185-40c4-bbcf-0383bfef72db - - - - -] RPC agent_id: lb0800277c7c42 2019-04-02 10:33:41.309 2944 INFO neutron.agent.agent_extensions_manager [req-e3c7aa55-4185-40c4-bbcf-0383bfef72db - - - - -] Loaded agent extensions: [] 2019-04-02 10:33:41.517 2944 INFO neutron.plugins.ml2.drivers.agent._common_agent [req-e3c7aa55-4185-40c4-bbcf-0383bfef72db - - - - -] Linux bridge agent Agent RPC Daemon Started! 2019-04-02 10:33:41.518 2944 INFO neutron.plugins.ml2.drivers.agent._common_agent [req-e3c7aa55-4185-40c4-bbcf-0383bfef72db - - - - -] Linux bridge agent Agent out of sync with plugin! 
2019-04-02 10:33:42.249 2944 INFO neutron.plugins.ml2.drivers.linuxbridge.agent.arp_protect [req-e3c7aa55-4185-40c4-bbcf-0383bfef72db - - - - -] Clearing orphaned ARP spoofing entries for devices [] 2019-04-02 10:33:42.406 2944 INFO neutron.plugins.ml2.drivers.linuxbridge.agent.arp_protect [req-e3c7aa55-4185-40c4-bbcf-0383bfef72db - - - - -] Clearing orphaned ARP spoofing entries for devices [] 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent [-] Failed reporting state!: MessagingTimeout: Timed out waiting for a reply to message ID 702ec59b12f24d8da4a249cf40795078 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent Traceback (most recent call last): 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File "/usr/lib/python2.7/dist-packages/neutron/plugins/ml2/drivers/agent/_common_agent.py", line 128, in _report_state 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent True) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File "/usr/lib/python2.7/dist-packages/neutron/agent/rpc.py", line 93, in report_state 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent return method(context, 'report_state', **kwargs) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/rpc/client.py", line 174, in call 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent retry=self.retry) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/transport.py", line 131, in _send 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent timeout=timeout, retry=retry) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File 
"/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 559, in send 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent retry=retry) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 548, in _send 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent result = self._waiter.wait(msg_id, timeout) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 440, in wait 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent message = self.waiters.get(msg_id, timeout=timeout) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent File "/usr/lib/python2.7/dist-packages/oslo_messaging/_drivers/amqpdriver.py", line 328, in get 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent 'to message ID %s' % msg_id) 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent MessagingTimeout: Timed out waiting for a reply to message ID 702ec59b12f24d8da4a249cf40795078 2019-04-02 10:34:41.387 2944 ERROR neutron.plugins.ml2.drivers.agent._common_agent 2019-04-02 10:34:41.421 2944 WARNING oslo.service.loopingcall [-] Function 'neutron.plugins.ml2.drivers.agent._common_agent.CommonAgentLoop._report_state' run outlasted interval by 30.10 sec

neutron-linuxbridge-cleanup.log

2019-04-01 10:14:58.238 1175 ERROR neutron.plugins.ml2.drivers.linuxbridge.agent.linuxbridge_neutron_agent [-] Unable to create VXLAN interface for VNI 1 because it is in use by another interface.: ProcessExecutionError: Exit code: 2; Stdin: ; Stdout: ; Stderr: vxlan: destination port not specified