Not able to create a volume from an image with a Ceph backend after upgrading from Icehouse to Juno

Hi,

I am currently running an OpenStack setup with a Ceph backend for Glance and Cinder. On Icehouse everything worked fine. After updating the setup to Juno I am no longer able to create a volume from an image: the volume ends up in error status, yet when I look at Ceph with rbd, a volume object is present.

In the cinder-volume log I see: ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)

Does anybody have an idea?


BR,
Jeroen



phoenix:/etc/cinder # glance image-list
+--------------------------------------+-------------------------+-------------+------------------+----------+--------+
| ID                                   | Name                    | Disk Format | Container Format | Size     | Status |
+--------------------------------------+-------------------------+-------------+------------------+----------+--------+
| b7fff5f9-d63d-4729-ac1e-b8831a828e26 | cirros-0.3.3-x86_64     | qcow2       | bare             | 13200896 | active |
| 7d9eef7f-15f1-40e3-9e75-593d94a8571d | raw cirros-0.3.3-x86_64 | qcow2       | bare             | 41126400 | active |
+--------------------------------------+-------------------------+-------------+------------------+----------+--------+
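Since show_image_direct_url=True is set in glance-api.conf (shown further down), the RBD location that Glance reports for an image can also be inspected directly; for example (direct_url only appears in the output when that option is active):

phoenix:/etc/cinder # glance image-show b7fff5f9-d63d-4729-ac1e-b8831a828e26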

phoenix:/etc/cinder # rbd ls images
7d9eef7f-15f1-40e3-9e75-593d94a8571d
b7fff5f9-d63d-4729-ac1e-b8831a828e26
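Both images are visible in the pool with the admin credentials. As a sanity check, the same listing can be tried with the cephx users that Cinder and Glance are configured to use (rbd_user = volumes and rbd_store_user = images, per the configs below; this assumes their keyrings are in the default /etc/ceph location):

phoenix:/etc/cinder # rbd --id volumes -c /etc/ceph/ceph.conf ls images
phoenix:/etc/cinder # rbd --id images -c /etc/ceph/ceph.conf ls images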


phoenix:/etc/cinder # cinder create 1 --display-name vol_fail_test --image-id b7fff5f9-d63d-4729-ac1e-b8831a828e26 --availability-zone nova
+---------------------+--------------------------------------+
|       Property      |                Value                 |
+---------------------+--------------------------------------+
|     attachments     |                  []                  |
|  availability_zone  |                 nova                 |
|       bootable      |                false                 |
|      created_at     |      2014-10-23T14:11:59.820165      |
| display_description |                 None                 |
|     display_name    |            vol_fail_test             |
|      encrypted      |                False                 |
|          id         | 52df23c7-b988-4f8c-9754-deab0a349887 |
|       image_id      | b7fff5f9-d63d-4729-ac1e-b8831a828e26 |
|       metadata      |                  {}                  |
|         size        |                  1                   |
|     snapshot_id     |                 None                 |
|     source_volid    |                 None                 |
|        status       |               creating               |
|     volume_type     |                 None                 |
+---------------------+--------------------------------------+
phoenix:/etc/cinder # cinder list
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
|                  ID                  | Status |     Display Name    | Size | Volume Type | Bootable | Attached to |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
| 52df23c7-b988-4f8c-9754-deab0a349887 | error  |    vol_fail_test    |  1   |     None    |  false   |             |
| 628bf569-7654-4b49-aec5-b02ab49dd522 | error  | cirros-0.3.3-x86_64 |  1   |     None    |  false   |             |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
phoenix:/etc/cinder # rbd ls volumes
volume-52df23c7-b988-4f8c-9754-deab0a349887
volume-628bf569-7654-4b49-aec5-b02ab49dd522
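The failed volumes linger in the volumes pool even though their status is error; they can be removed with cinder delete before retrying, e.g.:

phoenix:/etc/cinder # cinder delete 52df23c7-b988-4f8c-9754-deab0a349887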

grep "^[a-zA-Z[]" cinder.conf

[DEFAULT]
verbose = True
log_dir = /var/log/cinder
auth_strategy = keystone
rootwrap_config = /etc/cinder/rootwrap.conf
state_path = /var/lib/cinder
lock_path = /var/run/cinder
rabbit_host=phoenix
rabbit_userid=guest
rabbit_password=passw
rpc_backend=rabbit
backup_driver=cinder.backup.drivers.ceph
backup_ceph_conf=/etc/ceph/ceph.conf
backup_ceph_user=cinder-backup
backup_ceph_chunk_size=134217728
backup_ceph_pool=backups
backup_ceph_stripe_unit=0
backup_ceph_stripe_count=0
restore_discard_excess_bytes=true
my_ip=172.16.98.3
glance_host=phoenix
debug=True
volume_driver = cinder.volume.drivers.rbd.RBDDriver
rbd_pool = volumes
glance_api_version = 2
rbd_ceph_conf = /etc/ceph/ceph.conf
rbd_user = volumes
rbd_secret_uuid=df556fc6-8137-4bc0-b54a-1cd9be3df459
volume_tmp_dir = /tmp
glance_host = phoenix
[BRCD_FABRIC_EXAMPLE]
[CISCO_FABRIC_EXAMPLE]
[database]
connection = mysql://cinder:passw@phoenix/cinder
[fc-zone-manager]
[keymgr]
[keystone_authtoken]
signing_dir = /var/cache/cinder/keystone-signing
auth_uri = http://phoenix/:5000/v2.0
identity_uri = http://phoenix:35357
admin_tenant_name = service
admin_user = cinder
admin_password = cinder
[matchmaker_redis]
[matchmaker_ring]
[profiler]
[ssl]



2014-10-23 08:26:45.530 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] curl -i -X GET -H 'User-Agent: python-glanceclient' -H 'Content-Type: application/octet-stream' -H 'Accept-Encoding: gzip, deflate' -H 'Accept: */*' -H 'X-Auth-Token: {SHA1}8d5b9794e7a32fe15b6698bfa91a3f2ce38e46f6' http://phoenix:9292/v2/schemas/image log_curl_request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:118
2014-10-23 08:26:45.542 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -]
HTTP/1.1 200 OK
date: Thu, 23 Oct 2014 06:26:45 GMT
content-length: 3785
content-type: application/json; charset=UTF-8
x-openstack-request-id: req-f519ca6c-4328-470d-b552-9250c1f2e028
{"additionalProperties": {"type": "string"}, "name": "image", "links": [{"href": "{self}", "rel": "self"}, {"href": "{file}", "rel": "enclosure"}, {"href": "{schema}", "rel": "describedby"}], "properties": {"status": {"enum": ["queued", "saving", "active", "killed", "deleted", "pending_delete"], "type": "string", "description": "Status of the image (READ-ONLY)"}, "tags": {"items": {"type": "string", "maxLength": 255}, "type": "array", "description": "List of strings related to the image"}, "kernel_id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "ID of image stored in Glance that should be used as the kernel when booting an AMI-style image.", "is_base": false}, "container_format": {"enum": ["ami", "ari", "aki", "bare", "ovf", "ova"], "type": "string", "description": "Format of the container"}, "min_ram": {"type": "integer", "description": "Amount of ram (in MB) required to boot image."}, "ramdisk_id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "ID of image stored in Glance that should be used as the ramdisk when booting an AMI-style image.", "is_base": false}, "locations": {"items": {"required": ["url", "metadata"], "type": "object", "properties": {"url": {"type": "string", "maxLength": 255}, "metadata": {"type": "object"}}}, "type": "array", "description": "A set of URLs to access the image file kept in external store"}, "visibility": {"enum": ["public", "private"], "type": "string", "description": "Scope of image accessibility"}, "updated_at": {"type": "string", "description": "Date and time of the last image modification (READ-ONLY)"}, "owner": {"type": "string", "description": "Owner of the image", "maxLength": 255}, "file": {"type": "string", "description": "(READ-ONLY)"}, "min_disk": {"type": "integer", "description": "Amount of disk space (in GB) required to boot image."}, "virtual_size": {"type": "integer", "description": "Virtual size of image in bytes (READ-ONLY)"}, "id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "An identifier for the image"}, "size": {"type": "integer", "description": "Size of image file in bytes (READ-ONLY)"}, "instance_uuid": {"type": "string", "description": "ID of instance used to create this image.", "is_base": false}, "os_distro": {"type": "string", "description": "Common name of operating system distribution as specified in http://docs.openstack.org/trunk/openstack-compute/admin/content/adding-images.html", "is_base": false}, "name": {"type": "string", "description": "Descriptive name for the image", "maxLength": 255}, "checksum": {"type": "string", "description": "md5 hash of image contents. 
(READ-ONLY)", "maxLength": 32}, "created_at": {"type": "string", "description": "Date and time of image registration (READ-ONLY)"}, "disk_format": {"enum": ["ami", "ari", "aki", "vhd", "vmdk", "raw", "qcow2", "vdi", "iso"], "type": "string", "description": "Format of the disk"}, "os_version": {"type": "string", "description": "Operating system version as specified by the distributor", "is_base": false}, "protected": {"type": "boolean", "description": "If true, image will not be deletable."}, "architecture": {"type": "string", "description": "Operating system architecture as specified in http://docs.openstack.org/trunk/openstack-compute/admin/content/adding-images.html", "is_base": false}, "direct_url": {"type": "string", "description": "URL to access the image file kept in external store (READ-ONLY)"}, "self": {"type": "string", "description": "(READ-ONLY)"}, "schema": {"type": "string", "description": "(READ-ONLY)"}}}
 log_http_response /usr/lib/python2.7/site-packages/glanceclient/common/http.py:131
2014-10-23 08:26:45.558 29913 DEBUG cinder.openstack.common.processutils [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Running cmd (subprocess): sudo cinder-rootwrap /etc/cinder/rootwrap.conf env LC_ALL=C qemu-img info /var/lib/cinder/conversion/tmp6U4DTk execute /usr/lib/python2.7/site-packages/cinder/openstack/common/processutils.py:158
2014-10-23 08:26:45.665 29913 DEBUG cinder.openstack.common.processutils [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Result was 0 execute /usr/lib/python2.7/site-packages/cinder/openstack/common/processutils.py:192
2014-10-23 08:26:45.668 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] curl -i -X GET -H 'User-Agent: python-glanceclient' -H 'Content-Type: application/octet-stream' -H 'Accept-Encoding: gzip, deflate' -H 'Accept: */*' -H 'X-Auth-Token: {SHA1}8d5b9794e7a32fe15b6698bfa91a3f2ce38e46f6' http://phoenix:9292/v2/images/434cbaf5-0b1c-45ed-9e62-418e9a374102/file log_curl_request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:118
2014-10-23 08:26:45.775 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Request returned failure status 500. _request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:218
2014-10-23 08:26:45.777 29913 ERROR cinder.volume.flows.manager.create_volume [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Failed to copy image 434cbaf5-0b1c-45ed-9e62-418e9a374102 to volume: ea2d3e2f-d388-4ec2-a92f-c6dd07d90906, error: HTTPInternalServerError (HTTP 500)
2014-10-23 08:26:45.778 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Exiting old state 'SCHEDULING' in response to event 'wait' on_exit /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:156
2014-10-23 08:26:45.779 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Entering new state 'WAITING' in response to event 'wait' on_enter /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:160
2014-10-23 08:26:45.779 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Exiting old state 'WAITING' in response to event 'analyze' on_exit /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:156
2014-10-23 08:26:45.780 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Entering new state 'ANALYZING' in response to event 'analyze' on_enter /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:160
2014-10-23 08:26:45.781 29913 WARNING cinder.volume.manager [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Task 'cinder.volume.flows.manager.create_volume.CreateVolumeFromSpecTask;volume:create' (9afc4293-4c15-48aa-9534-6cafabd2c08b) transitioned into state 'FAILURE'
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager Traceback (most recent call last):
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/taskflow/engines/action_engine/executor.py", line 35, in _execute_task
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     result = task.execute(**arguments)
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 638, in execute
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     **volume_spec)
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 590, in _create_from_image
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     image_id, image_location, image_service)
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 504, in _copy_image_to_volume
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     raise exception.ImageCopyFailure(reason=ex)
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)
2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager
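The failing request is the glanceclient GET on /v2/images/&lt;id&gt;/file shown above. It can be reproduced outside of Cinder with curl (a sketch; the token retrieval assumes the keystone CLI is available):

phoenix:/etc/cinder # TOKEN=$(keystone token-get | awk '/ id / {print $4}')
phoenix:/etc/cinder # curl -s -o /dev/null -w "%{http_code}\n" -H "X-Auth-Token: $TOKEN" http://phoenix:9292/v2/images/b7fff5f9-d63d-4729-ac1e-b8831a828e26/file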

Update 1

In Cinder I replaced the Ceph backend with an LVM backend and I get the same error. Glance is now on Ceph and Cinder on LVM.

phoenix:/etc/lvm # cinder create 1 --display-name vol_fail_testVG --image-id b7fff5f9-d63d-4729-ac1e-b8831a828e26 --availability-zone nova
+---------------------+--------------------------------------+
|       Property      |                Value                 |
+---------------------+--------------------------------------+
|     attachments     |                  []                  |
|  availability_zone  |                 nova                 |
|       bootable      |                false                 |
|      created_at     |      2014-10-24T12:51:18.086673      |
| display_description |                 None                 |
|     display_name    |           vol_fail_testVG            |
|      encrypted      |                False                 |
|          id         | 1de49e62-b0cd-40ef-818b-1ab988dfd6d7 |
|       image_id      | b7fff5f9-d63d-4729-ac1e-b8831a828e26 |
|       metadata      |                  {}                  |
|         size        |                  1                   |
|     snapshot_id     |                 None                 |
|     source_volid    |                 None                 |
|        status       |               creating               |
|     volume_type     |                 None                 |
+---------------------+--------------------------------------+
phoenix:/etc/lvm # cinder list
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
|                  ID                  | Status |     Display Name    | Size | Volume Type | Bootable | Attached to |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
| 1de49e62-b0cd-40ef-818b-1ab988dfd6d7 | error  |   vol_fail_testVG   |  1   |     None    |  false   |             |
| 52df23c7-b988-4f8c-9754-deab0a349887 | error  |    vol_fail_test    |  1   |     None    |  false   |             |
| 628bf569-7654-4b49-aec5-b02ab49dd522 | error  | cirros-0.3.3-x86_64 |  1   |     None    |  false   |             |
| bbe05482-e89f-4485-9b1f-3a034e82a858 | error  |    vol_fail_test3   |  1   |     None    |  false   |             |
| d2a45408-41c5-4049-97f1-63692dbae233 | error  |    vol_fail_test2   |  1   |     None    |  false   |             |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+

phoenix:/etc/lvm # lvs
  LV                                          VG       Attr      LSize  Pool          Origin Data%  Move Log Copy%  Convert
  cinderVG-pool                               cinderVG twi-a-tz- 95.00g                        0.00              
  volume-1de49e62-b0cd-40ef-818b-1ab988dfd6d7 cinderVG Vwi-a-tz-  1.00g cinderVG-pool          0.00              

From cinder-volumes.log:
2014-10-24 14:51:20.694 18244 TRACE oslo.messaging.rpc.dispatcher     raise exception.ImageCopyFailure(reason=ex)
2014-10-24 14:51:20.694 18244 TRACE oslo.messaging.rpc.dispatcher ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)
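Both the Ceph and the LVM backend fail with the same HTTP 500 from Glance, so the common element is the image download itself. The glance-api log around the same timestamp is the next place to look (log path taken from glance-api.conf below):

phoenix:/etc/lvm # grep -B 2 ERROR /var/log/glance/api.log | tail -n 20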


Update 2

After some more testing it looks like the problem is in Glance after all: when I revert the Glance store to file it works; when I change the store to RBD it does not. The glance-api log shows TypeError: 'ImageProxy' object is not callable.
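For reference, the only change between the working and the failing setup is the store selection in glance-api.conf:

default_store = file   # creating a volume from an image works
default_store = rbd    # fails with the HTTP 500 above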

phoenix:/etc/glance # grep "^[a-zA-Z []" glance-api.conf
[DEFAULT]
verbose = True
debug = True
default_store = rbd
bind_host = 0.0.0.0
bind_port = 9292
log_file = /var/log/glance/api.log
backlog = 4096
registry_host = 0.0.0.0
registry_port = 9191
registry_client_protocol = http
auth_strategy = keystone
rabbit_host = phoenix
rabbit_port = 5672
rabbit_use_ssl = false
rabbit_userid = guest
rabbit_password = passw
rabbit_virtual_host = /
rabbit_notification_exchange = glance
rabbit_notification_topic = notifications
rabbit_durable_queues = False
qpid_notification_exchange = glance
qpid_notification_topic = notifications
qpid_hostname = localhost
qpid_port = 5672
qpid_username =
qpid_password =
qpid_sasl_mechanisms =
qpid_reconnect_timeout = 0
qpid_reconnect_limit = 0
qpid_reconnect_interval_min = 0
qpid_reconnect_interval_max = 0
qpid_reconnect_interval = 0
qpid_heartbeat = 5
qpid_protocol = tcp
qpid_tcp_nodelay = True
delayed_delete = False
scrub_time = 43200
scrubber_datadir = /var/lib/glance/scrubber
image_cache_dir = /var/lib/glance/image-cache/
verbose = True
notification_driver = messaging
lock_path = /var/run/glance
[database]
connection = mysql://glance:passw@phoenix/glance
[keystone_authtoken]
identity_uri = http://phoenix:35357
admin_tenant_name = service
admin_user = glance
admin_password = passw
revocation_cache_time = 10
signing_dir = /var/cache/glance/keystone-signing
[paste_deploy]
flavor = keystone
[store_type_location_strategy]
[profiler]
[task]
[glance_store]
stores = glance.store.filesystem.Store,
         glance.store.rbd.Store,
         glance.store.http.Store
filesystem_store_datadir = /var/lib/glance/images/
swift_store_auth_version = 2
swift_store_auth_address = 127.0.0.1:5000/v2.0/
swift_store_user = jdoe:jdoe
swift_store_key = a86850deb2742ec3cb41518e26aa2d89
swift_store_container = glance
swift_store_create_container_on_put = False
swift_store_large_object_size = 5120
swift_store_large_object_chunk_size = 200
swift_enable_snet = False
s3_store_host = 127.0.0.1:8080/v1.0/
s3_store_access_key = <20-char AWS access key>
s3_store_secret_key = <40-char AWS secret key>
s3_store_bucket = <lowercased 20-char aws access key>glance
s3_store_create_bucket_on_put = False
rbd_store_ceph_conf = /etc/glance/ceph.client.images.keyring
rbd_store_user = images
rbd_store_pool = images
show_image_direct_url=True
sheepdog_store_address = localhost
sheepdog_store_port = 7000
sheepdog_store_chunk_size = 64
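As a control, exporting the image straight from the images pool with the rbd_store_user configured above would confirm the data itself is still readable (the export path is arbitrary; this assumes the client.images keyring is in the default location):

phoenix:/etc/glance # rbd --id images -p images export b7fff5f9-d63d-4729-ac1e-b8831a828e26 /tmp/cirros_check.img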


2014-10-24 12:41:43.841 32678 WARNING glance.location [-] Get image b7fff5f9-d63d-4729-ac1e-b8831a828e26 data failed: too many values to unpack.
2014-10-24 12:41:43.842 32678 ERROR glance.location [-] Glance tried all active locations to get data for image b7fff5f9-d63d-4729-ac1e-b8831a828e26 but all have failed.
2014-10-24 12:41:43.846 32678 INFO glance.wsgi.server [-] Traceback (most recent call last):
  File "/usr/lib/python2.7/site-packages/eventlet/wsgi.py", line 433, in handle_one_response
    result = self.application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
    resp = self.call_func(req, *args, **self.kwargs)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 394, in __call__
    response = req.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1296, in send
    application, catch_exc_info=False)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1260, in call_application
    app_iter = application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
    resp = self.call_func(req, *args, **self.kwargs)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/osprofiler/web.py", line 106, in __call__
    return request.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1296, in send
    application, catch_exc_info=False)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1260, in call_application
    app_iter = application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/keystonemiddleware/auth_token.py", line 748, in __call__
    return self._call_app(env, start_response)
  File "/usr/lib/python2.7/site-packages/keystonemiddleware/auth_token.py", line 684, in _call_app
    return self._app(env, _fake_start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
    resp = self.call_func(req, *args, **self.kwargs)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 394, in __call__
    response = req.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 394, in __call__
    response = req.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1296, in send
    application, catch_exc_info=False)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1260, in call_application
    app_iter = application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/paste/urlmap.py", line 203, in __call__
    return app(environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 144, in __call__
    return resp(environ, start_response)
  File "/usr/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
    response = self.app(environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 144, in __call__
    return resp(environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 144, in __call__
    return resp(environ, start_response)
TypeError: 'ImageProxy' object is not callable
2014-10-24 12:41:43.847 32678 INFO glance.wsgi.server [-] 172.16.98.3 - - [24/Oct/2014 12:41:43] "GET /v2/images/b7fff5f9-d63d-4729-ac1e-b8831a828e26/file HTTP/1.1" 500 139 0.100226

not able to create volume from image with ceph backend after update from icehouse to juno

    Hi,

    I am currently running an OpenStack setup with a Ceph backend for Glance and Cinder.
    On Icehouse it was working fine. I updated the setup to Juno, and
    now I am not able to create a volume from an image anymore. The volume status says "error"; however, when I look at Ceph with rbd, a volume is present.

    In the cinder-volume log I can see: ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)

update 2) The glance-api log says: TypeError: 'ImageProxy' object is not callable

update 3)

A simple download from Glance does not work either; it fails with "too many values to unpack":

phoenix:/var/log/glance # glance image-download 1f5f7809-34a9-4035-bcf9-bb527a03720b

HTTPInternalServerError (HTTP 500)
phoenix:/var/log/glance # tail -f api.log
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 683, in __call__
    request, **action_args)
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 707, in dispatch
    return method(*args, **kwargs)
  File "/usr/lib/python2.7/site-packages/glance/api/v1/images.py", line 498, in show
    image_meta['location'])
  File "/usr/lib/python2.7/site-packages/glance/api/v1/images.py", line 460, in _get_from_store
    image_data, image_size = src_store.get(loc, context=context)
ValueError: too many values to unpack
2014-10-30 16:18:25.825 6247 INFO glance.wsgi.server [20722acd-2805-4295-a543-e11c9dc79413 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] 172.16.98.3 - - [30/Oct/2014 16:18:25] "GET /v1/images/1f5f7809-34a9-4035-bcf9-bb527a03720b HTTP/1.1" 500 139 0.138539
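
If I read that traceback right, the failing line unpacks whatever the store's get() returns into exactly two names. A minimal Python sketch of that failure mode (purely illustrative, not the actual glance_store code; the extra return fields are an assumption):

    # What glance's v1 code expects a store's get() to hand back: (data, size).
    def get_compatible(location):
        return iter([b"..."]), 13200896

    # A hypothetical mismatched library handing back extra fields.
    def get_incompatible(location):
        return iter([b"..."]), 13200896, None, {}

    image_data, image_size = get_compatible("rbd://pool/image")    # fine
    image_data, image_size = get_incompatible("rbd://pool/image")  # ValueError: too many values to unpack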

    Does anybody have an idea?


    BR,
    Jeroen



    phoenix:/etc/cinder # glance image-list
    +--------------------------------------+-------------------------+-------------+------------------+----------+--------+
    | ID                                   | Name                    | Disk Format | Container Format | Size     | Status |
    +--------------------------------------+-------------------------+-------------+------------------+----------+--------+
    | b7fff5f9-d63d-4729-ac1e-b8831a828e26 | cirros-0.3.3-x86_64     | qcow2       | bare             | 13200896 | active |
    | 7d9eef7f-15f1-40e3-9e75-593d94a8571d | raw cirros-0.3.3-x86_64 | qcow2       | bare             | 41126400 | active |
    +--------------------------------------+-------------------------+-------------+------------------+----------+--------+

    phoenix:/etc/cinder # rbd ls images
    7d9eef7f-15f1-40e3-9e75-593d94a8571d
    b7fff5f9-d63d-4729-ac1e-b8831a828e26


    phoenix:/etc/cinder # cinder create 1 --display-name vol_fail_test --image-id b7fff5f9-d63d-4729-ac1e-b8831a828e26 --availability-zone nova
    +---------------------+--------------------------------------+
    |       Property      |                Value                 |
    +---------------------+--------------------------------------+
    |     attachments     |                  []                  |
    |  availability_zone  |                 nova                 |
    |       bootable      |                false                 |
    |      created_at     |      2014-10-23T14:11:59.820165      |
    | display_description |                 None                 |
    |     display_name    |            vol_fail_test             |
    |      encrypted      |                False                 |
    |          id         | 52df23c7-b988-4f8c-9754-deab0a349887 |
    |       image_id      | b7fff5f9-d63d-4729-ac1e-b8831a828e26 |
    |       metadata      |                  {}                  |
    |         size        |                  1                   |
    |     snapshot_id     |                 None                 |
    |     source_volid    |                 None                 |
    |        status       |               creating               |
    |     volume_type     |                 None                 |
    +---------------------+--------------------------------------+
    phoenix:/etc/cinder # cinder list
    +--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
    |                  ID                  | Status |     Display Name    | Size | Volume Type | Bootable | Attached to |
    +--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
    | 52df23c7-b988-4f8c-9754-deab0a349887 | error  |    vol_fail_test    |  1   |     None    |  false   |             |
    | 628bf569-7654-4b49-aec5-b02ab49dd522 | error  | cirros-0.3.3-x86_64 |  1   |     None    |  false   |             |
    +--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
    phoenix:/etc/cinder # rbd ls volumes
    volume-52df23c7-b988-4f8c-9754-deab0a349887
    volume-628bf569-7654-4b49-aec5-b02ab49dd522
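
    So the RBD volumes themselves do get created; the create flow seems to fail afterwards, while copying the image data into the volume.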

    grep "^[a-zA-Z[]" cinder.conf

    [DEFAULT]
    verbose = True
    log_dir = /var/log/cinder
    auth_strategy = keystone
    rootwrap_config = /etc/cinder/rootwrap.conf
    state_path = /var/lib/cinder
    lock_path = /var/run/cinder
    rabbit_host=phoenix
    rabbit_userid=guest
    rabbit_password=passw
    rpc_backend=rabbit
    backup_driver=cinder.backup.drivers.ceph
    backup_ceph_conf=/etc/ceph/ceph.conf
    backup_ceph_user=cinder-backup
    backup_ceph_chunk_size=134217728
    backup_ceph_pool=backups
    backup_ceph_stripe_unit=0
    backup_ceph_stripe_count=0
    restore_discard_excess_bytes=true
    my_ip=172.16.98.3
    glance_host=phoenix
    debug=True
    volume_driver = cinder.volume.drivers.rbd.RBDDriver
    rbd_pool = volumes
    glance_api_version = 2
    rbd_ceph_conf = /etc/ceph/ceph.conf
    rbd_user = volumes
    rbd_secret_uuid=df556fc6-8137-4bc0-b54a-1cd9be3df459
    volume_tmp_dir = /tmp
    glance_host = phoenix
    [BRCD_FABRIC_EXAMPLE]
    [CISCO_FABRIC_EXAMPLE]
    [database]
    connection = mysql://cinder:passw@phoenix/cinder
    [fc-zone-manager]
    [keymgr]
    [keystone_authtoken]
    signing_dir = /var/cache/cinder/keystone-signing
    auth_uri = http://phoenix/:5000/v2.0
    identity_uri = http://phoenix:35357
    admin_tenant_name = service
    admin_user = cinder
    admin_password = cinder
    [matchmaker_redis]
    [matchmaker_ring]
    [profiler]
    [ssl]



    2014-10-23 08:26:45.530 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] curl -i -X GET -H 'User-Agent: python-glanceclient' -H 'Content-Type: application/octet-stream' -H 'Accept-Encoding: gzip, deflate' -H 'Accept: */*' -H 'X-Auth-Token: {SHA1}8d5b9794e7a32fe15b6698bfa91a3f2ce38e46f6' http://phoenix:9292/v2/schemas/image log_curl_request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:118
    2014-10-23 08:26:45.542 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -]
    HTTP/1.1 200 OK
    date: Thu, 23 Oct 2014 06:26:45 GMT
    content-length: 3785
    content-type: application/json; charset=UTF-8
    x-openstack-request-id: req-f519ca6c-4328-470d-b552-9250c1f2e028
    {"additionalProperties": {"type": "string"}, "name": "image", "links": [{"href": "{self}", "rel": "self"}, {"href": "{file}", "rel": "enclosure"}, {"href": "{schema}", "rel": "describedby"}], "properties": {"status": {"enum": ["queued", "saving", "active", "killed", "deleted", "pending_delete"], "type": "string", "description": "Status of the image (READ-ONLY)"}, "tags": {"items": {"type": "string", "maxLength": 255}, "type": "array", "description": "List of strings related to the image"}, "kernel_id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "ID of image stored in Glance that should be used as the kernel when booting an AMI-style image.", "is_base": false}, "container_format": {"enum": ["ami", "ari", "aki", "bare", "ovf", "ova"], "type": "string", "description": "Format of the container"}, "min_ram": {"type": "integer", "description": "Amount of ram (in MB) required to boot image."}, "ramdisk_id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "ID of image stored in Glance that should be used as the ramdisk when booting an AMI-style image.", "is_base": false}, "locations": {"items": {"required": ["url", "metadata"], "type": "object", "properties": {"url": {"type": "string", "maxLength": 255}, "metadata": {"type": "object"}}}, "type": "array", "description": "A set of URLs to access the image file kept in external store"}, "visibility": {"enum": ["public", "private"], "type": "string", "description": "Scope of image accessibility"}, "updated_at": {"type": "string", "description": "Date and time of the last image modification (READ-ONLY)"}, "owner": {"type": "string", "description": "Owner of the image", "maxLength": 255}, "file": {"type": "string", "description": "(READ-ONLY)"}, "min_disk": {"type": "integer", "description": "Amount of disk space (in GB) required to boot image."}, "virtual_size": {"type": "integer", "description": "Virtual size of image in bytes (READ-ONLY)"}, "id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "An identifier for the image"}, "size": {"type": "integer", "description": "Size of image file in bytes (READ-ONLY)"}, "instance_uuid": {"type": "string", "description": "ID of instance used to create this image.", "is_base": false}, "os_distro": {"type": "string", "description": "Common name of operating system distribution as specified in http://docs.openstack.org/trunk/openstack-compute/admin/content/adding-images.html", "is_base": false}, "name": {"type": "string", "description": "Descriptive name for the image", "maxLength": 255}, "checksum": {"type": "string", "description": "md5 hash of image contents. 
(READ-ONLY)", "maxLength": 32}, "created_at": {"type": "string", "description": "Date and time of image registration (READ-ONLY)"}, "disk_format": {"enum": ["ami", "ari", "aki", "vhd", "vmdk", "raw", "qcow2", "vdi", "iso"], "type": "string", "description": "Format of the disk"}, "os_version": {"type": "string", "description": "Operating system version as specified by the distributor", "is_base": false}, "protected": {"type": "boolean", "description": "If true, image will not be deletable."}, "architecture": {"type": "string", "description": "Operating system architecture as specified in http://docs.openstack.org/trunk/openstack-compute/admin/content/adding-images.html", "is_base": false}, "direct_url": {"type": "string", "description": "URL to access the image file kept in external store (READ-ONLY)"}, "self": {"type": "string", "description": "(READ-ONLY)"}, "schema": {"type": "string", "description": "(READ-ONLY)"}}}
     log_http_response /usr/lib/python2.7/site-packages/glanceclient/common/http.py:131
    2014-10-23 08:26:45.558 29913 DEBUG cinder.openstack.common.processutils [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Running cmd (subprocess): sudo cinder-rootwrap /etc/cinder/rootwrap.conf env LC_ALL=C qemu-img info /var/lib/cinder/conversion/tmp6U4DTk execute /usr/lib/python2.7/site-packages/cinder/openstack/common/processutils.py:158
    2014-10-23 08:26:45.665 29913 DEBUG cinder.openstack.common.processutils [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Result was 0 execute /usr/lib/python2.7/site-packages/cinder/openstack/common/processutils.py:192
    2014-10-23 08:26:45.668 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] curl -i -X GET -H 'User-Agent: python-glanceclient' -H 'Content-Type: application/octet-stream' -H 'Accept-Encoding: gzip, deflate' -H 'Accept: */*' -H 'X-Auth-Token: {SHA1}8d5b9794e7a32fe15b6698bfa91a3f2ce38e46f6' http://phoenix:9292/v2/images/434cbaf5-0b1c-45ed-9e62-418e9a374102/file log_curl_request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:118
    2014-10-23 08:26:45.775 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Request returned failure status 500. _request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:218
    2014-10-23 08:26:45.777 29913 ERROR cinder.volume.flows.manager.create_volume [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Failed to copy image 434cbaf5-0b1c-45ed-9e62-418e9a374102 to volume: ea2d3e2f-d388-4ec2-a92f-c6dd07d90906, error: HTTPInternalServerError (HTTP 500)
    2014-10-23 08:26:45.778 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Exiting old state 'SCHEDULING' in response to event 'wait' on_exit /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:156
    2014-10-23 08:26:45.779 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Entering new state 'WAITING' in response to event 'wait' on_enter /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:160
    2014-10-23 08:26:45.779 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Exiting old state 'WAITING' in response to event 'analyze' on_exit /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:156
    2014-10-23 08:26:45.780 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Entering new state 'ANALYZING' in response to event 'analyze' on_enter /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:160
    2014-10-23 08:26:45.781 29913 WARNING cinder.volume.manager [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Task 'cinder.volume.flows.manager.create_volume.CreateVolumeFromSpecTask;volume:create' (9afc4293-4c15-48aa-9534-6cafabd2c08b) transitioned into state 'FAILURE'
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager Traceback (most recent call last):
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/taskflow/engines/action_engine/executor.py", line 35, in _execute_task
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     result = task.execute(**arguments)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 638, in execute
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     **volume_spec)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 590, in _create_from_image
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     image_id, image_location, image_service)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 504, in _copy_image_to_volume
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     raise exception.ImageCopyFailure(reason=ex)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager
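
Note how little of the root cause survives into the Cinder log: the copy task just wraps whatever the Glance client raised. A rough sketch of that wrap-and-raise pattern (hypothetical code, not the actual Cinder source):

    class ImageCopyFailure(Exception):
        def __init__(self, reason):
            super(ImageCopyFailure, self).__init__(
                "Failed to copy image to volume: %s" % reason)

    def copy_image_to_volume(download):
        try:
            download()  # talks to the Glance API; here it raises HTTP 500
        except Exception as ex:
            # Only the message of the original error is kept, so the real
            # cause has to be looked up in glance's api.log instead.
            raise ImageCopyFailure(reason=ex)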


update 1)

In Cinder I replaced the Ceph backend with an LVM backend and hit the same error. Glance is now on Ceph and Cinder on LVM.

phoenix:/etc/lvm # cinder create 1 --display-name vol_fail_testVG --image-id b7fff5f9-d63d-4729-ac1e-b8831a828e26 --availability-zone nova
+---------------------+--------------------------------------+
|       Property      |                Value                 |
+---------------------+--------------------------------------+
|     attachments     |                  []                  |
|  availability_zone  |                 nova                 |
|       bootable      |                false                 |
|      created_at     |      2014-10-24T12:51:18.086673      |
| display_description |                 None                 |
|     display_name    |           vol_fail_testVG            |
|      encrypted      |                False                 |
|          id         | 1de49e62-b0cd-40ef-818b-1ab988dfd6d7 |
|       image_id      | b7fff5f9-d63d-4729-ac1e-b8831a828e26 |
|       metadata      |                  {}                  |
|         size        |                  1                   |
|     snapshot_id     |                 None                 |
|     source_volid    |                 None                 |
|        status       |               creating               |
|     volume_type     |                 None                 |
+---------------------+--------------------------------------+
phoenix:/etc/lvm # cinder list
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
|                  ID                  | Status |     Display Name    | Size | Volume Type | Bootable | Attached to |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
| 1de49e62-b0cd-40ef-818b-1ab988dfd6d7 | error  |   vol_fail_testVG   |  1   |     None    |  false   |             |
| 52df23c7-b988-4f8c-9754-deab0a349887 | error  |    vol_fail_test    |  1   |     None    |  false   |             |
| 628bf569-7654-4b49-aec5-b02ab49dd522 | error  | cirros-0.3.3-x86_64 |  1   |     None    |  false   |             |
| bbe05482-e89f-4485-9b1f-3a034e82a858 | error  |    vol_fail_test3   |  1   |     None    |  false   |             |
| d2a45408-41c5-4049-97f1-63692dbae233 | error  |    vol_fail_test2   |  1   |     None    |  false   |             |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+

phoenix:/etc/lvm # lvs
  LV                                          VG       Attr      LSize  Pool          Origin Data%  Move Log Copy%  Convert
  cinderVG-pool                               cinderVG twi-a-tz- 95.00g                        0.00              
  volume-1de49e62-b0cd-40ef-818b-1ab988dfd6d7 cinderVG Vwi-a-tz-  1.00g cinderVG-pool          0.00              

cinder-volumes.log
2014-10-24 14:51:20.694 18244 TRACE oslo.messaging.rpc.dispatcher     raise exception.ImageCopyFailure(reason=ex)
2014-10-24 14:51:20.694 18244 TRACE oslo.messaging.rpc.dispatcher ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)
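
To take Cinder out of the loop entirely, the image can also be pulled straight from Glance with python-glanceclient. A small sketch, assuming the endpoint http://phoenix:9292 and a placeholder token:

    from glanceclient import Client

    # Hypothetical token; obtain a real one from keystone first.
    glance = Client('2', 'http://phoenix:9292', token='<admin-token>')

    with open('/tmp/cirros-test.img', 'wb') as out:
        for chunk in glance.images.data('b7fff5f9-d63d-4729-ac1e-b8831a828e26'):
            out.write(chunk)

If this raises the same HTTP 500, Cinder and its backends are off the hook.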

Update 2)
After some more testing, it looks like the problem is in Glance after all: when I revert the Glance store to file it works; when I change the store to rbd it does not.
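
For reference, the switch between the two behaviours comes down to the store selection in glance-api.conf (the rest of the config stays as below):

    [DEFAULT]
    default_store = file   # image download works
    #default_store = rbd   # image download fails with HTTP 500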

phoenix:/etc/glance # grep "^[a-zA-Z []" glance-api.conf
[DEFAULT]
verbose = True
debug = True
default_store = rbd
bind_host = 0.0.0.0
bind_port = 9292
log_file = /var/log/glance/api.log
backlog = 4096
registry_host = 0.0.0.0
registry_port = 9191
registry_client_protocol = http
auth_strategy = keystone
rabbit_host = phoenix
rabbit_port = 5672
rabbit_use_ssl = false
rabbit_userid = guest
rabbit_password = passw
rabbit_virtual_host = /
rabbit_notification_exchange = glance
rabbit_notification_topic = notifications
rabbit_durable_queues = False
qpid_notification_exchange = glance
qpid_notification_topic = notifications
qpid_hostname = localhost
qpid_port = 5672
qpid_username =
qpid_password =
qpid_sasl_mechanisms =
qpid_reconnect_timeout = 0
qpid_reconnect_limit = 0
qpid_reconnect_interval_min = 0
qpid_reconnect_interval_max = 0
qpid_reconnect_interval = 0
qpid_heartbeat = 5
qpid_protocol = tcp
qpid_tcp_nodelay = True
delayed_delete = False
scrub_time = 43200
scrubber_datadir = /var/lib/glance/scrubber
image_cache_dir = /var/lib/glance/image-cache/
verbose = True
notification_driver = messaging
lock_path = /var/run/glance
[database]
connection = mysql://glance:passw@phoenix/glance
[keystone_authtoken]
identity_uri = http://phoenix:35357
admin_tenant_name = service
admin_user = glance
admin_password = passw
revocation_cache_time = 10
signing_dir = /var/cache/glance/keystone-signing
[paste_deploy]
flavor = keystone
[store_type_location_strategy]
[profiler]
[task]
[glance_store]
stores = glance.store.filesystem.Store,
         glance.store.rbd.Store,
         glance.store.http.Store
filesystem_store_datadir = /var/lib/glance/images/
swift_store_auth_version = 2
swift_store_auth_address = 127.0.0.1:5000/v2.0/
swift_store_user = jdoe:jdoe
swift_store_key = a86850deb2742ec3cb41518e26aa2d89
swift_store_container = glance
swift_store_create_container_on_put = False
swift_store_large_object_size = 5120
swift_store_large_object_chunk_size = 200
swift_enable_snet = False
s3_store_host = 127.0.0.1:8080/v1.0/
s3_store_access_key = <20-char AWS access key>
s3_store_secret_key = <40-char AWS secret key>
s3_store_bucket = <lowercased 20-char aws access key>glance
s3_store_create_bucket_on_put = False
rbd_store_ceph_conf = /etc/glance/ceph.client.images.keyring
rbd_store_user = images
rbd_store_pool = images
show_image_direct_url=True
sheepdog_store_address = localhost
sheepdog_store_port = 7000
sheepdog_store_chunk_size = 64
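
(One thing I am not sure about in my own config above: rbd_store_ceph_conf points at a keyring file. As far as I know it normally points at the cluster configuration, with the keyring referenced from there, along the lines of:

    rbd_store_ceph_conf = /etc/ceph/ceph.conf
    rbd_store_user = images
    rbd_store_pool = images

I do not know whether that is related to the failure below.)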


2014-10-24 12:41:43.841 32678 WARNING glance.location [-] Get image b7fff5f9-d63d-4729-ac1e-b8831a828e26 data failed: too many values to unpack.
2014-10-24 12:41:43.842 32678 ERROR glance.location [-] Glance tried all active locations to get data for image b7fff5f9-d63d-4729-ac1e-b8831a828e26 but all have failed.
2014-10-24 12:41:43.846 32678 INFO glance.wsgi.server [-] Traceback (most recent call last):
  File "/usr/lib/python2.7/site-packages/eventlet/wsgi.py", line 433, in handle_one_response
    result = self.application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
    resp = self.call_func(req, *args, **self.kwargs)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 394, in __call__
    response = req.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1296, in send
    application, catch_exc_info=False)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1260, in call_application
    app_iter = application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
    resp = self.call_func(req, *args, **self.kwargs)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/osprofiler/web.py", line 106, in __call__
    return request.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1296, in send
    application, catch_exc_info=False)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1260, in call_application
    app_iter = application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/keystonemiddleware/auth_token.py", line 748, in __call__
    return self._call_app(env, start_response)
  File "/usr/lib/python2.7/site-packages/keystonemiddleware/auth_token.py", line 684, in _call_app
    return self._app(env, _fake_start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
    resp = self.call_func(req, *args, **self.kwargs)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 394, in __call__
    response = req.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
    return self.func(req, *args, **kwargs)
  File "/usr/lib/python2.7/site-packages/glance/common/wsgi.py", line 394, in __call__
    response = req.get_response(self.application)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1296, in send
    application, catch_exc_info=False)
  File "/usr/lib/python2.7/site-packages/webob/request.py", line 1260, in call_application
    app_iter = application(self.environ, start_response)
  File "/usr/lib/python2.7/site-packages/paste/urlmap.py", line 203, in __call__
    return app(environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 144, in __call__
    return resp(environ, start_response)
  File "/usr/lib/python2.7/site-packages/routes/middleware.py", line 131, in __call__
    response = self.app(environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 144, in __call__
    return resp(environ, start_response)
  File "/usr/lib/python2.7/site-packages/webob/dec.py", line 144, in __call__
    return resp(environ, start_response)
TypeError: 'ImageProxy' object is not callable
2014-10-24 12:41:43.847 32678 INFO glance.wsgi.server [-] 172.16.98.3 - - [24/Oct/2014 12:41:43] "GET /v2/images/b7fff5f9-d63d-4729-ac1e-b8831a828e26/file HTTP/1.1" 500 139 0.100226
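
The TypeError at the bottom looks like a knock-on effect of the failed download: webob's wsgify decorator calls whatever the controller returns as a WSGI application, so when a bare ImageProxy object is returned instead of a Response, the stack dies with "object is not callable". A self-contained reproduction of that webob behaviour (illustrative only):

    import webob
    import webob.dec

    class ImageProxy(object):
        """Stand-in for glance's domain proxy object."""

    @webob.dec.wsgify
    def app(request):
        # Returning something that is neither a webob.Response nor a
        # WSGI callable makes webob try to call it as an application.
        return ImageProxy()

    # Raises: TypeError: 'ImageProxy' object is not callable
    webob.Request.blank('/').get_response(app)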

not able to create volume from image with ceph backend after update from icehouse to juno

* This problem is solved by updating the python-glance_store package from version 0.1.8.2.1 to 0.1.10-5.1. *
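
For anyone hitting this: a quick way to double-check which store library version Python actually picks up (assuming the distribution is registered as "glance_store"; package naming may differ per distro):

    import pkg_resources
    print(pkg_resources.get_distribution("glance_store").version)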

    Hi,

    I am currently running an OpenStack setup with a Ceph backend for Glance and Cinder.
    On Icehouse it was working fine. I updated the setup to Juno, and
    now I am not able to create a volume from an image anymore. The volume status says "error"; however, when I look at Ceph with rbd, a volume is present.

    In the cinder-volume log I can see: ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)

update 2) The glance-api log says: TypeError: 'ImageProxy' object is not callable

update 3)

A simple download from Glance does not work either; it fails with "too many values to unpack":

phoenix:/var/log/glance # glance image-download 1f5f7809-34a9-4035-bcf9-bb527a03720b

2014-11-04 06:39:20.216 5953 DEBUG routes.middleware [d84e93d9-9f5c-47f3-950c-ff3f0fe247f1 db3cb9cfeddd4cc294ede6cef97c2c89 03110aa32a35426bbc7d861dcb61ad94 - - -] Match dict: {'action': u'download', 'image_id': u'1f5f7809-34a9-4035-bcf9-bb527a03720b', 'controller': <glance.common.wsgi.Resource object at 0x3343c90>} __call__ /usr/lib/python2.7/site-packages/routes/middleware.py:103
2014-11-04 06:39:20.251 5953 DEBUG oslo.db.sqlalchemy.session [d84e93d9-9f5c-47f3-950c-ff3f0fe247f1 db3cb9cfeddd4cc294ede6cef97c2c89 03110aa32a35426bbc7d861dcb61ad94 - - -] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION _init_events /usr/lib/python2.7/site-packages/oslo/db/sqlalchemy/session.py:461
2014-11-04 06:39:20.391 5953 WARNING glance.location [d84e93d9-9f5c-47f3-950c-ff3f0fe247f1 db3cb9cfeddd4cc294ede6cef97c2c89 03110aa32a35426bbc7d861dcb61ad94 - - -] Get image 1f5f7809-34a9-4035-bcf9-bb527a03720b data failed: too many values to unpack.
2014-11-04 06:39:20.392 5953 ERROR glance.location [d84e93d9-9f5c-47f3-950c-ff3f0fe247f1 db3cb9cfeddd4cc294ede6cef97c2c89 03110aa32a35426bbc7d861dcb61ad94 - - -] Glance tried all active locations to get data for image 1f5f7809-34a9-4035-bcf9-bb527a03720b but all have failed.

It looks to me like it runs an SQL query and the returned values cannot be unpacked properly.

    BR,
    Jeroen



    phoenix:/etc/cinder # glance image-list
    +--------------------------------------+-------------------------+-------------+------------------+----------+--------+
    | ID                                   | Name                    | Disk Format | Container Format | Size     | Status |
    +--------------------------------------+-------------------------+-------------+------------------+----------+--------+
    | b7fff5f9-d63d-4729-ac1e-b8831a828e26 | cirros-0.3.3-x86_64     | qcow2       | bare             | 13200896 | active |
    | 7d9eef7f-15f1-40e3-9e75-593d94a8571d | raw cirros-0.3.3-x86_64 | qcow2       | bare             | 41126400 | active |
    +--------------------------------------+-------------------------+-------------+------------------+----------+--------+

    phoenix:/etc/cinder # rbd ls images
    7d9eef7f-15f1-40e3-9e75-593d94a8571d
    b7fff5f9-d63d-4729-ac1e-b8831a828e26


    phoenix:/etc/cinder # cinder create 1 --display-name vol_fail_test --image-id b7fff5f9-d63d-4729-ac1e-b8831a828e26 --availability-zone nova
    +---------------------+--------------------------------------+
    |       Property      |                Value                 |
    +---------------------+--------------------------------------+
    |     attachments     |                  []                  |
    |  availability_zone  |                 nova                 |
    |       bootable      |                false                 |
    |      created_at     |      2014-10-23T14:11:59.820165      |
    | display_description |                 None                 |
    |     display_name    |            vol_fail_test             |
    |      encrypted      |                False                 |
    |          id         | 52df23c7-b988-4f8c-9754-deab0a349887 |
    |       image_id      | b7fff5f9-d63d-4729-ac1e-b8831a828e26 |
    |       metadata      |                  {}                  |
    |         size        |                  1                   |
    |     snapshot_id     |                 None                 |
    |     source_volid    |                 None                 |
    |        status       |               creating               |
    |     volume_type     |                 None                 |
    +---------------------+--------------------------------------+
    phoenix:/etc/cinder # cinder list
    +--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
    |                  ID                  | Status |     Display Name    | Size | Volume Type | Bootable | Attached to |
    +--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
    | 52df23c7-b988-4f8c-9754-deab0a349887 | error  |    vol_fail_test    |  1   |     None    |  false   |             |
    | 628bf569-7654-4b49-aec5-b02ab49dd522 | error  | cirros-0.3.3-x86_64 |  1   |     None    |  false   |             |
    +--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
    phoenix:/etc/cinder # rbd ls volumes
    volume-52df23c7-b988-4f8c-9754-deab0a349887
    volume-628bf569-7654-4b49-aec5-b02ab49dd522

    grep "^[a-zA-Z[]" cinder.conf

    [DEFAULT]
    verbose = True
    log_dir = /var/log/cinder
    auth_strategy = keystone
    rootwrap_config = /etc/cinder/rootwrap.conf
    state_path = /var/lib/cinder
    lock_path = /var/run/cinder
    rabbit_host=phoenix
    rabbit_userid=guest
    rabbit_password=passw
    rpc_backend=rabbit
    backup_driver=cinder.backup.drivers.ceph
    backup_ceph_conf=/etc/ceph/ceph.conf
    backup_ceph_user=cinder-backup
    backup_ceph_chunk_size=134217728
    backup_ceph_pool=backups
    backup_ceph_stripe_unit=0
    backup_ceph_stripe_count=0
    restore_discard_excess_bytes=true
    my_ip=172.16.98.3
    glance_host=phoenix
    debug=True
    volume_driver = cinder.volume.drivers.rbd.RBDDriver
    rbd_pool = volumes
    glance_api_version = 2
    rbd_ceph_conf = /etc/ceph/ceph.conf
    rbd_user = volumes
    rbd_secret_uuid=df556fc6-8137-4bc0-b54a-1cd9be3df459
    volume_tmp_dir = /tmp
    glance_host = phoenix
    [BRCD_FABRIC_EXAMPLE]
    [CISCO_FABRIC_EXAMPLE]
    [database]
    connection = mysql://cinder:passw@phoenix/cinder
    [fc-zone-manager]
    [keymgr]
    [keystone_authtoken]
    signing_dir = /var/cache/cinder/keystone-signing
    auth_uri = http://phoenix:5000/v2.0
    identity_uri = http://phoenix:35357
    admin_tenant_name = service
    admin_user = cinder
    admin_password = cinder
    [matchmaker_redis]
    [matchmaker_ring]
    [profiler]
    [ssl]
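Since the rbd volume itself does get created (see rbd ls volumes above), Cinder's Ceph access looks fine. That can be confirmed independently by listing the pool as the configured rbd_user; a sketch, assuming the client.volumes keyring is in place:

    phoenix:~ # rbd -p volumes --id volumes --conf /etc/ceph/ceph.conf ls

The corresponding cinder-volume log excerpt for the failed copy follows: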



    2014-10-23 08:26:45.530 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] curl -i -X GET -H 'User-Agent: python-glanceclient' -H 'Content-Type: application/octet-stream' -H 'Accept-Encoding: gzip, deflate' -H 'Accept: */*' -H 'X-Auth-Token: {SHA1}8d5b9794e7a32fe15b6698bfa91a3f2ce38e46f6' http://phoenix:9292/v2/schemas/image log_curl_request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:118
    2014-10-23 08:26:45.542 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -]
    HTTP/1.1 200 OK
    date: Thu, 23 Oct 2014 06:26:45 GMT
    content-length: 3785
    content-type: application/json; charset=UTF-8
    x-openstack-request-id: req-f519ca6c-4328-470d-b552-9250c1f2e028
    {"additionalProperties": {"type": "string"}, "name": "image", "links": [{"href": "{self}", "rel": "self"}, {"href": "{file}", "rel": "enclosure"}, {"href": "{schema}", "rel": "describedby"}], "properties": {"status": {"enum": ["queued", "saving", "active", "killed", "deleted", "pending_delete"], "type": "string", "description": "Status of the image (READ-ONLY)"}, "tags": {"items": {"type": "string", "maxLength": 255}, "type": "array", "description": "List of strings related to the image"}, "kernel_id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "ID of image stored in Glance that should be used as the kernel when booting an AMI-style image.", "is_base": false}, "container_format": {"enum": ["ami", "ari", "aki", "bare", "ovf", "ova"], "type": "string", "description": "Format of the container"}, "min_ram": {"type": "integer", "description": "Amount of ram (in MB) required to boot image."}, "ramdisk_id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "ID of image stored in Glance that should be used as the ramdisk when booting an AMI-style image.", "is_base": false}, "locations": {"items": {"required": ["url", "metadata"], "type": "object", "properties": {"url": {"type": "string", "maxLength": 255}, "metadata": {"type": "object"}}}, "type": "array", "description": "A set of URLs to access the image file kept in external store"}, "visibility": {"enum": ["public", "private"], "type": "string", "description": "Scope of image accessibility"}, "updated_at": {"type": "string", "description": "Date and time of the last image modification (READ-ONLY)"}, "owner": {"type": "string", "description": "Owner of the image", "maxLength": 255}, "file": {"type": "string", "description": "(READ-ONLY)"}, "min_disk": {"type": "integer", "description": "Amount of disk space (in GB) required to boot image."}, "virtual_size": {"type": "integer", "description": "Virtual size of image in bytes (READ-ONLY)"}, "id": {"pattern": "^([0-9a-fA-F]){8}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){4}-([0-9a-fA-F]){12}$", "type": "string", "description": "An identifier for the image"}, "size": {"type": "integer", "description": "Size of image file in bytes (READ-ONLY)"}, "instance_uuid": {"type": "string", "description": "ID of instance used to create this image.", "is_base": false}, "os_distro": {"type": "string", "description": "Common name of operating system distribution as specified in http://docs.openstack.org/trunk/openstack-compute/admin/content/adding-images.html", "is_base": false}, "name": {"type": "string", "description": "Descriptive name for the image", "maxLength": 255}, "checksum": {"type": "string", "description": "md5 hash of image contents. 
(READ-ONLY)", "maxLength": 32}, "created_at": {"type": "string", "description": "Date and time of image registration (READ-ONLY)"}, "disk_format": {"enum": ["ami", "ari", "aki", "vhd", "vmdk", "raw", "qcow2", "vdi", "iso"], "type": "string", "description": "Format of the disk"}, "os_version": {"type": "string", "description": "Operating system version as specified by the distributor", "is_base": false}, "protected": {"type": "boolean", "description": "If true, image will not be deletable."}, "architecture": {"type": "string", "description": "Operating system architecture as specified in http://docs.openstack.org/trunk/openstack-compute/admin/content/adding-images.html", "is_base": false}, "direct_url": {"type": "string", "description": "URL to access the image file kept in external store (READ-ONLY)"}, "self": {"type": "string", "description": "(READ-ONLY)"}, "schema": {"type": "string", "description": "(READ-ONLY)"}}}
     log_http_response /usr/lib/python2.7/site-packages/glanceclient/common/http.py:131
    2014-10-23 08:26:45.558 29913 DEBUG cinder.openstack.common.processutils [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Running cmd (subprocess): sudo cinder-rootwrap /etc/cinder/rootwrap.conf env LC_ALL=C qemu-img info /var/lib/cinder/conversion/tmp6U4DTk execute /usr/lib/python2.7/site-packages/cinder/openstack/common/processutils.py:158
    2014-10-23 08:26:45.665 29913 DEBUG cinder.openstack.common.processutils [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Result was 0 execute /usr/lib/python2.7/site-packages/cinder/openstack/common/processutils.py:192
    2014-10-23 08:26:45.668 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] curl -i -X GET -H 'User-Agent: python-glanceclient' -H 'Content-Type: application/octet-stream' -H 'Accept-Encoding: gzip, deflate' -H 'Accept: */*' -H 'X-Auth-Token: {SHA1}8d5b9794e7a32fe15b6698bfa91a3f2ce38e46f6' http://phoenix:9292/v2/images/434cbaf5-0b1c-45ed-9e62-418e9a374102/file log_curl_request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:118
    2014-10-23 08:26:45.775 29913 DEBUG glanceclient.common.http [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Request returned failure status 500. _request /usr/lib/python2.7/site-packages/glanceclient/common/http.py:218
    2014-10-23 08:26:45.777 29913 ERROR cinder.volume.flows.manager.create_volume [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Failed to copy image 434cbaf5-0b1c-45ed-9e62-418e9a374102 to volume: ea2d3e2f-d388-4ec2-a92f-c6dd07d90906, error: HTTPInternalServerError (HTTP 500)
    2014-10-23 08:26:45.778 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Exiting old state 'SCHEDULING' in response to event 'wait' on_exit /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:156
    2014-10-23 08:26:45.779 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Entering new state 'WAITING' in response to event 'wait' on_enter /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:160
    2014-10-23 08:26:45.779 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Exiting old state 'WAITING' in response to event 'analyze' on_exit /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:156
    2014-10-23 08:26:45.780 29913 DEBUG taskflow.engines.action_engine.runner [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Entering new state 'ANALYZING' in response to event 'analyze' on_enter /usr/lib/python2.7/site-packages/taskflow/engines/action_engine/runner.py:160
    2014-10-23 08:26:45.781 29913 WARNING cinder.volume.manager [req-9e6baed2-693a-4506-9dcc-f31dd65f9ef7 8ca4594188154742b89c49d12efcde7e 0c7c025a9519414992a40ab9af9a18e0 - - -] Task 'cinder.volume.flows.manager.create_volume.CreateVolumeFromSpecTask;volume:create' (9afc4293-4c15-48aa-9534-6cafabd2c08b) transitioned into state 'FAILURE'
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager Traceback (most recent call last):
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/taskflow/engines/action_engine/executor.py", line 35, in _execute_task
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     result = task.execute(**arguments)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 638, in execute
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     **volume_spec)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 590, in _create_from_image
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     image_id, image_location, image_service)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager   File "/usr/lib/python2.7/site-packages/cinder/volume/flows/manager/create_volume.py", line 504, in _copy_image_to_volume
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager     raise exception.ImageCopyFailure(reason=ex)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)
    2014-10-23 08:26:45.781 29913 TRACE cinder.volume.manager


Update 1)

In Cinder I replaced the Ceph backend with an LVM backend and got the same error; Glance is now on Ceph and Cinder on LVM. The failing attempt is shown below.
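For reference, the backend swap amounts to replacing the rbd driver settings in cinder.conf with something like the following (a sketch; the volume group name is taken from the lvs output below, and lvm_type = thin matches the thin pool shown there):

    volume_driver = cinder.volume.drivers.lvm.LVMISCSIDriver
    volume_group = cinderVG
    lvm_type = thin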

phoenix:/etc/lvm # cinder create 1 --display-name vol_fail_testVG --image-id b7fff5f9-d63d-4729-ac1e-b8831a828e26 --availability-zone nova
+---------------------+--------------------------------------+
|       Property      |                Value                 |
+---------------------+--------------------------------------+
|     attachments     |                  []                  |
|  availability_zone  |                 nova                 |
|       bootable      |                false                 |
|      created_at     |      2014-10-24T12:51:18.086673      |
| display_description |                 None                 |
|     display_name    |           vol_fail_testVG            |
|      encrypted      |                False                 |
|          id         | 1de49e62-b0cd-40ef-818b-1ab988dfd6d7 |
|       image_id      | b7fff5f9-d63d-4729-ac1e-b8831a828e26 |
|       metadata      |                  {}                  |
|         size        |                  1                   |
|     snapshot_id     |                 None                 |
|     source_volid    |                 None                 |
|        status       |               creating               |
|     volume_type     |                 None                 |
+---------------------+--------------------------------------+
phoenix:/etc/lvm # cinder list
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
|                  ID                  | Status |     Display Name    | Size | Volume Type | Bootable | Attached to |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+
| 1de49e62-b0cd-40ef-818b-1ab988dfd6d7 | error  |   vol_fail_testVG   |  1   |     None    |  false   |             |
| 52df23c7-b988-4f8c-9754-deab0a349887 | error  |    vol_fail_test    |  1   |     None    |  false   |             |
| 628bf569-7654-4b49-aec5-b02ab49dd522 | error  | cirros-0.3.3-x86_64 |  1   |     None    |  false   |             |
| bbe05482-e89f-4485-9b1f-3a034e82a858 | error  |    vol_fail_test3   |  1   |     None    |  false   |             |
| d2a45408-41c5-4049-97f1-63692dbae233 | error  |    vol_fail_test2   |  1   |     None    |  false   |             |
+--------------------------------------+--------+---------------------+------+-------------+----------+-------------+

phoenix:/etc/lvm # lvs
  LV                                          VG       Attr      LSize  Pool          Origin Data%  Move Log Copy%  Convert
  cinderVG-pool                               cinderVG twi-a-tz- 95.00g                        0.00              
  volume-1de49e62-b0cd-40ef-818b-1ab988dfd6d7 cinderVG Vwi-a-tz-  1.00g cinderVG-pool          0.00              

cinder-volumes.log
2014-10-24 14:51:20.694 18244 TRACE oslo.messaging.rpc.dispatcher     raise exception.ImageCopyFailure(reason=ex)
2014-10-24 14:51:20.694 18244 TRACE oslo.messaging.rpc.dispatcher ImageCopyFailure: Failed to copy image to volume: HTTPInternalServerError (HTTP 500)

Update 2)
After some more testing it looks like the problem is in Glance after all: when I revert the Glance store to file it works; when I change the store to rbd it does not.
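Concretely, that is a one-line toggle of default_store in glance-api.conf (see the dump below), followed by a restart of the glance-api service:

    default_store = file   # volume creation from image works
    default_store = rbd    # volume creation from image fails with HTTP 500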

phoenix:/etc/glance # grep "^[a-zA-Z []" glance-api.conf
[DEFAULT]
verbose = True
debug = True
default_store = rbd
bind_host = 0.0.0.0
bind_port = 9292
log_file = /var/log/glance/api.log
backlog = 4096
registry_host = 0.0.0.0
registry_port = 9191
registry_client_protocol = http
auth_strategy = keystone
rabbit_host = phoenix
rabbit_port = 5672
rabbit_use_ssl = false
rabbit_userid = guest
rabbit_password = passw
rabbit_virtual_host = /
rabbit_notification_exchange = glance
rabbit_notification_topic = notifications
rabbit_durable_queues = False
qpid_notification_exchange = glance
qpid_notification_topic = notifications
qpid_hostname = localhost
qpid_port = 5672
qpid_username =
qpid_password =
qpid_sasl_mechanisms =
qpid_reconnect_timeout = 0
qpid_reconnect_limit = 0
qpid_reconnect_interval_min = 0
qpid_reconnect_interval_max = 0
qpid_reconnect_interval = 0
qpid_heartbeat = 5
qpid_protocol = tcp
qpid_tcp_nodelay = True
delayed_delete = False
scrub_time = 43200
scrubber_datadir = /var/lib/glance/scrubber
image_cache_dir = /var/lib/glance/image-cache/
verbose = True
notification_driver = messaging
lock_path = /var/run/glance
[database]
connection = mysql://glance:passw@phoenix/glance
[keystone_authtoken]
identity_uri = http://phoenix:35357
admin_tenant_name = service
admin_user = glance
admin_password = passw
revocation_cache_time = 10
signing_dir = /var/cache/glance/keystone-signing
[paste_deploy]
flavor = keystone
[store_type_location_strategy]
[profiler]
[task]
[glance_store]
stores = glance.store.filesystem.Store,
         glance.store.rbd.Store,
         glance.store.http.Store
filesystem_store_datadir = /var/lib/glance/images/
swift_store_auth_version = 2
swift_store_auth_address = 127.0.0.1:5000/v2.0/
swift_store_user = jdoe:jdoe
swift_store_key = a86850deb2742ec3cb41518e26aa2d89
swift_store_container = glance
swift_store_create_container_on_put = False
swift_store_large_object_size = 5120
swift_store_large_object_chunk_size = 200
swift_enable_snet = False
s3_store_host = 127.0.0.1:8080/v1.0/
s3_store_access_key = <20-char AWS access key>
s3_store_secret_key = <40-char AWS secret key>
s3_store_bucket = <lowercased 20-char aws access key>glance
s3_store_create_bucket_on_put = False
rbd_store_ceph_conf = /etc/glance/ceph.client.images.keyring
rbd_store_user = images
rbd_store_pool = images
show_image_direct_url=True
sheepdog_store_address = localhost
sheepdog_store_port = 7000
sheepdog_store_chunk_size = 64
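With the python-glance_store update mentioned at the top applied and glance-api restarted, both paths can be re-tested; something along these lines (vol_ok_test is a hypothetical volume name; the image ID is from the listing above):

    phoenix:~ # rpm -q python-glance_store    # should now report 0.1.10-5.1
    phoenix:~ # glance image-download --file /tmp/cirros.img b7fff5f9-d63d-4729-ac1e-b8831a828e26
    phoenix:~ # cinder create 1 --display-name vol_ok_test --image-id b7fff5f9-d63d-4729-ac1e-b8831a828e26 --availability-zone nova
    phoenix:~ # cinder list    # the new volume should reach status 'available'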