Hi List,

(apologies if this turns up as a double post; the original didn't look like it was sent)

I've encountered an unusual problem: libvirtd segfaults when a live
migration is initiated from the CloudStack management server.

I have 5 identical (built with SaltStack) Dell PE M420 blades running
CentOS 6.6 with Intel Xeon E5-2470 v2 CPUs, and all of them exhibit the
same crash.

The backtrace from the core dump indicates that something is dying
inside libc.so.6.
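
(For completeness, the trace was produced by loading the core into GDB
against the daemon binary, roughly as below; the core filename is
illustrative, since it depends on the core_pattern/abrt setup:)

  gdb /usr/sbin/libvirtd core.12345
  (gdb) backtrace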

I've played with the CPU passthrough settings on the agent (sketched
below), but this doesn't seem to influence whether it crashes or not,
and normal operation of the VMs (start, stop, usage etc.) all appears
fine.
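
For reference, the CPU-related knobs I've been toggling are in the agent
config, roughly as follows (values are illustrative, from memory):

  # /etc/cloudstack/agent/agent.properties
  # host-passthrough exposes the host CPU directly to the guest;
  # host-model asks libvirt for the closest named model instead
  guest.cpu.mode=host-passthrough
  # guest.cpu.model is only consulted when guest.cpu.mode=custom
  #guest.cpu.model=SandyBridge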

I'm considering trying CentOS 7 to see whether it happens there too,
but I haven't done that yet.

Here is the GDB backtrace:

Program terminated with signal 11, Segmentation fault.
#0  0x00007f7d8f7fe43a in __strcmp_sse42 () from /lib64/libc.so.6
Missing separate debuginfos, use: debuginfo-install 
libvirt-0.10.2-46.el6_6.2.x86_64
(gdb) backtrace
#0  0x00007f7d8f7fe43a in __strcmp_sse42 () from /lib64/libc.so.6
#1  0x00007f7d92dd6411 in ?? () from /usr/lib64/libvirt.so.0
#2  0x00007f7d92dd87e8 in ?? () from /usr/lib64/libvirt.so.0
#3  0x00000000004aac4e in ?? ()
#4  0x000000000048a2cc in ?? ()
#5  0x0000000000491110 in ?? ()
#6  0x0000000000491ab7 in ?? ()
#7  0x00000000004550b4 in ?? ()
#8  0x00007f7d92def13f in virDomainMigratePrepare3 () from 
/usr/lib64/libvirt.so.0
#9  0x000000000042eddf in ?? ()
#10 0x00007f7d92e50132 in virNetServerProgramDispatch () from 
/usr/lib64/libvirt.so.0
#11 0x00007f7d92e4d70e in ?? () from /usr/lib64/libvirt.so.0
#12 0x00007f7d92e4ddac in ?? () from /usr/lib64/libvirt.so.0
#13 0x00007f7d92d6bb3c in ?? () from /usr/lib64/libvirt.so.0
#14 0x00007f7d92d6b429 in ?? () from /usr/lib64/libvirt.so.0
#15 0x00007f7d8fe789d1 in start_thread () from /lib64/libpthread.so.0
#16 0x00007f7d8f7be9dd in clone () from /lib64/libc.so.6
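
To resolve the frames shown as ??, I installed the debuginfo packages
first, roughly as below (going by GDB's hint above, with glibc added so
the libc frames resolve too; this pulls from the CentOS debuginfo repo):

  debuginfo-install glibc libvirt-0.10.2-46.el6_6.2.x86_64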
 
With those installed, the same trace resolves more specifically to:

#0  __strcmp_sse42 () at ../sysdeps/x86_64/multiarch/strcmp.S:260
#1  0x00007f7d92dd6411 in x86ModelFind (cpu=0x7f7d68003440, map=0x7f7d680021e0, 
policy=1) at cpu/cpu_x86.c:831
#2  x86ModelFromCPU (cpu=0x7f7d68003440, map=0x7f7d680021e0, policy=1) at 
cpu/cpu_x86.c:850
#3  0x00007f7d92dd87e8 in x86Compute (host=<value optimized out>, 
cpu=0x7f7d68003440, guest=0x7f7d82f04df0, message=0x7f7d82f04de0) at 
cpu/cpu_x86.c:1243
#4  0x00000000004aac4e in qemuBuildCpuArgStr (conn=0x7f7d50000920, 
driver=0x7f7d78013b20, def=0x7f7d68002830, monitor_chr=0x7f7d680026f0, 
monitor_json=true, caps=0x7f7d68002c50, 
    migrateFrom=0x7f7d680136d0 "tcp:[::]:49152", migrateFd=-1, snapshot=0x0, 
vmop=VIR_NETDEV_VPORT_PROFILE_OP_MIGRATE_IN_START) at qemu/qemu_command.c:4516
#5  qemuBuildCommandLine (conn=0x7f7d50000920, driver=0x7f7d78013b20, 
def=0x7f7d68002830, monitor_chr=0x7f7d680026f0, monitor_json=true, 
caps=0x7f7d68002c50, migrateFrom=0x7f7d680136d0 "tcp:[::]:49152", 
    migrateFd=-1, snapshot=0x0, 
vmop=VIR_NETDEV_VPORT_PROFILE_OP_MIGRATE_IN_START) at qemu/qemu_command.c:5320
#6  0x000000000048a2cc in qemuProcessStart (conn=0x7f7d50000920, 
driver=0x7f7d78013b20, vm=0x7f7d68006e10, migrateFrom=0x7f7d680136d0 
"tcp:[::]:49152", stdin_fd=-1, stdin_path=0x0, snapshot=0x0, 
    vmop=VIR_NETDEV_VPORT_PROFILE_OP_MIGRATE_IN_START, flags=6) at 
qemu/qemu_process.c:4008
#7  0x0000000000491110 in qemuMigrationPrepareAny (driver=0x7f7d78013b20, 
dconn=0x7f7d50000920, cookiein=<value optimized out>, cookieinlen=255, 
cookieout=0x7f7d82f05ae0, cookieoutlen=0x7f7d82f05aec, 
    dname=0x7f7d68002570 "i-2-10-VM", 
    dom_xml=0x7f7d680013d0 "<domain type='kvm'>\n  <name>i-2-10-VM</name>\n  
<uuid>95c5aa11-f7ad-4322-b377-d153774e330f</uuid>\n  <description>CentOS 6.5 
(64-bit)</description>\n  <memory unit='KiB'>1048576</memory>\n  
<currentMemory"..., st=0x0, port=49152, autoPort=true) at 
qemu/qemu_migration.c:1502
#8  0x0000000000491ab7 in qemuMigrationPrepareDirect (driver=0x7f7d78013b20, 
dconn=0x7f7d50000920, 
    cookiein=0x7f7d680012c0 "<qemu-migration>\n  <name>i-2-10-VM</name>\n  
<uuid>95c5aa11-f7ad-4322-b377-d153774e330f</uuid>\n  
<hostname>eqx-cs-cmp-05.ipscape.com.au</hostname>\n  
<hostuuid>44454c4c-3000-104b-8043-b4c04f573232</host"..., cookieinlen=255, 
cookieout=0x7f7d82f05ae0, cookieoutlen=0x7f7d82f05aec, uri_in=<value optimized 
out>, uri_out=0x7f7d68002680, dname=0x7f7d68002570 "i-2-10-VM", 
    dom_xml=0x7f7d680013d0 "<domain type='kvm'>\n  <name>i-2-10-VM</name>\n  
<uuid>95c5aa11-f7ad-4322-b377-d153774e330f</uuid>\n  <description>CentOS 6.5 
(64-bit)</description>\n  <memory unit='KiB'>1048576</memory>\n  
<currentMemory"...) at qemu/qemu_migration.c:1747
#9  0x00000000004550b4 in qemuDomainMigratePrepare3 (dconn=0x7f7d50000920, 
    cookiein=0x7f7d680012c0 "<qemu-migration>\n  <name>i-2-10-VM</name>\n  
<uuid>95c5aa11-f7ad-4322-b377-d153774e330f</uuid>\n  
<hostname>eqx-cs-cmp-05.ipscape.com.au</hostname>\n  
<hostuuid>44454c4c-3000-104b-8043-b4c04f573232</host"..., cookieinlen=255, 
cookieout=0x7f7d82f05ae0, cookieoutlen=<value optimized out>, uri_in=<value 
optimized out>, uri_out=0x7f7d68002680, flags=1, dname=0x7f7d68002570 
"i-2-10-VM", 
    resource=10000, 
    dom_xml=0x7f7d680013d0 "<domain type='kvm'>\n  <name>i-2-10-VM</name>\n  
<uuid>95c5aa11-f7ad-4322-b377-d153774e330f</uuid>\n  <description>CentOS 6.5 
(64-bit)</description>\n  <memory unit='KiB'>1048576</memory>\n  
<currentMemory"...) at qemu/qemu_driver.c:10657
#10 0x00007f7d92def13f in virDomainMigratePrepare3 (dconn=0x7f7d50000920, 
    cookiein=0x7f7d680012c0 "<qemu-migration>\n  <name>i-2-10-VM</name>\n  
<uuid>95c5aa11-f7ad-4322-b377-d153774e330f</uuid>\n  
<hostname>eqx-cs-cmp-05.ipscape.com.au</hostname>\n  
<hostuuid>44454c4c-3000-104b-8043-b4c04f573232</host"..., cookieinlen=255, 
cookieout=0x7f7d82f05ae0, cookieoutlen=0x7f7d82f05aec, uri_in=0x7f7d680026b0 
"tcp:10.61.153.22", uri_out=0x7f7d68002680, flags=1, 
    dname=0x7f7d68002570 "i-2-10-VM", bandwidth=10000, 
    dom_xml=0x7f7d680013d0 "<domain type='kvm'>\n  <name>i-2-10-VM</name>\n  
<uuid>95c5aa11-f7ad-4322-b377-d153774e330f</uuid>\n  <description>CentOS 6.5 
(64-bit)</description>\n  <memory unit='KiB'>1048576</memory>\n  
<currentMemory"...) at libvirt.c:6157
#11 0x000000000042eddf in remoteDispatchDomainMigratePrepare3 (server=<value 
optimized out>, client=<value optimized out>, msg=<value optimized out>, 
rerr=0x7f7d82f05b80, args=0x7f7d680027b0, 
    ret=0x7f7d68002790) at remote.c:3590
#12 remoteDispatchDomainMigratePrepare3Helper (server=<value optimized out>, 
client=<value optimized out>, msg=<value optimized out>, rerr=0x7f7d82f05b80, 
args=0x7f7d680027b0, ret=0x7f7d68002790)
    at remote_dispatch.h:3695
#13 0x00007f7d92e50132 in virNetServerProgramDispatchCall (prog=0x16b7700, 
server=0x16aea20, client=0x16b7010, msg=0x16b07f0) at 
rpc/virnetserverprogram.c:431
#14 virNetServerProgramDispatch (prog=0x16b7700, server=0x16aea20, 
client=0x16b7010, msg=0x16b07f0) at rpc/virnetserverprogram.c:304
#15 0x00007f7d92e4d70e in virNetServerProcessMsg (srv=<value optimized out>, 
client=0x16b7010, prog=<value optimized out>, msg=0x16b07f0) at 
rpc/virnetserver.c:170
#16 0x00007f7d92e4ddac in virNetServerHandleJob (jobOpaque=<value optimized 
out>, opaque=0x16aea20) at rpc/virnetserver.c:191
#17 0x00007f7d92d6bb3c in virThreadPoolWorker (opaque=<value optimized out>) at 
util/threadpool.c:144
#18 0x00007f7d92d6b429 in virThreadHelper (data=<value optimized out>) at 
util/threads-pthread.c:161
#19 0x00007f7d8fe789d1 in start_thread (arg=0x7f7d82f06700) at 
pthread_create.c:301
#20 0x00007f7d8f7be9dd in clone () at 
../sysdeps/unix/sysv/linux/x86_64/clone.S:115
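
Given that the crash is inside the CPU model comparison (x86ModelFind
via x86Compute, while building the incoming QEMU command line), one
check that may be worth running is feeding the source host's CPU
definition to the destination with virsh cpu-compare. Roughly (the sed
extraction is just a quick-and-dirty way to grab the host <cpu> element
from the capabilities XML):

  # on the source host: pull the host CPU definition out of the capabilities
  virsh capabilities | sed -n '/<cpu>/,/<\/cpu>/p' > source-cpu.xml
  # on the destination host: check whether that CPU can be satisfied there
  virsh cpu-compare source-cpu.xml

cpu-compare reports whether the described CPU is identical to, a subset
of, or incompatible with the destination host's CPU, which might show up
a CPU-map mismatch between otherwise "identical" hosts.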

Any ideas?

Regards, Lee
