Re: Octopus upgrade breaks Ubuntu 18.04 libvirt

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

 



Alexander, here you go:

1. strace of the libvirtd -l process:


root@ais-cloudhost1:/etc/libvirt# strace -f -p 53745 
strace: Process 53745 attached with 17 threads
[pid 53786] futex(0x7fd90c0f4618, FUTEX_WAIT_BITSET_PRIVATE|FUTEX_CLOCK_REALTIME, 2, NULL, 0xffffffff <unfinished ...>
[pid 53785] futex(0x55699ad13ae0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53784] futex(0x55699ad13ae0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53783] futex(0x55699ad13ae0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53782] futex(0x55699ad13ae0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53781] futex(0x55699ad13ae0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53764] futex(0x55699acc5fc0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53763] futex(0x55699acc5fc0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53762] futex(0x55699acc5fc0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53761] futex(0x55699acc5fc0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53760] futex(0x55699acc5fc0, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53759] futex(0x55699acc5f20, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53757] futex(0x55699acc5f20, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53756] futex(0x55699acc5f20, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53755] futex(0x55699acc5f20, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53754] futex(0x55699acc5f20, FUTEX_WAIT_PRIVATE, 0, NULL <unfinished ...>
[pid 53745] restart_syscall(<... resuming interrupted poll ...>



<Stuck and no more activity below the above line>



2. The strace -f of the virsh pool-list process (the last hundred lines or so):


close(3)                                = 0
openat(AT_FDCWD, "/proc/net/psched", O_RDONLY) = 3
fstat(3, {st_mode=S_IFREG|0444, st_size=0, ...}) = 0
read(3, "000003e8 00000040 000f4240 3b9ac"..., 1024) = 36
close(3)                                = 0
openat(AT_FDCWD, "/usr/lib/locale/locale-archive", O_RDONLY|O_CLOEXEC) = 3
fstat(3, {st_mode=S_IFREG|0644, st_size=1683056, ...}) = 0
mmap(NULL, 1683056, PROT_READ, MAP_PRIVATE, 3, 0) = 0x7f9137936000
close(3)                                = 0
ioctl(0, TCGETS, {B38400 opost isig icanon echo ...}) = 0
ioctl(0, TCGETS, {B38400 opost isig icanon echo ...}) = 0
getuid()                                = 0
geteuid()                               = 0
openat(AT_FDCWD, "/usr/lib/x86_64-linux-gnu/gconv/gconv-modules.cache", O_RDONLY) = 3
fstat(3, {st_mode=S_IFREG|0644, st_size=26376, ...}) = 0
mmap(NULL, 26376, PROT_READ, MAP_SHARED, 3, 0) = 0x7f9137afa000
close(3)                                = 0
futex(0x7f91366cca08, FUTEX_WAKE_PRIVATE, 2147483647) = 0
uname({sysname="Linux", nodename="ais-cloudhost1", ...}) = 0
futex(0x7f9137286ce0, FUTEX_WAKE_PRIVATE, 2147483647) = 0
socket(AF_INET6, SOCK_DGRAM, IPPROTO_IP) = 3
close(3)                                = 0
futex(0x7f9137287700, FUTEX_WAKE_PRIVATE, 2147483647) = 0
pipe2([3, 4], O_NONBLOCK|O_CLOEXEC)     = 0
mmap(NULL, 8392704, PROT_NONE, MAP_PRIVATE|MAP_ANONYMOUS|MAP_STACK, -1, 0) = 0x7f912ac5f000
mprotect(0x7f912ac60000, 8388608, PROT_READ|PROT_WRITE) = 0
clone(child_stack=0x7f912b45edb0, flags=CLONE_VM|CLONE_FS|CLONE_FILES|CLONE_SIGHAND|CLONE_THREAD|CLONE_SYSVSEM|CLONE_SE
TTLS|CLONE_PARENT_SETTID|CLONE_CHILD_CLEARTID, parent_tidptr=0x7f912b45f9d0, tls=0x7f912b45f700, child_tidptr=0x7f912b4
5f9d0) = 54510
strace: Process 54510 attached
[pid 54509] futex(0x7f9137286fb8, FUTEX_WAKE_PRIVATE, 2147483647 <unfinished ...>
[pid 54510] set_robust_list(0x7f912b45f9e0, 24 <unfinished ...>
[pid 54509] <... futex resumed> )       = 0
[pid 54510] <... set_robust_list resumed> ) = 0
[pid 54509] futex(0x7f91372876f8, FUTEX_WAKE_PRIVATE, 2147483647 <unfinished ...>
[pid 54510] mmap(NULL, 134217728, PROT_NONE, MAP_PRIVATE|MAP_ANONYMOUS|MAP_NORESERVE, -1, 0 <unfinished ...>
[pid 54509] <... futex resumed> )       = 0
[pid 54510] <... mmap resumed> )        = 0x7f9122c5f000
[pid 54509] geteuid( <unfinished ...>
[pid 54510] munmap(0x7f9122c5f000, 20582400 <unfinished ...>
[pid 54509] <... geteuid resumed> )     = 0
[pid 54510] <... munmap resumed> )      = 0
[pid 54509] access("/etc/libvirt/libvirt.conf", F_OK <unfinished ...>
[pid 54510] munmap(0x7f9128000000, 46526464 <unfinished ...>
[pid 54509] <... access resumed> )      = 0
[pid 54510] <... munmap resumed> )      = 0
[pid 54509] openat(AT_FDCWD, "/etc/libvirt/libvirt.conf", O_RDONLY <unfinished ...>
[pid 54510] mprotect(0x7f9124000000, 135168, PROT_READ|PROT_WRITE <unfinished ...>
[pid 54509] <... openat resumed> )      = 5
[pid 54510] <... mprotect resumed> )    = 0
[pid 54509] read(5, "#\n# This can be used to setup UR"..., 8192) = 547
[pid 54510] futex(0x7f9137287218, FUTEX_WAKE_PRIVATE, 2147483647 <unfinished ...>
[pid 54509] read(5,  <unfinished ...>
[pid 54510] <... futex resumed> )       = 0
[pid 54509] <... read resumed> "", 7645) = 0
[pid 54510] gettid( <unfinished ...>
[pid 54509] close(5 <unfinished ...>
[pid 54510] <... gettid resumed> )      = 54510
[pid 54509] <... close resumed> )       = 0
[pid 54509] getuid( <unfinished ...>
[pid 54510] poll([{fd=3, events=POLLIN}], 1, -1 <unfinished ...>
[pid 54509] <... getuid resumed> )      = 0
[pid 54509] geteuid()                   = 0
[pid 54509] access("/proc/vz", F_OK)    = -1 ENOENT (No such file or directory)
[pid 54509] geteuid()                   = 0
[pid 54509] getuid()                    = 0
[pid 54509] geteuid()                   = 0
[pid 54509] socket(AF_UNIX, SOCK_STREAM, 0) = 5
[pid 54509] connect(5, {sa_family=AF_UNIX, sun_path="/var/run/libvirt/libvirt-sock"}, 110) = 0
[pid 54509] getsockname(5, {sa_family=AF_UNIX}, [128->2]) = 0
[pid 54509] futex(0x7f9137287a08, FUTEX_WAKE_PRIVATE, 2147483647) = 0
[pid 54509] fcntl(5, F_GETFD)           = 0
[pid 54509] fcntl(5, F_SETFD, FD_CLOEXEC) = 0
[pid 54509] fcntl(5, F_GETFL)           = 0x2 (flags O_RDWR)
[pid 54509] fcntl(5, F_SETFL, O_RDWR|O_NONBLOCK) = 0
[pid 54509] futex(0x7f9137287908, FUTEX_WAKE_PRIVATE, 2147483647) = 0
[pid 54509] pipe2([6, 7], O_CLOEXEC)    = 0
[pid 54509] write(4, "\0", 1)           = 1
[pid 54510] <... poll resumed> )        = 1 ([{fd=3, revents=POLLIN}])
[pid 54509] futex(0x7f91372879d0, FUTEX_WAKE_PRIVATE, 2147483647) = 0
[pid 54510] read(3,  <unfinished ...>
[pid 54509] futex(0x7f9137287920, FUTEX_WAKE_PRIVATE, 2147483647 <unfinished ...>
[pid 54510] <... read resumed> "\0", 1) = 1
[pid 54509] <... futex resumed> )       = 0
[pid 54509] brk(0x55b0b2c2f000 <unfinished ...>
[pid 54510] poll([{fd=3, events=POLLIN}, {fd=5, events=POLLIN}], 2, -1 <unfinished ...>
[pid 54509] <... brk resumed> )         = 0x55b0b2c2f000
[pid 54509] write(4, "\0", 1)           = 1
[pid 54510] <... poll resumed> )        = 1 ([{fd=3, revents=POLLIN}])
[pid 54509] rt_sigprocmask(SIG_BLOCK, [PIPE CHLD WINCH],  <unfinished ...>
[pid 54510] read(3,  <unfinished ...>
[pid 54509] <... rt_sigprocmask resumed> [], 8) = 0
[pid 54510] <... read resumed> "\0", 1) = 1
[pid 54509] poll([{fd=5, events=POLLOUT}, {fd=6, events=POLLIN}], 2, -1 <unfinished ...>
[pid 54510] poll([{fd=3, events=POLLIN}], 1, -1 <unfinished ...>
[pid 54509] <... poll resumed> )        = 1 ([{fd=5, revents=POLLOUT}])
[pid 54509] rt_sigprocmask(SIG_SETMASK, [], NULL, 8) = 0
[pid 54509] write(5, "\0\0\0\34 \0\200\206\0\0\0\1\0\0\0B\0\0\0\0\0\0\0\0\0\0\0\0", 28) = 28
[pid 54509] rt_sigprocmask(SIG_BLOCK, [PIPE CHLD WINCH], [], 8) = 0
[pid 54509] poll([{fd=5, events=POLLIN}, {fd=6, events=POLLIN}], 2, -1





Andrei


----- Original Message -----
> From: "Alexander E. Patrakov" <patrakov@xxxxxxxxx>
> To: "Andrei Mikhailovsky" <andrei@xxxxxxxxxx>
> Cc: "dillaman" <dillaman@xxxxxxxxxx>, "ceph-users" <ceph-users@xxxxxxx>
> Sent: Wednesday, 8 July, 2020 14:50:56
> Subject: Re:  Re: Octopus upgrade breaks Ubuntu 18.04 libvirt

> Please strace both virsh and libvirtd (you can attach to it by pid),
> and make sure that the strace command uses the "-f" switch (i.e.
> traces all threads).
> 
> On Wed, Jul 8, 2020 at 6:20 PM Andrei Mikhailovsky <andrei@xxxxxxxxxx> wrote:
>>
>> Jason,
>>
>> After adding the 1:storage to the log line of the config and restarting the
>> service I do not see anything in the logs. I've started the "virsh pool-list"
>> command several times and there is absolutely nothing in the logs. The command
>> keeps hanging.
>>
>>
>> running the strace virsh pool-list shows (the last 50-100 lines or so):
>>
>>
>>
>> ioctl(0, TCGETS, {B38400 opost isig icanon echo ...}) = 0
>> ioctl(0, TCGETS, {B38400 opost isig icanon echo ...}) = 0
>> getuid()                                = 0
>> geteuid()                               = 0
>> openat(AT_FDCWD, "/usr/lib/x86_64-linux-gnu/gconv/gconv-modules.cache",
>> O_RDONLY) = 3
>> fstat(3, {st_mode=S_IFREG|0644, st_size=26376, ...}) = 0
>> mmap(NULL, 26376, PROT_READ, MAP_SHARED, 3, 0) = 0x7fe979933000
>> close(3)                                = 0
>> futex(0x7fe978505a08, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> uname({sysname="Linux", nodename="ais-cloudhost1", ...}) = 0
>> futex(0x7fe9790bfce0, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> socket(AF_INET6, SOCK_DGRAM, IPPROTO_IP) = 3
>> close(3)                                = 0
>> futex(0x7fe9790c0700, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> pipe2([3, 4], O_NONBLOCK|O_CLOEXEC)     = 0
>> mmap(NULL, 8392704, PROT_NONE, MAP_PRIVATE|MAP_ANONYMOUS|MAP_STACK, -1, 0) =
>> 0x7fe96ca98000
>> mprotect(0x7fe96ca99000, 8388608, PROT_READ|PROT_WRITE) = 0
>> clone(child_stack=0x7fe96d297db0,
>> flags=CLONE_VM|CLONE_FS|CLONE_FILES|CLONE_SIGHAND|CLONE_THREAD|CLONE_SYSVSEM|CLONE_SE
>> TTLS|CLONE_PARENT_SETTID|CLONE_CHILD_CLEARTID, parent_tidptr=0x7fe96d2989d0,
>> tls=0x7fe96d298700, child_tidptr=0x7fe96d2
>> 989d0) = 54218
>> futex(0x7fe9790bffb8, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> futex(0x7fe9790c06f8, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> geteuid()                               = 0
>> access("/etc/libvirt/libvirt.conf", F_OK) = 0
>> openat(AT_FDCWD, "/etc/libvirt/libvirt.conf", O_RDONLY) = 5
>> read(5, "#\n# This can be used to setup UR"..., 8192) = 547
>> read(5, "", 7645)                       = 0
>> close(5)                                = 0
>> getuid()                                = 0
>> geteuid()                               = 0
>> access("/proc/vz", F_OK)                = -1 ENOENT (No such file or directory)
>> geteuid()                               = 0
>> getuid()                                = 0
>> geteuid()                               = 0
>> socket(AF_UNIX, SOCK_STREAM, 0)         = 5
>> connect(5, {sa_family=AF_UNIX, sun_path="/var/run/libvirt/libvirt-sock"}, 110) =
>> 0
>> getsockname(5, {sa_family=AF_UNIX}, [128->2]) = 0
>> futex(0x7fe9790c0a08, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> fcntl(5, F_GETFD)                       = 0
>> fcntl(5, F_SETFD, FD_CLOEXEC)           = 0
>> fcntl(5, F_GETFL)                       = 0x2 (flags O_RDWR)
>> fcntl(5, F_SETFL, O_RDWR|O_NONBLOCK)    = 0
>> futex(0x7fe9790c0908, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> pipe2([6, 7], O_CLOEXEC)                = 0
>> write(4, "\0", 1)                       = 1
>> futex(0x7fe9790bfb60, FUTEX_WAKE_PRIVATE, 1) = 1
>> futex(0x7fe9790c09d0, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> futex(0x7fe9790c0920, FUTEX_WAKE_PRIVATE, 2147483647) = 0
>> brk(0x5598ffebb000)                     = 0x5598ffebb000
>> write(4, "\0", 1)                       = 1
>> futex(0x7fe9790bfb60, FUTEX_WAKE_PRIVATE, 1) = 1
>> rt_sigprocmask(SIG_BLOCK, [PIPE CHLD WINCH], [], 8) = 0
>> poll([{fd=5, events=POLLOUT}, {fd=6, events=POLLIN}], 2, -1) = 1 ([{fd=5,
>> revents=POLLOUT}])
>> rt_sigprocmask(SIG_SETMASK, [], NULL, 8) = 0
>> write(5, "\0\0\0\34 \0\200\206\0\0\0\1\0\0\0B\0\0\0\0\0\0\0\0\0\0\0\0", 28) = 28
>> rt_sigprocmask(SIG_BLOCK, [PIPE CHLD WINCH], [], 8) = 0
>> poll([{fd=5, events=POLLIN}, {fd=6, events=POLLIN}], 2, -1
>>
>>
>>
>>
>>
>>
>> It gets stuck at the last line and there is nothing happening.
>>
>> Andrei
>>
>> ----- Original Message -----
>> > From: "Jason Dillaman" <jdillama@xxxxxxxxxx>
>> > To: "Andrei Mikhailovsky" <andrei@xxxxxxxxxx>
>> > Cc: "ceph-users" <ceph-users@xxxxxxx>
>> > Sent: Tuesday, 7 July, 2020 23:33:03
>> > Subject: Re:  Re: Octopus upgrade breaks Ubuntu 18.04 libvirt
>>
>> > On Tue, Jul 7, 2020 at 5:14 PM Andrei Mikhailovsky <andrei@xxxxxxxxxx> wrote:
>> >>
>> >> Hi Jason,
>> >> The extract from the debug log file is given below in the first message. It just
>> >> repeats those lines every so often.
>> >>
>> >> I can't find anything else.
>> >
>> > I would expect lots of debug logs from the storage backend. Do you
>> > have a "1:storage" entry in your libvirtd.conf?
>> >
>> >>  Cheers
>> >> ----- Original Message -----
>> >> > From: "Jason Dillaman" <jdillama@xxxxxxxxxx>
>> >> > To: "Andrei Mikhailovsky" <andrei@xxxxxxxxxx>
>> >> > Cc: "ceph-users" <ceph-users@xxxxxxx>
>> >> > Sent: Tuesday, 7 July, 2020 16:33:25
>> >> > Subject: Re:  Re: Octopus upgrade breaks Ubuntu 18.04 libvirt
>> >>
>> >> > On Tue, Jul 7, 2020 at 11:07 AM Andrei Mikhailovsky <andrei@xxxxxxxxxx> wrote:
>> >> >>
>> >> >> I've left the virsh pool-list command 'hang' for a while and it did eventually
>> >> >> get the results back. In about 4 hours!
>> >> >
>> >> > Perhaps enable the debug logging of libvirt [1] to determine what it's
>> >> > spending its time on?
>> >> >
>> >> >> root@ais-cloudhost1:/home/andrei# time virsh pool-list
>> >> >>  Name                 State      Autostart
>> >> >> -------------------------------------------
>> >> >>  12ca033f-e673-4060-8db9-909d79650f39 active     no
>> >> >>  bcc753c6-e47a-3b7c-904a-fcc1d0a594c5 active     no
>> >> >>  cf771bc7-8998-354d-8e10-5564585a3c20 active     no
>> >> >>  d8d5ec36-3cb0-39af-8fc6-084a4abd5d28 active     no
>> >> >>
>> >> >>
>> >> >> real    234m23.877s
>> >> >> user    0m0.351s
>> >> >> sys     0m0.506s
>> >> >>
>> >> >>
>> >> >>
>> >> >> The second attempt took a mere 2 hours and a bit.
>> >> >>
>> >> >>
>> >> >> root@ais-cloudhost1:/home/andrei# time virsh pool-list
>> >> >>  Name                 State      Autostart
>> >> >> -------------------------------------------
>> >> >>  12ca033f-e673-4060-8db9-909d79650f39 active     no
>> >> >>  bcc753c6-e47a-3b7c-904a-fcc1d0a594c5 active     no
>> >> >>  cf771bc7-8998-354d-8e10-5564585a3c20 active     no
>> >> >>  d8d5ec36-3cb0-39af-8fc6-084a4abd5d28 active     no
>> >> >>
>> >> >>
>> >> >> real    148m54.763s
>> >> >> user    0m0.241s
>> >> >> sys     0m0.304s
>> >> >>
>> >> >>
>> >> >>
>> >> >> Am I the only person having these issues with libvirt and Octopus release?
>> >> >>
>> >> >> Cheers
>> >> >>
>> >> >> ----- Original Message -----
>> >> >> > From: "Andrei Mikhailovsky" <andrei@xxxxxxxxxx>
>> >> >> > To: "ceph-users" <ceph-users@xxxxxxx>
>> >> >> > Sent: Monday, 6 July, 2020 19:27:25
>> >> >> > Subject:  Re: Octopus upgrade breaks Ubuntu 18.04 libvirt
>> >> >>
>> >> >> > A quick update.
>> >> >> >
>> >> >> > I have done a fresh install of the CloudStack host server running Ubuntu 18.04
>> >> >> > with the latest updates. I've installed ceph 12.x and connected it to
>> >> >> > Cloudstack which uses kvm/libvirt/ceph/rbd. The rest of the ceph services
>> >> >> > (mon,mgr,osd,etc) are all running 15.2.3. Works like a charm.
>> >> >> >
>> >> >> > As soon as I've updated the host server to version 15.2.3, Libvirt stopped
>> >> >> > working. It just hangs without doing much it seems. Common commands like 'virsh
>> >> >> > pool-list' or 'virsh list' are just hanging. I've straced the process and it
>> >> >> > just doesn't show any activity.
>> >> >> >
>> >> >> >
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: info : virEventPollUpdateTimeout:265 :
>> >> >> > EVENT_POLL_UPDATE_TIMEOUT: timer=993 frequen
>> >> >> > cy=5000
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventPollUpdateTimeout:282 : Set
>> >> >> > timer freq=5000 expires=1594059521930
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventPollInterruptLocked:722 :
>> >> >> > Skip interrupt, 1 140123172218240
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: info : virEventPollUpdateHandle:152 :
>> >> >> > EVENT_POLL_UPDATE_HANDLE: watch=1004 events=1
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventPollInterruptLocked:722 :
>> >> >> > Skip interrupt, 1 140123172218240
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventPollCleanupTimeouts:525 :
>> >> >> > Cleanup 8
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventPollCleanupHandles:574 :
>> >> >> > Cleanup 22
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventRunDefaultImpl:324 : running
>> >> >> > default event implementation
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventPollCleanupTimeouts:525 :
>> >> >> > Cleanup 8
>> >> >> > 2020-07-06 18:18:36.930+0000: 3273: debug : virEventPollCleanupHandles:574 :
>> >> >> > Cleanup 22
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=0 w=1, f=5 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=1 w=2, f=7 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=2 w=3, f=10 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=3 w=4, f=11 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=4 w=5, f=12 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=5 w=6, f=13 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=6 w=7, f=14 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=7 w=8, f=15 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=8 w=9, f=16 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=9 w=10, f=17 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.931+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=10 w=11, f=18 e=1 d=0
>> >> >> >
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=11 w=12, f=19 e=0 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=12 w=13, f=19 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=13 w=14, f=24 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=14 w=15, f=25 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=15 w=19, f=26 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=16 w=82, f=79 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=17 w=944, f=22 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=18 w=993, f=82 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=19 w=1001, f=30 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=20 w=1002, f=33 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.932+0000: 3273: debug : virEventPollMakePollFDs:401 :
>> >> >> > Prepare n=21 w=1004, f=83 e=1 d=0
>> >> >> > 2020-07-06 18:18:36.933+0000: 3273: debug : virEventPollCalculateTimeout:338 :
>> >> >> > Calculate expiry of 8 timers
>> >> >> > 2020-07-06 18:18:36.933+0000: 3273: debug : virEventPollCalculateTimeout:346 :
>> >> >> > Got a timeout scheduled for 1594059521930
>> >> >> > 2020-07-06 18:18:36.933+0000: 3273: debug : virEventPollCalculateTimeout:359 :
>> >> >> > Schedule timeout then=1594059521930 now=1594059516933
>> >> >> > 2020-07-06 18:18:36.933+0000: 3273: debug : virEventPollCalculateTimeout:369 :
>> >> >> > Timeout at 1594059521930 due in 4997 ms
>> >> >> > 2020-07-06 18:18:36.933+0000: 3273: info : virEventPollRunOnce:640 :
>> >> >> > EVENT_POLL_RUN: nhandles=21 timeout=4997
>> >> >> >
>> >> >> >
>> >> >> > Ceph itself seems to work, i.e. I can execute ceph -s / rbd -p <pool> ls -l,
>> >> >> > etc. That produces the output. It's just libvirt that seems to be broken.
>> >> >> >
>> >> >> > The version of libvirt installed is:
>> >> >> >
>> >> >> > libvirt-bin             4.0.0-1ubuntu8.1
>> >> >> >
>> >> >> >
>> >> >> >
>> >> >> > Any idea how I can make ceph Octopus to play nicely with libvirt?
>> >> >> >
>> >> >> > Cheers
>> >> >> >
>> >> >> > Andrei
>> >> >> >
>> >> >> > ----- Original Message -----
>> >> >> >> From: "Andrei Mikhailovsky" <andrei@xxxxxxxxxx>
>> >> >> >> To: "ceph-users" <ceph-users@xxxxxxx>
>> >> >> >> Sent: Monday, 29 June, 2020 20:40:01
>> >> >> >> Subject:  Octopus upgrade breaks Ubuntu 18.04 libvirt
>> >> >> >
>> >> >> >> Hello,
>> >> >> >>
>> >> >> >> I've upgraded ceph to Octopus (15.2.3 from repo) on one of the Ubuntu 18.04 host
>> >> >> >> servers. The update caused problem with libvirtd which hangs when it tries to
>> >> >> >> access the storage pools. The problem doesn't exist on Nautilus. The libvirtd
>> >> >> >> process simply hangs. Nothing seem to happen. The log file for the libvirtd
>> >> >> >> shows:
>> >> >> >>
>> >> >> >> 2020-06-29 19:30:51.556+0000: 12040: debug : virNetlinkEventCallback:707 :
>> >> >> >> dispatching to max 0 clients, called from event watch 11
>> >> >> >> 2020-06-29 19:30:51.556+0000: 12040: debug : virNetlinkEventCallback:720 : event
>> >> >> >> not handled.
>> >> >> >> 2020-06-29 19:30:51.556+0000: 12040: debug : virNetlinkEventCallback:707 :
>> >> >> >> dispatching to max 0 clients, called from event watch 11
>> >> >> >> 2020-06-29 19:30:51.556+0000: 12040: debug : virNetlinkEventCallback:720 : event
>> >> >> >> not handled.
>> >> >> >> 2020-06-29 19:30:51.557+0000: 12040: debug : virNetlinkEventCallback:707 :
>> >> >> >> dispatching to max 0 clients, called from event watch 11
>> >> >> >> 2020-06-29 19:30:51.557+0000: 12040: debug : virNetlinkEventCallback:720 : event
>> >> >> >> not handled.
>> >> >> >> 2020-06-29 19:30:51.591+0000: 12040: debug : virNetlinkEventCallback:707 :
>> >> >> >> dispatching to max 0 clients, called from event watch 11
>> >> >> >> 2020-06-29 19:30:51.591+0000: 12040: debug : virNetlinkEventCallback:720 : event
>> >> >> >> not handled.
>> >> >> >>
>> >> >> >> Running strace on the libvirtd process shows:
>> >> >> >>
>> >> >> >> root@ais-cloudhost1:/home/andrei# strace -p 12040
>> >> >> >> strace: Process 12040 attached
>> >> >> >> restart_syscall(<... resuming interrupted poll ...>
>> >> >> >>
>> >> >> >>
>> >> >> >> Nothing happens after that point.
>> >> >> >>
>> >> >> >> The same host server can get access to the ceph cluster and the pools by running
>> >> >> >> ceph -s or rbd -p <pool> ls -l commands for example.
>> >> >> >>
>> >> >> >> Need some help to get the host servers working again with Octopus.
>> >> >> >>
>> >> >> >> Cheers
>> >> >> >> _______________________________________________
>> >> >> >> ceph-users mailing list -- ceph-users@xxxxxxx
>> >> >> >> To unsubscribe send an email to ceph-users-leave@xxxxxxx
>> >> >> > _______________________________________________
>> >> >> > ceph-users mailing list -- ceph-users@xxxxxxx
>> >> >> > To unsubscribe send an email to ceph-users-leave@xxxxxxx
>> >> >> _______________________________________________
>> >> >> ceph-users mailing list -- ceph-users@xxxxxxx
>> >> >> To unsubscribe send an email to ceph-users-leave@xxxxxxx
>> >> >>
>> >> >
>> >> > [1] https://wiki.libvirt.org/page/DebugLogs
>> >> >
>> >> > --
>> >> > Jason
>> >>
>> >
>> >
>> > --
>> > Jason
>> _______________________________________________
>> ceph-users mailing list -- ceph-users@xxxxxxx
>> To unsubscribe send an email to ceph-users-leave@xxxxxxx
> 
> 
> 
> --
> Alexander E. Patrakov
> CV: http://pc.cd/PLz7
_______________________________________________
ceph-users mailing list -- ceph-users@xxxxxxx
To unsubscribe send an email to ceph-users-leave@xxxxxxx



[Index of Archives]     [Information on CEPH]     [Linux Filesystem Development]     [Ceph Development]     [Ceph Large]     [Ceph Dev]     [Linux USB Development]     [Video for Linux]     [Linux Audio Users]     [Yosemite News]     [Linux Kernel]     [Linux SCSI]     [xfs]


  Powered by Linux