To demonstrate the test plan I've performed a test run with packages from my SRU PPA: https://launchpad.net/~peter-sabaini/+archive/ubuntu/ceph-noble-19.2.1

Transcript:

juju add-model default
juju model-config logging-config="<root>=WARNING;unit=DEBUG"

juju add-machine --constraints "virt-type=virtual-machine" --series noble -n7

# let cluster settle
sleep 300
juju exec --all --wait 30m -- hostname 
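
# Note: instead of a fixed sleep, the settle step could be made deterministic;
# a rough sketch (not part of the original run), assuming all 7 machines
# eventually show "started" in the juju machines listing:
until [ "$(juju machines | grep -c started)" -eq 7 ] ; do sleep 10 ; done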

for i in $( seq 0 6 ) ; do echo $i ; echo -e 'Package: *\nPin: release a=noble\nPin-Priority: 1001' | juju ssh $i sudo tee /etc/apt/preferences.d/downgrade ; done
for i in $( seq 0 6 ) ; do echo $i ; juju ssh $i 'sudo apt update ; sudo apt install --allow-downgrades --yes ceph ceph-mon ceph-osd ceph-mds ceph-base ceph-common gdisk radosgw lvm2 parted smartmontools python3-pip xfsprogs apparmor-utils python3-pkg-resources python3-setuptools' ; done

juju deploy ceph-mon --channel latest/edge -n3 --to 0,1,2

juju deploy ceph-osd --channel latest/edge -n3 --to 3,4,5 --storage osd-devices="loop,2G"

juju deploy ceph-fs --channel latest/edge --to 6

juju integrate ceph-mon ceph-fs
juju integrate ceph-mon ceph-osd

sleep 300
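
# Note: as above, the sleep could be replaced by polling cluster health;
# sketch, assuming the mons form quorum within the wait:
until juju ssh ceph-mon/0 -- sudo ceph health 2>/dev/null | grep -q HEALTH_OK ; do sleep 15 ; done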

# Test run

# Add some data
juju ssh ceph-mon/0 'sudo mkdir -p /mnt/cephfs ; sudo apt install --yes ceph-fuse ; sudo ceph-fuse /mnt/cephfs ; echo -n "test" | sudo tee /mnt/cephfs/test.txt'

# Verify data
ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/1 'sudo mkdir -p /mnt/cephfs ; sudo apt install --yes ceph-fuse ; sudo ceph-fuse /mnt/cephfs ; sudo cat /mnt/cephfs/test.txt'
...
ceph-fuse[23659]: starting fuse
Ignoring invalid max threads value 4294967295 > max (100000).
testConnection to fd42:f498:c735:f734:216:3eff:fe5c:708b closed.


# Verify health
ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 -- sudo ceph health
HEALTH_OK
Connection to fd42:f498:c735:f734:216:3eff:fe0e:bcff closed.


# Verify versions
ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 -- sudo ceph versions
{
    "mon": {
        "ceph version 19.2.0~git20240301.4c76c50 (4c76c50a73f63ba48ccdf0adccce03b00d1d80c7) squid (dev)": 3
    },
    "mgr": {
        "ceph version 19.2.0~git20240301.4c76c50 (4c76c50a73f63ba48ccdf0adccce03b00d1d80c7) squid (dev)": 3
    },
    "osd": {
        "ceph version 19.2.0~git20240301.4c76c50 (4c76c50a73f63ba48ccdf0adccce03b00d1d80c7) squid (dev)": 3
    },
    "mds": {
        "ceph version 19.2.0~git20240301.4c76c50 (4c76c50a73f63ba48ccdf0adccce03b00d1d80c7) squid (dev)": 1
    },
    "overall": {
        "ceph version 19.2.0~git20240301.4c76c50 (4c76c50a73f63ba48ccdf0adccce03b00d1d80c7) squid (dev)": 10
    }
}
Connection to 10.249.249.241 closed.
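
# Note: since 'ceph versions' emits JSON, the per-daemon check can be reduced
# to the overall map with jq (sketch; assumes jq is installed on the client):
juju ssh ceph-mon/0 -- sudo ceph versions | jq '.overall'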

ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 -- apt-cache policy ceph
ceph:
  Installed: 19.2.0~git20240301.4c76c50-0ubuntu6
  Candidate: 19.2.0~git20240301.4c76c50-0ubuntu6
  Version table:
     19.2.0-0ubuntu0.24.04.2 500
        500 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 Packages
        500 http://security.ubuntu.com/ubuntu noble-security/main amd64 Packages
 *** 19.2.0~git20240301.4c76c50-0ubuntu6 1001
       1001 http://archive.ubuntu.com/ubuntu noble/main amd64 Packages
        100 /var/lib/dpkg/status
Connection to 10.249.249.241 closed.


# Upgrade ceph-mon/0 to 19.2.0
ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 'sudo rm -vf /etc/apt/preferences.d/downgrade ; sudo apt update ; sudo apt --yes --fix-broken install ; sudo apt upgrade --yes'
...

ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 -- apt-cache policy ceph
ceph:
  Installed: 19.2.0-0ubuntu0.24.04.2
  Candidate: 19.2.0-0ubuntu0.24.04.2
  Version table:
 *** 19.2.0-0ubuntu0.24.04.2 500
        500 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 Packages
        500 http://security.ubuntu.com/ubuntu noble-security/main amd64 Packages
        100 /var/lib/dpkg/status
     19.2.0~git20240301.4c76c50-0ubuntu6 500
        500 http://archive.ubuntu.com/ubuntu noble/main amd64 Packages
Connection to fd42:f498:c735:f734:216:3eff:fe0e:bcff closed.


# Verify: MON on ceph-mon/0 is down

ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/1 -- sudo ceph -s
  cluster:
    id:     ba5925eb-00ae-11f0-a85e-00163ebccab8
    health: HEALTH_WARN
            1/3 mons down, quorum juju-06bdc4-1,juju-06bdc4-2
            1 daemons have recently crashed
 
  services:
    mon: 3 daemons, quorum juju-06bdc4-1,juju-06bdc4-2 (age 80s), out of quorum: juju-06bdc4-0
    mgr: juju-06bdc4-2(active, since 25m), standbys: juju-06bdc4-1, juju-06bdc4-0
    mds: 1/1 daemons up
    osd: 3 osds: 3 up (since 25m), 3 in (since 26m)
 
  data:
    volumes: 1/1 healthy
    pools:   3 pools, 81 pgs
    objects: 25 objects, 583 KiB
    usage:   82 MiB used, 5.9 GiB / 6.0 GiB avail
    pgs:     81 active+clean
 
Connection to 10.249.249.31 closed.


# From the ceph-mon/0 logfile

     0> 2025-03-14T08:57:24.437+0000 7899fa420a80 -1 *** Caught signal (Aborted) **
 in thread 7899fa420a80 thread_name:ceph-mon

 ceph version 19.2.0 (16063ff2022298c9300e49a547a16ffda59baf13) squid (stable)
 1: /lib/x86_64-linux-gnu/libc.so.6(+0x45330) [0x7899fa245330]
 2: pthread_kill()
 3: gsignal()
 4: abort()
 5: /lib/x86_64-linux-gnu/libstdc++.so.6(+0xa5ff5) [0x7899fa6a5ff5]
 6: /lib/x86_64-linux-gnu/libstdc++.so.6(+0xbb0da) [0x7899fa6bb0da]
 7: (std::unexpected()+0) [0x7899fa6a5a55]
 8: /lib/x86_64-linux-gnu/libstdc++.so.6(+0xbb391) [0x7899fa6bb391]
 9: (ceph::buffer::v15_2_0::list::iterator_impl<true>::copy(unsigned int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >&)+0x193) [0x7899fb293593]
 10: (MDSMap::decode(ceph::buffer::v15_2_0::list::iterator_impl<true>&)+0xca1) [0x7899fb4c3ab1]
 11: (Filesystem::decode(ceph::buffer::v15_2_0::list::iterator_impl<true>&)+0x1c3) [0x7899fb4e4303]
 12: (FSMap::decode(ceph::buffer::v15_2_0::list::iterator_impl<true>&)+0x280) [0x7899fb4e6ef0]
 13: (MDSMonitor::update_from_paxos(bool*)+0x291) [0x60952cd80801]
 14: (Monitor::refresh_from_paxos(bool*)+0x124) [0x60952cb10164]
 15: (Monitor::preinit()+0x98e) [0x60952cb48fbe]
 16: main()
 17: /lib/x86_64-linux-gnu/libc.so.6(+0x2a1ca) [0x7899fa22a1ca]
 18: __libc_start_main()
 19: _start()
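
# Note: the same crash is also recorded by the crash module, which avoids
# digging through the unit's log file (sketch; the <crash-id> placeholder
# comes from the 'crash ls' output):
juju ssh ceph-mon/1 -- sudo ceph crash ls
juju ssh ceph-mon/1 -- sudo ceph crash info <crash-id>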

# Upgrade all machines to the 19.2.1 PPA

for i in $( seq 0 6 ) ; do echo $i ; juju ssh $i 'sudo rm -vf /etc/apt/preferences.d/downgrade' ; done
for i in $( seq 0 6 ) ; do echo $i ; juju ssh $i 'sudo add-apt-repository -y ppa:peter-sabaini/ceph-noble-19.2.1 ; sudo apt-get update' ; done
for i in $( seq 0 6 ) ; do echo $i ; juju ssh $i 'sudo apt upgrade --yes' ; done

# Restart ceph-mon/0
juju ssh ceph-mon/0 'sudo systemctl restart ceph-mon@juju-06bdc4-0.service '
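
# Note: optionally confirm the restarted mon rejoined quorum before checking
# versions (sketch, not part of the original run):
juju ssh ceph-mon/0 -- sudo ceph mon stat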

# Verify versions
ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 -- sudo ceph versions
{
    "mon": {
        "ceph version 19.2.1 (9efac4a81335940925dd17dbf407bfd6d3860d28) squid (stable)": 3
    },
    "mgr": {
        "ceph version 19.2.1 (9efac4a81335940925dd17dbf407bfd6d3860d28) squid (stable)": 3
    },
    "osd": {
        "ceph version 19.2.1 (9efac4a81335940925dd17dbf407bfd6d3860d28) squid (stable)": 3
    },
    "mds": {
        "ceph version 19.2.1 (9efac4a81335940925dd17dbf407bfd6d3860d28) squid (stable)": 1
    },
    "overall": {
        "ceph version 19.2.1 (9efac4a81335940925dd17dbf407bfd6d3860d28) squid (stable)": 10
    }
}

ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 -- apt-cache policy ceph
ceph:
  Installed: 19.2.1-0ubuntu0.24.04.1~bpo24.04.1~ppa202503131846
  Candidate: 19.2.1-0ubuntu0.24.04.1~bpo24.04.1~ppa202503131846
  Version table:
 *** 19.2.1-0ubuntu0.24.04.1~bpo24.04.1~ppa202503131846 500
        500 https://ppa.launchpadcontent.net/peter-sabaini/ceph-noble-19.2.1/ubuntu noble/main amd64 Packages
        100 /var/lib/dpkg/status
     19.2.0-0ubuntu0.24.04.2 500
        500 http://archive.ubuntu.com/ubuntu noble-updates/main amd64 Packages
        500 http://security.ubuntu.com/ubuntu noble-security/main amd64 Packages
     19.2.0~git20240301.4c76c50-0ubuntu6 500
        500 http://archive.ubuntu.com/ubuntu noble/main amd64 Packages
Connection to fd42:f498:c735:f734:216:3eff:fe0e:bcff closed.


# Verify data

ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 'sudo mkdir -p /mnt/cephfs ; sudo apt install --yes ceph-fuse ; sudo ceph-fuse /mnt/cephfs ; sudo cat /mnt/cephfs/test.txt'
Reading package lists... Done
Building dependency tree... Done
Reading state information... Done
ceph-fuse is already the newest version (19.2.1-0ubuntu0.24.04.1~bpo24.04.1~ppa202503131846).
0 upgraded, 0 newly installed, 0 to remove and 4 not upgraded.
2025-03-14T17:52:48.752+0000 7d54509dc400 -1 init, newargv = 0x56b3940a52d0 newargc=13
2025-03-14T17:52:48.752+0000 7d54509dc400 -1 init, args.argv = 0x56b3940a54a0 args.argc=4
ceph-fuse[72294]: starting ceph client
ceph-fuse[72294]: starting fuse
Ignoring invalid max threads value 4294967295 > max (100000).
testConnection to fd42:f498:c735:f734:216:3eff:fe0e:bcff closed.

ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/1 'sudo mkdir -p /mnt/cephfs ; sudo apt install --yes ceph-fuse ; sudo ceph-fuse /mnt/cephfs ; sudo cat /mnt/cephfs/test.txt'
Reading package lists... Done
Building dependency tree... Done
Reading state information... Done
ceph-fuse is already the newest version (19.2.1-0ubuntu0.24.04.1~bpo24.04.1~ppa202503131846).
0 upgraded, 0 newly installed, 0 to remove and 5 not upgraded.
2025-03-14T18:09:51.115+0000 75bd99484400 -1 init, newargv = 0x575d7f6a9630 newargc=13
2025-03-14T18:09:51.115+0000 75bd99484400 -1 init, args.argv = 0x575d7f6a9800 args.argc=4
ceph-fuse[15664]: starting ceph client
2025-03-14T18:09:51.161+0000 75bd99484400 -1 /mnt/cephfs already mounted by client54132
ceph-fuse[15664]: fuse failed to start
testConnection to 10.249.249.31 closed.
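
# Note: the "already mounted" error above is just the ceph-fuse mount left over
# from the first verification on ceph-mon/1; the cat still succeeds through that
# existing mount. To remount cleanly one could unmount first (sketch):
juju ssh ceph-mon/1 -- sudo umount /mnt/cephfs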


# Verify health

ubuntu@juju3-huge-jammy:~$ juju ssh ceph-mon/0 -- sudo ceph -s
  cluster:
    id:     ba5925eb-00ae-11f0-a85e-00163ebccab8
    health: HEALTH_WARN
            1 daemons have recently crashed
 
  services:
    mon: 3 daemons, quorum juju-06bdc4-1,juju-06bdc4-2,juju-06bdc4-0 (age 24m)
    mgr: juju-06bdc4-2(active, since 101m), standbys: juju-06bdc4-1, juju-06bdc4-0
    mds: 1/1 daemons up
    osd: 3 osds: 3 up (since 94m), 3 in (since 9h)
 
  data:
    volumes: 1/1 healthy
    pools:   3 pools, 81 pgs
    objects: 25 objects, 597 KiB
    usage:   96 MiB used, 5.9 GiB / 6.0 GiB avail
    pgs:     81 active+clean
 
Connection to fd42:f498:c735:f734:216:3eff:fe0e:bcff closed.
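
# Note: the remaining HEALTH_WARN only records the pre-upgrade mon crash; once
# reviewed it could be acknowledged via the crash module so the cluster returns
# to HEALTH_OK (sketch, not part of the original run):
juju ssh ceph-mon/0 -- sudo ceph crash archive-all
juju ssh ceph-mon/0 -- sudo ceph health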
