Subject: v7.2.13
Date: Tue Jul 16 08:40:38 2024 +0300
From: Michael Tokarev <mjt@tls.msk.ru>
Forwarded: not-needed

This is the difference between upstream qemu v7.2.12
and upstream qemu v7.2.13.

 .gitlab-ci.d/buildtest.yml                         | 34 ++++------
 .gitlab-ci.d/container-core.yml                    |  4 +-
 .gitlab-ci.d/container-cross.yml                   |  1 +
 VERSION                                            |  2 +-
 block.c                                            | 76 ++++++++++++++--------
 block/qcow2.c                                      | 17 ++++-
 chardev/char-stdio.c                               |  4 ++
 docs/devel/testing.rst                             |  6 ++
 hw/display/vga.c                                   |  7 ++
 hw/net/virtio-net.c                                | 18 +++--
 hw/virtio/virtio.c                                 |  1 -
 linux-user/syscall.c                               | 10 ++-
 target/arm/vec_helper.c                            |  4 +-
 target/i386/cpu.c                                  |  6 +-
 target/i386/tcg/translate.c                        |  2 +-
 tcg/loongarch64/tcg-target.c.inc                   | 32 +++++----
 .../dockerfiles/{centos8.docker => centos9.docker} | 11 ++--
 tests/docker/dockerfiles/fedora-win32-cross.docker |  4 +-
 tests/docker/dockerfiles/fedora-win64-cross.docker |  4 +-
 tests/docker/dockerfiles/fedora.docker             |  4 +-
 tests/docker/dockerfiles/opensuse-leap.docker      | 22 +++----
 tests/docker/dockerfiles/ubuntu2004.docker         |  2 +-
 tests/lcitool/libvirt-ci                           |  2 +-
 tests/lcitool/mappings.yml                         | 60 +++++++++++++++++
 tests/lcitool/refresh                              |  8 +--
 tests/lcitool/targets/centos-stream-8.yml          |  3 +
 tests/lcitool/targets/opensuse-leap-153.yml        |  3 +
 tests/qemu-iotests/061                             |  6 +-
 tests/qemu-iotests/061.out                         |  8 ++-
 tests/qemu-iotests/244                             | 19 +++++-
 tests/qemu-iotests/270                             | 14 +++-
 tests/vm/centos                                    |  4 +-
 32 files changed, 270 insertions(+), 128 deletions(-)

diff --git a/.gitlab-ci.d/buildtest.yml b/.gitlab-ci.d/buildtest.yml
index 7243b8079b..9b6da37582 100644
--- a/.gitlab-ci.d/buildtest.yml
+++ b/.gitlab-ci.d/buildtest.yml
@@ -162,9 +162,9 @@ crash-test-fedora:
 build-system-centos:
   extends: .native_build_job_template
   needs:
-    job: amd64-centos8-container
+    job: amd64-centos9-container
   variables:
-    IMAGE: centos8
+    IMAGE: centos9
     CONFIGURE_ARGS: --disable-nettle --enable-gcrypt --enable-fdt=system
       --enable-modules --enable-trace-backends=dtrace --enable-docs
       --enable-vfio-user-server
@@ -182,7 +182,7 @@ check-system-centos:
     - job: build-system-centos
       artifacts: true
   variables:
-    IMAGE: centos8
+    IMAGE: centos9
     MAKE_CHECK_ARGS: check
 
 avocado-system-centos:
@@ -191,7 +191,7 @@ avocado-system-centos:
     - job: build-system-centos
       artifacts: true
   variables:
-    IMAGE: centos8
+    IMAGE: centos9
     MAKE_CHECK_ARGS: check-avocado
 
 build-system-opensuse:
@@ -237,9 +237,9 @@ avocado-system-opensuse:
 build-tcg-disabled:
   extends: .native_build_job_template
   needs:
-    job: amd64-centos8-container
+    job: amd64-centos9-container
   variables:
-    IMAGE: centos8
+    IMAGE: centos9
   script:
     - mkdir build
     - cd build
@@ -469,7 +469,6 @@ tsan-build:
     CONFIGURE_ARGS: --enable-tsan --cc=clang-10 --cxx=clang++-10
           --enable-trace-backends=ust --enable-fdt=system --disable-slirp
     TARGETS: x86_64-softmmu ppc64-softmmu riscv64-softmmu x86_64-linux-user
-    MAKE_CHECK_ARGS: bench V=1
 
 # gprof/gcov are GCC features
 build-gprof-gcov:
@@ -560,29 +559,22 @@ build-coroutine-sigaltstack:
     MAKE_CHECK_ARGS: check-unit
 
 # Check our reduced build configurations
-build-without-default-devices:
+build-without-defaults:
   extends: .native_build_job_template
   needs:
-    job: amd64-centos8-container
+    job: amd64-centos9-container
   variables:
-    IMAGE: centos8
-    CONFIGURE_ARGS: --without-default-devices --disable-user
-
-build-without-default-features:
-  extends: .native_build_job_template
-  needs:
-    job: amd64-fedora-container
-  variables:
-    IMAGE: fedora
+    IMAGE: centos9
     CONFIGURE_ARGS:
+      --without-default-devices
       --without-default-features
-      --disable-capstone
+      --disable-fdt
       --disable-pie
       --disable-qom-cast-debug
       --disable-strip
-    TARGETS: avr-softmmu i386-softmmu mips64-softmmu s390x-softmmu sh4-softmmu
+    TARGETS: avr-softmmu mips64-softmmu s390x-softmmu sh4-softmmu
       sparc64-softmmu hexagon-linux-user i386-linux-user s390x-linux-user
-    MAKE_CHECK_ARGS: check-unit check-qtest SPEED=slow
+    MAKE_CHECK_ARGS: check-unit check-qtest-avr check-qtest-mips64
 
 build-libvhost-user:
   extends: .base_job_template
diff --git a/.gitlab-ci.d/container-core.yml b/.gitlab-ci.d/container-core.yml
index 08f8450fa1..5459447676 100644
--- a/.gitlab-ci.d/container-core.yml
+++ b/.gitlab-ci.d/container-core.yml
@@ -1,10 +1,10 @@
 include:
   - local: '/.gitlab-ci.d/container-template.yml'
 
-amd64-centos8-container:
+amd64-centos9-container:
   extends: .container_job_template
   variables:
-    NAME: centos8
+    NAME: centos9
 
 amd64-fedora-container:
   extends: .container_job_template
diff --git a/.gitlab-ci.d/container-cross.yml b/.gitlab-ci.d/container-cross.yml
index 2d560e9764..24343192ac 100644
--- a/.gitlab-ci.d/container-cross.yml
+++ b/.gitlab-ci.d/container-cross.yml
@@ -115,6 +115,7 @@ riscv64-debian-cross-container:
   allow_failure: true
   variables:
     NAME: debian-riscv64-cross
+    QEMU_JOB_OPTIONAL: 1
 
 # we can however build TCG tests using a non-sid base
 riscv64-debian-test-cross-container:
diff --git a/VERSION b/VERSION
index 4625f55e26..c0d5d580b2 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-7.2.12
+7.2.13
diff --git a/block.c b/block.c
index a18f052374..ea369a3fe5 100644
--- a/block.c
+++ b/block.c
@@ -85,6 +85,7 @@ static BlockDriverState *bdrv_open_inherit(const char *filename,
                                            BlockDriverState *parent,
                                            const BdrvChildClass *child_class,
                                            BdrvChildRole child_role,
+                                           bool parse_filename,
                                            Error **errp);
 
 static bool bdrv_recurse_has_child(BlockDriverState *bs,
@@ -2051,7 +2052,8 @@ static void parse_json_protocol(QDict *options, const char **pfilename,
  * block driver has been specified explicitly.
  */
 static int bdrv_fill_options(QDict **options, const char *filename,
-                             int *flags, Error **errp)
+                             int *flags, bool allow_parse_filename,
+                             Error **errp)
 {
     const char *drvname;
     bool protocol = *flags & BDRV_O_PROTOCOL;
@@ -2093,7 +2095,7 @@ static int bdrv_fill_options(QDict **options, const char *filename,
     if (protocol && filename) {
         if (!qdict_haskey(*options, "filename")) {
             qdict_put_str(*options, "filename", filename);
-            parse_filename = true;
+            parse_filename = allow_parse_filename;
         } else {
             error_setg(errp, "Can't specify 'file' and 'filename' options at "
                              "the same time");
@@ -3516,7 +3518,8 @@ int bdrv_open_backing_file(BlockDriverState *bs, QDict *parent_options,
     }
 
     backing_hd = bdrv_open_inherit(backing_filename, reference, options, 0, bs,
-                                   &child_of_bds, bdrv_backing_role(bs), errp);
+                                   &child_of_bds, bdrv_backing_role(bs), true,
+                                   errp);
     if (!backing_hd) {
         bs->open_flags |= BDRV_O_NO_BACKING;
         error_prepend(errp, "Could not open backing file: ");
@@ -3549,7 +3552,8 @@ free_exit:
 static BlockDriverState *
 bdrv_open_child_bs(const char *filename, QDict *options, const char *bdref_key,
                    BlockDriverState *parent, const BdrvChildClass *child_class,
-                   BdrvChildRole child_role, bool allow_none, Error **errp)
+                   BdrvChildRole child_role, bool allow_none,
+                   bool parse_filename, Error **errp)
 {
     BlockDriverState *bs = NULL;
     QDict *image_options;
@@ -3580,7 +3584,8 @@ bdrv_open_child_bs(const char *filename, QDict *options, const char *bdref_key,
     }
 
     bs = bdrv_open_inherit(filename, reference, image_options, 0,
-                           parent, child_class, child_role, errp);
+                           parent, child_class, child_role, parse_filename,
+                           errp);
     if (!bs) {
         goto done;
     }
@@ -3590,6 +3595,28 @@ done:
     return bs;
 }
 
+static BdrvChild *bdrv_open_child_common(const char *filename,
+                                         QDict *options, const char *bdref_key,
+                                         BlockDriverState *parent,
+                                         const BdrvChildClass *child_class,
+                                         BdrvChildRole child_role,
+                                         bool allow_none, bool parse_filename,
+                                         Error **errp)
+{
+    BlockDriverState *bs;
+
+    GLOBAL_STATE_CODE();
+
+    bs = bdrv_open_child_bs(filename, options, bdref_key, parent, child_class,
+                            child_role, allow_none, parse_filename, errp);
+    if (bs == NULL) {
+        return NULL;
+    }
+
+    return bdrv_attach_child(parent, bs, bdref_key, child_class, child_role,
+                             errp);
+}
+
 /*
  * Opens a disk image whose options are given as BlockdevRef in another block
  * device's options.
@@ -3611,18 +3638,9 @@ BdrvChild *bdrv_open_child(const char *filename,
                            BdrvChildRole child_role,
                            bool allow_none, Error **errp)
 {
-    BlockDriverState *bs;
-
-    GLOBAL_STATE_CODE();
-
-    bs = bdrv_open_child_bs(filename, options, bdref_key, parent, child_class,
-                            child_role, allow_none, errp);
-    if (bs == NULL) {
-        return NULL;
-    }
-
-    return bdrv_attach_child(parent, bs, bdref_key, child_class, child_role,
-                             errp);
+    return bdrv_open_child_common(filename, options, bdref_key, parent,
+                                  child_class, child_role, allow_none, false,
+                                  errp);
 }
 
 /*
@@ -3639,8 +3657,8 @@ int bdrv_open_file_child(const char *filename,
     role = parent->drv->is_filter ?
         (BDRV_CHILD_FILTERED | BDRV_CHILD_PRIMARY) : BDRV_CHILD_IMAGE;
 
-    if (!bdrv_open_child(filename, options, bdref_key, parent,
-                         &child_of_bds, role, false, errp))
+    if (!bdrv_open_child_common(filename, options, bdref_key, parent,
+                                &child_of_bds, role, false, true, errp))
     {
         return -EINVAL;
     }
@@ -3685,7 +3703,8 @@ BlockDriverState *bdrv_open_blockdev_ref(BlockdevRef *ref, Error **errp)
 
     }
 
-    bs = bdrv_open_inherit(NULL, reference, qdict, 0, NULL, NULL, 0, errp);
+    bs = bdrv_open_inherit(NULL, reference, qdict, 0, NULL, NULL, 0, false,
+                           errp);
     obj = NULL;
     qobject_unref(obj);
     visit_free(v);
@@ -3775,7 +3794,7 @@ static BlockDriverState *bdrv_open_inherit(const char *filename,
                                            BlockDriverState *parent,
                                            const BdrvChildClass *child_class,
                                            BdrvChildRole child_role,
-                                           Error **errp)
+                                           bool parse_filename, Error **errp)
 {
     int ret;
     BlockBackend *file = NULL;
@@ -3819,9 +3838,11 @@ static BlockDriverState *bdrv_open_inherit(const char *filename,
     }
 
     /* json: syntax counts as explicit options, as if in the QDict */
-    parse_json_protocol(options, &filename, &local_err);
-    if (local_err) {
-        goto fail;
+    if (parse_filename) {
+        parse_json_protocol(options, &filename, &local_err);
+        if (local_err) {
+            goto fail;
+        }
     }
 
     bs->explicit_options = qdict_clone_shallow(options);
@@ -3846,7 +3867,8 @@ static BlockDriverState *bdrv_open_inherit(const char *filename,
                                      parent->open_flags, parent->options);
     }
 
-    ret = bdrv_fill_options(&options, filename, &flags, &local_err);
+    ret = bdrv_fill_options(&options, filename, &flags, parse_filename,
+                            &local_err);
     if (ret < 0) {
         goto fail;
     }
@@ -3915,7 +3937,7 @@ static BlockDriverState *bdrv_open_inherit(const char *filename,
 
         file_bs = bdrv_open_child_bs(filename, options, "file", bs,
                                      &child_of_bds, BDRV_CHILD_IMAGE,
-                                     true, &local_err);
+                                     true, true, &local_err);
         if (local_err) {
             goto fail;
         }
@@ -4062,7 +4084,7 @@ BlockDriverState *bdrv_open(const char *filename, const char *reference,
     GLOBAL_STATE_CODE();
 
     return bdrv_open_inherit(filename, reference, options, flags, NULL,
-                             NULL, 0, errp);
+                             NULL, 0, true, errp);
 }
 
 /* Return true if the NULL-terminated @list contains @str */
diff --git a/block/qcow2.c b/block/qcow2.c
index 4d6666d3ff..c810424feb 100644
--- a/block/qcow2.c
+++ b/block/qcow2.c
@@ -1614,7 +1614,22 @@ static int coroutine_fn qcow2_do_open(BlockDriverState *bs, QDict *options,
         goto fail;
     }
 
-    if (open_data_file) {
+    if (open_data_file && (flags & BDRV_O_NO_IO)) {
+        /*
+         * Don't open the data file for 'qemu-img info' so that it can be used
+         * to verify that an untrusted qcow2 image doesn't refer to external
+         * files.
+         *
+         * Note: This still makes has_data_file() return true.
+         */
+        if (s->incompatible_features & QCOW2_INCOMPAT_DATA_FILE) {
+            s->data_file = NULL;
+        } else {
+            s->data_file = bs->file;
+        }
+        qdict_extract_subqdict(options, NULL, "data-file.");
+        qdict_del(options, "data-file");
+    } else if (open_data_file) {
         /* Open external data file */
         s->data_file = bdrv_open_child(NULL, options, "data-file", bs,
                                        &child_of_bds, BDRV_CHILD_DATA,
diff --git a/chardev/char-stdio.c b/chardev/char-stdio.c
index 3c648678ab..b960ddd4e4 100644
--- a/chardev/char-stdio.c
+++ b/chardev/char-stdio.c
@@ -41,6 +41,7 @@
 /* init terminal so that we can grab keys */
 static struct termios oldtty;
 static int old_fd0_flags;
+static int old_fd1_flags;
 static bool stdio_in_use;
 static bool stdio_allow_signal;
 static bool stdio_echo_state;
@@ -50,6 +51,8 @@ static void term_exit(void)
     if (stdio_in_use) {
         tcsetattr(0, TCSANOW, &oldtty);
         fcntl(0, F_SETFL, old_fd0_flags);
+        fcntl(1, F_SETFL, old_fd1_flags);
+        stdio_in_use = false;
     }
 }
 
@@ -102,6 +105,7 @@ static void qemu_chr_open_stdio(Chardev *chr,
 
     stdio_in_use = true;
     old_fd0_flags = fcntl(0, F_GETFL);
+    old_fd1_flags = fcntl(1, F_GETFL);
     tcgetattr(0, &oldtty);
     if (!g_unix_set_fd_nonblocking(0, true, NULL)) {
         error_setg_errno(errp, errno, "Failed to set FD nonblocking");
diff --git a/docs/devel/testing.rst b/docs/devel/testing.rst
index 98c26ecf18..b4c99be195 100644
--- a/docs/devel/testing.rst
+++ b/docs/devel/testing.rst
@@ -473,6 +473,12 @@ thus some extra preparation steps will be required first
    the ``libvirt-ci`` submodule to point to a commit that contains
    the ``mappings.yml`` update.
 
+For enterprise distros that default to old, end-of-life versions of the
+Python runtime, QEMU uses a separate set of mappings that work with more
+recent versions.  These can be found in ``tests/lcitool/mappings.yml``.
+Modifying this file should not be necessary unless the new pre-requisite
+is a Python library or tool.
+
 
 Adding new OS distros
 ^^^^^^^^^^^^^^^^^^^^^
diff --git a/hw/display/vga.c b/hw/display/vga.c
index 0cb26a791b..8e2d44bea3 100644
--- a/hw/display/vga.c
+++ b/hw/display/vga.c
@@ -1746,6 +1746,13 @@ static void vga_draw_blank(VGACommonState *s, int full_update)
     if (s->last_scr_width <= 0 || s->last_scr_height <= 0)
         return;
 
+    if (is_buffer_shared(surface)) {
+        /* unshare buffer, otherwise the blanking corrupts vga vram */
+        surface = qemu_create_displaysurface(s->last_scr_width,
+                                             s->last_scr_height);
+        dpy_gfx_replace_surface(s->con, surface);
+    }
+
     w = s->last_scr_width * surface_bytes_per_pixel(surface);
     d = surface_data(surface);
     for(i = 0; i < s->last_scr_height; i++) {
diff --git a/hw/net/virtio-net.c b/hw/net/virtio-net.c
index b6177a6afe..beadea5bf8 100644
--- a/hw/net/virtio-net.c
+++ b/hw/net/virtio-net.c
@@ -2646,18 +2646,14 @@ static int32_t virtio_net_flush_tx(VirtIONetQueue *q)
         out_sg = elem->out_sg;
         if (out_num < 1) {
             virtio_error(vdev, "virtio-net header not in first element");
-            virtqueue_detach_element(q->tx_vq, elem, 0);
-            g_free(elem);
-            return -EINVAL;
+            goto detach;
         }
 
         if (n->has_vnet_hdr) {
             if (iov_to_buf(out_sg, out_num, 0, &vhdr, n->guest_hdr_len) <
                 n->guest_hdr_len) {
                 virtio_error(vdev, "virtio-net header incorrect");
-                virtqueue_detach_element(q->tx_vq, elem, 0);
-                g_free(elem);
-                return -EINVAL;
+                goto detach;
             }
             if (n->needs_vnet_hdr_swap) {
                 virtio_net_hdr_swap(vdev, (void *) &vhdr);
@@ -2688,6 +2684,11 @@ static int32_t virtio_net_flush_tx(VirtIONetQueue *q)
                              n->guest_hdr_len, -1);
             out_num = sg_num;
             out_sg = sg;
+
+            if (out_num < 1) {
+                virtio_error(vdev, "virtio-net nothing to send");
+                goto detach;
+            }
         }
 
         ret = qemu_sendv_packet_async(qemu_get_subqueue(n->nic, queue_index),
@@ -2708,6 +2709,11 @@ drop:
         }
     }
     return num_packets;
+
+detach:
+    virtqueue_detach_element(q->tx_vq, elem, 0);
+    g_free(elem);
+    return -EINVAL;
 }
 
 static void virtio_net_tx_timer(void *opaque);
diff --git a/hw/virtio/virtio.c b/hw/virtio/virtio.c
index 4a35d7cb0c..1227e3d692 100644
--- a/hw/virtio/virtio.c
+++ b/hw/virtio/virtio.c
@@ -732,7 +732,6 @@ static void vring_packed_event_read(VirtIODevice *vdev,
     /* Make sure flags is seen before off_wrap */
     smp_rmb();
     e->off_wrap = virtio_lduw_phys_cached(vdev, cache, off_off);
-    virtio_tswap16s(vdev, &e->flags);
 }
 
 static void vring_packed_off_wrap_write(VirtIODevice *vdev,
diff --git a/linux-user/syscall.c b/linux-user/syscall.c
index 74240f99ad..53c46ae951 100644
--- a/linux-user/syscall.c
+++ b/linux-user/syscall.c
@@ -7228,11 +7228,17 @@ static inline int tswapid(int id)
 #else
 #define __NR_sys_setresgid __NR_setresgid
 #endif
+#ifdef __NR_setgroups32
+#define __NR_sys_setgroups __NR_setgroups32
+#else
+#define __NR_sys_setgroups __NR_setgroups
+#endif
 
 _syscall1(int, sys_setuid, uid_t, uid)
 _syscall1(int, sys_setgid, gid_t, gid)
 _syscall3(int, sys_setresuid, uid_t, ruid, uid_t, euid, uid_t, suid)
 _syscall3(int, sys_setresgid, gid_t, rgid, gid_t, egid, gid_t, sgid)
+_syscall2(int, sys_setgroups, int, size, gid_t *, grouplist)
 
 void syscall_init(void)
 {
@@ -11453,7 +11459,7 @@ static abi_long do_syscall1(CPUArchState *cpu_env, int num, abi_long arg1,
                 unlock_user(target_grouplist, arg2,
                             gidsetsize * sizeof(target_id));
             }
-            return get_errno(setgroups(gidsetsize, grouplist));
+            return get_errno(sys_setgroups(gidsetsize, grouplist));
         }
     case TARGET_NR_fchown:
         return get_errno(fchown(arg1, low2highuid(arg2), low2highgid(arg3)));
@@ -11789,7 +11795,7 @@ static abi_long do_syscall1(CPUArchState *cpu_env, int num, abi_long arg1,
                 }
                 unlock_user(target_grouplist, arg2, 0);
             }
-            return get_errno(setgroups(gidsetsize, grouplist));
+            return get_errno(sys_setgroups(gidsetsize, grouplist));
         }
 #endif
 #ifdef TARGET_NR_fchown32
diff --git a/target/arm/vec_helper.c b/target/arm/vec_helper.c
index f59d3b26ea..859366e264 100644
--- a/target/arm/vec_helper.c
+++ b/target/arm/vec_helper.c
@@ -842,7 +842,7 @@ void HELPER(gvec_fcmlah_idx)(void *vd, void *vn, void *vm, void *va,
     intptr_t index = extract32(desc, SIMD_DATA_SHIFT + 2, 2);
     uint32_t neg_real = flip ^ neg_imag;
     intptr_t elements = opr_sz / sizeof(float16);
-    intptr_t eltspersegment = 16 / sizeof(float16);
+    intptr_t eltspersegment = MIN(16 / sizeof(float16), elements);
     intptr_t i, j;
 
     /* Shift boolean to the sign bit so we can xor to negate.  */
@@ -904,7 +904,7 @@ void HELPER(gvec_fcmlas_idx)(void *vd, void *vn, void *vm, void *va,
     intptr_t index = extract32(desc, SIMD_DATA_SHIFT + 2, 2);
     uint32_t neg_real = flip ^ neg_imag;
     intptr_t elements = opr_sz / sizeof(float32);
-    intptr_t eltspersegment = 16 / sizeof(float32);
+    intptr_t eltspersegment = MIN(16 / sizeof(float32), elements);
     intptr_t i, j;
 
     /* Shift boolean to the sign bit so we can xor to negate.  */
diff --git a/target/i386/cpu.c b/target/i386/cpu.c
index 52a3020032..9c3e64c54b 100644
--- a/target/i386/cpu.c
+++ b/target/i386/cpu.c
@@ -5297,10 +5297,8 @@ void cpu_x86_cpuid(CPUX86State *env, uint32_t index, uint32_t count,
                 int host_vcpus_per_cache = 1 + ((*eax & 0x3FFC000) >> 14);
                 int vcpus_per_socket = env->nr_dies * cs->nr_cores *
                                        cs->nr_threads;
-                if (cs->nr_cores > 1) {
-                    *eax &= ~0xFC000000;
-                    *eax |= (pow2ceil(cs->nr_cores) - 1) << 26;
-                }
+                *eax &= ~0xFC000000;
+                *eax |= (pow2ceil(cs->nr_cores) - 1) << 26;
                 if (host_vcpus_per_cache > vcpus_per_socket) {
                     *eax &= ~0x3FFC000;
                     *eax |= (pow2ceil(vcpus_per_socket) - 1) << 14;
diff --git a/target/i386/tcg/translate.c b/target/i386/tcg/translate.c
index 417bc26e8f..8eb6a974e5 100644
--- a/target/i386/tcg/translate.c
+++ b/target/i386/tcg/translate.c
@@ -2696,7 +2696,7 @@ static void gen_enter(DisasContext *s, int esp_addend, int level)
     }
 
     /* Copy the FrameTemp value to EBP.  */
-    gen_op_mov_reg_v(s, a_ot, R_EBP, s->T1);
+    gen_op_mov_reg_v(s, d_ot, R_EBP, s->T1);
 
     /* Compute the final value of ESP.  */
     tcg_gen_subi_tl(s->T1, s->T1, esp_addend + size * level);
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index d326e28740..f1934b6d7b 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -332,8 +332,7 @@ static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg rd,
      * back to the slow path.
      */
 
-    intptr_t pc_offset;
-    tcg_target_long val_lo, val_hi, pc_hi, offset_hi;
+    intptr_t src_rx, pc_offset;
     tcg_target_long hi32, hi52;
     bool rd_high_bits_are_ones;
 
@@ -344,24 +343,23 @@ static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg rd,
     }
 
     /* PC-relative cases.  */
-    pc_offset = tcg_pcrel_diff(s, (void *)val);
-    if (pc_offset == sextreg(pc_offset, 0, 22) && (pc_offset & 3) == 0) {
-        /* Single pcaddu2i.  */
-        tcg_out_opc_pcaddu2i(s, rd, pc_offset >> 2);
-        return;
+    src_rx = (intptr_t)tcg_splitwx_to_rx(s->code_ptr);
+    if ((val & 3) == 0) {
+        pc_offset = val - src_rx;
+        if (pc_offset == sextreg(pc_offset, 0, 22)) {
+            /* Single pcaddu2i.  */
+            tcg_out_opc_pcaddu2i(s, rd, pc_offset >> 2);
+            return;
+        }
     }
 
-    if (pc_offset == (int32_t)pc_offset) {
-        /* Offset within 32 bits; load with pcalau12i + ori.  */
-        val_lo = sextreg(val, 0, 12);
-        val_hi = val >> 12;
-        pc_hi = (val - pc_offset) >> 12;
-        offset_hi = val_hi - pc_hi;
-
-        tcg_debug_assert(offset_hi == sextreg(offset_hi, 0, 20));
-        tcg_out_opc_pcalau12i(s, rd, offset_hi);
+    pc_offset = (val >> 12) - (src_rx >> 12);
+    if (pc_offset == sextreg(pc_offset, 0, 20)) {
+        /* Load with pcalau12i + ori.  */
+        tcg_target_long val_lo = val & 0xfff;
+        tcg_out_opc_pcalau12i(s, rd, pc_offset);
         if (val_lo != 0) {
-            tcg_out_opc_ori(s, rd, rd, val_lo & 0xfff);
+            tcg_out_opc_ori(s, rd, rd, val_lo);
         }
         return;
     }
diff --git a/tests/docker/dockerfiles/centos8.docker b/tests/docker/dockerfiles/centos9.docker
similarity index 91%
rename from tests/docker/dockerfiles/centos8.docker
rename to tests/docker/dockerfiles/centos9.docker
index 1f70d41aeb..62c4896191 100644
--- a/tests/docker/dockerfiles/centos8.docker
+++ b/tests/docker/dockerfiles/centos9.docker
@@ -1,15 +1,14 @@
 # THIS FILE WAS AUTO-GENERATED
 #
-#  $ lcitool dockerfile --layers all centos-stream-8 qemu
+#  $ lcitool dockerfile --layers all centos-stream-9 qemu
 #
 # https://gitlab.com/libvirt/libvirt-ci
 
-FROM quay.io/centos/centos:stream8
+FROM quay.io/centos/centos:stream9
 
 RUN dnf distro-sync -y && \
     dnf install 'dnf-command(config-manager)' -y && \
-    dnf config-manager --set-enabled -y powertools && \
-    dnf install -y centos-release-advanced-virtualization && \
+    dnf config-manager --set-enabled -y crb && \
     dnf install -y epel-release && \
     dnf install -y epel-next-release && \
     dnf install -y \
@@ -43,7 +42,6 @@ RUN dnf distro-sync -y && \
         glib2-static \
         glibc-langpack-en \
         glibc-static \
-        glusterfs-api-devel \
         gnutls-devel \
         gtk3-devel \
         hostname \
@@ -102,19 +100,18 @@ RUN dnf distro-sync -y && \
         python3-pip \
         python3-sphinx \
         python3-sphinx_rtd_theme \
+        python3-tomli \
         rdma-core-devel \
         rpm \
         sed \
         snappy-devel \
         spice-protocol \
-        spice-server-devel \
         systemd-devel \
         systemtap-sdt-devel \
         tar \
         texinfo \
         usbredir-devel \
         util-linux \
-        virglrenderer-devel \
         vte291-devel \
         which \
         xfsprogs-devel \
diff --git a/tests/docker/dockerfiles/fedora-win32-cross.docker b/tests/docker/dockerfiles/fedora-win32-cross.docker
index 75383ba185..cc5d1ac4be 100644
--- a/tests/docker/dockerfiles/fedora-win32-cross.docker
+++ b/tests/docker/dockerfiles/fedora-win32-cross.docker
@@ -1,10 +1,10 @@
 # THIS FILE WAS AUTO-GENERATED
 #
-#  $ lcitool dockerfile --layers all --cross mingw32 fedora-35 qemu
+#  $ lcitool dockerfile --layers all --cross mingw32 fedora-37 qemu
 #
 # https://gitlab.com/libvirt/libvirt-ci
 
-FROM registry.fedoraproject.org/fedora:35
+FROM registry.fedoraproject.org/fedora:37
 
 RUN dnf install -y nosync && \
     echo -e '#!/bin/sh\n\
diff --git a/tests/docker/dockerfiles/fedora-win64-cross.docker b/tests/docker/dockerfiles/fedora-win64-cross.docker
index 98c03dc13b..cabbf4edfc 100644
--- a/tests/docker/dockerfiles/fedora-win64-cross.docker
+++ b/tests/docker/dockerfiles/fedora-win64-cross.docker
@@ -1,10 +1,10 @@
 # THIS FILE WAS AUTO-GENERATED
 #
-#  $ lcitool dockerfile --layers all --cross mingw64 fedora-35 qemu
+#  $ lcitool dockerfile --layers all --cross mingw64 fedora-37 qemu
 #
 # https://gitlab.com/libvirt/libvirt-ci
 
-FROM registry.fedoraproject.org/fedora:35
+FROM registry.fedoraproject.org/fedora:37
 
 RUN dnf install -y nosync && \
     echo -e '#!/bin/sh\n\
diff --git a/tests/docker/dockerfiles/fedora.docker b/tests/docker/dockerfiles/fedora.docker
index d200c7fc10..f44b005000 100644
--- a/tests/docker/dockerfiles/fedora.docker
+++ b/tests/docker/dockerfiles/fedora.docker
@@ -1,10 +1,10 @@
 # THIS FILE WAS AUTO-GENERATED
 #
-#  $ lcitool dockerfile --layers all fedora-35 qemu
+#  $ lcitool dockerfile --layers all fedora-37 qemu
 #
 # https://gitlab.com/libvirt/libvirt-ci
 
-FROM registry.fedoraproject.org/fedora:35
+FROM registry.fedoraproject.org/fedora:37
 
 RUN dnf install -y nosync && \
     echo -e '#!/bin/sh\n\
diff --git a/tests/docker/dockerfiles/opensuse-leap.docker b/tests/docker/dockerfiles/opensuse-leap.docker
index 4361b01464..4f1191dc05 100644
--- a/tests/docker/dockerfiles/opensuse-leap.docker
+++ b/tests/docker/dockerfiles/opensuse-leap.docker
@@ -90,16 +90,9 @@ RUN zypper update -y && \
            pcre-devel-static \
            perl-base \
            pkgconfig \
-           python3-Pillow \
-           python3-PyYAML \
-           python3-Sphinx \
-           python3-base \
-           python3-numpy \
-           python3-opencv \
-           python3-pip \
-           python3-setuptools \
-           python3-sphinx_rtd_theme \
-           python3-wheel \
+           python39-base \
+           python39-pip \
+           python39-setuptools \
            rdma-core-devel \
            rpm \
            sed \
@@ -131,10 +124,15 @@ RUN zypper update -y && \
     ln -s /usr/bin/ccache /usr/libexec/ccache-wrappers/g++ && \
     ln -s /usr/bin/ccache /usr/libexec/ccache-wrappers/gcc
 
-RUN /usr/bin/pip3 install meson==0.56.0
+RUN /usr/bin/pip3.9 install \
+                    PyYAML \
+                    meson==0.63.2 \
+                    pillow \
+                    sphinx \
+                    sphinx-rtd-theme
 
 ENV CCACHE_WRAPPERSDIR "/usr/libexec/ccache-wrappers"
 ENV LANG "en_US.UTF-8"
 ENV MAKE "/usr/bin/make"
 ENV NINJA "/usr/bin/ninja"
-ENV PYTHON "/usr/bin/python3"
+ENV PYTHON "/usr/bin/python3.9"
diff --git a/tests/docker/dockerfiles/ubuntu2004.docker b/tests/docker/dockerfiles/ubuntu2004.docker
index 9417bca2fa..39c744eba9 100644
--- a/tests/docker/dockerfiles/ubuntu2004.docker
+++ b/tests/docker/dockerfiles/ubuntu2004.docker
@@ -140,7 +140,7 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
     ln -s /usr/bin/ccache /usr/libexec/ccache-wrappers/g++ && \
     ln -s /usr/bin/ccache /usr/libexec/ccache-wrappers/gcc
 
-RUN /usr/bin/pip3 install meson==0.56.0
+RUN /usr/bin/pip3 install meson==0.63.2
 
 ENV CCACHE_WRAPPERSDIR "/usr/libexec/ccache-wrappers"
 ENV LANG "en_US.UTF-8"
Submodule tests/lcitool/libvirt-ci e3eb28cf2e..319a534c22:
diff --git a/tests/lcitool/libvirt-ci/.gitlab-ci.yml b/tests/lcitool/libvirt-ci/.gitlab-ci.yml
index 21ccc2e6..1c9c8b70 100644
--- a/tests/lcitool/libvirt-ci/.gitlab-ci.yml
+++ b/tests/lcitool/libvirt-ci/.gitlab-ci.yml
@@ -148,9 +148,10 @@ unittests:
   needs: []
   before_script:
     - apk update
-    - apk add git
+    - apk add git ansible
   script:
-    - pip3 install setuptools pytest pyyaml
+    - pip3 install setuptools pytest
+    - pip3 install -r requirements.txt ansible-runner
     - python3 -m pytest --verbose
 
 x86_64-check-almalinux-8:
@@ -201,15 +202,15 @@ x86_64-check-debian-sid:
   variables:
     NAME: debian-sid
 
-x86_64-check-fedora-35:
+x86_64-check-fedora-36:
   extends: .check_container_template
   variables:
-    NAME: fedora-35
+    NAME: fedora-36
 
-x86_64-check-fedora-36:
+x86_64-check-fedora-37:
   extends: .check_container_template
   variables:
-    NAME: fedora-36
+    NAME: fedora-37
 
 x86_64-check-fedora-rawhide:
   extends: .check_container_template
diff --git a/tests/lcitool/libvirt-ci/docs/installation.rst b/tests/lcitool/libvirt-ci/docs/installation.rst
index 7a7f53e8..7134195d 100644
--- a/tests/lcitool/libvirt-ci/docs/installation.rst
+++ b/tests/lcitool/libvirt-ci/docs/installation.rst
@@ -58,6 +58,8 @@ in the previous section)
 
    $ pip3 install --user -r test-requirements.txt
 
+In addition, the ``ansible-inventory`` executable needs to be installed.
+
 Installing lcitool
 ------------------
 
diff --git a/tests/lcitool/libvirt-ci/examples/manifest.yml b/tests/lcitool/libvirt-ci/examples/manifest.yml
index 7bd9b4b1..2acfbd35 100644
--- a/tests/lcitool/libvirt-ci/examples/manifest.yml
+++ b/tests/lcitool/libvirt-ci/examples/manifest.yml
@@ -135,7 +135,7 @@ targets:
       - arch: ppc64le
       - arch: s390x
 
-  fedora-35: x86_64
+  fedora-37: x86_64
 
   fedora-rawhide:
     jobs:
diff --git a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/jobs/defaults.yml b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/jobs/defaults.yml
index 25099c74..70691565 100644
--- a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/jobs/defaults.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/jobs/defaults.yml
@@ -7,8 +7,8 @@ all_machines:
   - debian-10
   - debian-11
   - debian-sid
-  - fedora-35
   - fedora-36
+  - fedora-37
   - fedora-rawhide
   - freebsd-12
   - freebsd-13
@@ -22,8 +22,8 @@ rpm_machines:
   - almalinux-9
   - centos-stream-8
   - centos-stream-9
-  - fedora-35
   - fedora-36
+  - fedora-37
   - fedora-rawhide
 global_env: |
   . ~/lcitool_build_env
diff --git a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-dbus.yml b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-dbus.yml
index 79f81abb..273be6a8 100644
--- a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-dbus.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-dbus.yml
@@ -19,8 +19,8 @@
       - debian-10
       - debian-11
       - debian-sid
-      - fedora-35
       - fedora-36
+      - fedora-37
       - fedora-rawhide
       - opensuse-leap-153
       - opensuse-tumbleweed
diff --git a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-sandbox.yml b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-sandbox.yml
index bcf62b91..231c65bf 100644
--- a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-sandbox.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-sandbox.yml
@@ -6,8 +6,8 @@
       - debian-10
       - debian-11
       - debian-sid
-      - fedora-35
       - fedora-36
+      - fedora-37
       - fedora-rawhide
       - opensuse-leap-153
       - opensuse-tumbleweed
@@ -25,6 +25,6 @@
 - import_tasks: 'jobs/autotools-rpm-job.yml'
   vars:
     machines:
-      - fedora-35
       - fedora-36
+      - fedora-37
       - fedora-rawhide
diff --git a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-tck.yml b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-tck.yml
index e4beddd7..6451b440 100644
--- a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-tck.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt-tck.yml
@@ -14,6 +14,6 @@
 - import_tasks: 'jobs/perl-modulebuild-rpm-job.yml'
   vars:
     machines:
-      - fedora-35
       - fedora-36
+      - fedora-37
       - fedora-rawhide
diff --git a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt.yml b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt.yml
index 1b120441..7ec9d975 100644
--- a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/libvirt.yml
@@ -19,8 +19,8 @@
       - debian-10
       - debian-11
       - debian-sid
-      - fedora-35
       - fedora-36
+      - fedora-37
       - fedora-rawhide
       - opensuse-leap-153
       - opensuse-tumbleweed
diff --git a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/virt-viewer.yml b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/virt-viewer.yml
index 209885a1..eda0c0e6 100644
--- a/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/virt-viewer.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/ansible/playbooks/build/projects/virt-viewer.yml
@@ -13,6 +13,6 @@
     # The spec file for virt-viewer requires a very recent version
     # of spice-gtk, so we have to skip this job on older distros
     machines:
-      - fedora-35
       - fedora-36
+      - fedora-37
       - fedora-rawhide
diff --git a/tests/lcitool/libvirt-ci/lcitool/application.py b/tests/lcitool/libvirt-ci/lcitool/application.py
index 97cb0884..090c1b07 100644
--- a/tests/lcitool/libvirt-ci/lcitool/application.py
+++ b/tests/lcitool/libvirt-ci/lcitool/application.py
@@ -13,12 +13,13 @@ from pkg_resources import resource_filename
 from lcitool import util, LcitoolError
 from lcitool.config import Config
 from lcitool.inventory import Inventory
-from lcitool.package import package_names_by_type
+from lcitool.packages import Packages
 from lcitool.projects import Projects
+from lcitool.targets import Targets, BuildTarget
 from lcitool.formatters import DockerfileFormatter, ShellVariablesFormatter, JSONVariablesFormatter, ShellBuildEnvFormatter
-from lcitool.singleton import Singleton
 from lcitool.manifest import Manifest
 
+
 log = logging.getLogger(__name__)
 
 
@@ -44,7 +45,7 @@ class ApplicationError(LcitoolError):
         super().__init__(message, "Application")
 
 
-class Application(metaclass=Singleton):
+class Application:
     def __init__(self):
         # make sure the lcitool cache dir exists
         cache_dir_path = util.get_cache_dir()
@@ -68,11 +69,13 @@ class Application(metaclass=Singleton):
 
         base = resource_filename(__name__, "ansible")
         config = Config()
-        inventory = Inventory()
+        targets = Targets()
+        inventory = Inventory(targets, config)
+        packages = Packages()
         projects = Projects()
 
         hosts_expanded = inventory.expand_hosts(hosts_pattern)
-        projects_expanded = Projects().expand_names(projects_pattern)
+        projects_expanded = projects.expand_names(projects_pattern)
 
         if git_revision is not None:
             tokens = git_revision.split("/")
@@ -102,38 +105,16 @@ class Application(metaclass=Singleton):
         ansible_runner = AnsibleWrapper()
 
         for host in hosts_expanded:
-            facts = inventory.host_facts[host]
-            target = facts["target"]
-
             # packages are evaluated on a target level and since the
             # host->target mapping is N-1, we can skip hosts belonging to a
             # target group for which we already evaluated the package list
-            if target in group_vars:
+            target_name = inventory.get_host_target_name(host)
+            if target_name in group_vars:
                 continue
 
-            # resolve the package mappings to actual package names
-            internal_wanted_projects = ["base", "developer", "vm"]
-            if config.values["install"]["cloud_init"]:
-                internal_wanted_projects.append("cloud-init")
-
-            selected_projects = internal_wanted_projects + projects_expanded
-            pkgs_install = projects.get_packages(selected_projects, facts)
-            pkgs_early_install = projects.get_packages(["early_install"], facts)
-            pkgs_remove = projects.get_packages(["unwanted"], facts)
-            package_names = package_names_by_type(pkgs_install)
-            package_names_remove = package_names_by_type(pkgs_remove)
-            package_names_early_install = package_names_by_type(pkgs_early_install)
-
-            # merge the package lists to the Ansible group vars
-            packages = {}
-            packages["packages"] = package_names["native"]
-            packages["pypi_packages"] = package_names["pypi"]
-            packages["cpan_packages"] = package_names["cpan"]
-            packages["unwanted_packages"] = package_names_remove["native"]
-            packages["early_install_packages"] = package_names_early_install["native"]
-
-            group_vars[target] = packages
-            group_vars[target].update(inventory.target_facts[target])
+            target = BuildTarget(targets, packages, target_name)
+            group_vars[target_name] = inventory.get_group_vars(target, projects,
+                                                               projects_expanded)
 
         ansible_runner.prepare_env(playbookdir=playbook_base,
                                    inventories=[inventory.ansible_inventory],
@@ -149,17 +130,19 @@ class Application(metaclass=Singleton):
     def _action_hosts(self, args):
         self._entrypoint_debug(args)
 
-        inventory = Inventory()
+        config = Config()
+        targets = Targets()
+        inventory = Inventory(targets, config)
         for host in sorted(inventory.hosts):
             print(host)
 
     def _action_targets(self, args):
         self._entrypoint_debug(args)
 
-        inventory = Inventory()
-        for target in sorted(inventory.targets):
+        targets = Targets()
+        for target in sorted(targets.targets):
             if args.containerized:
-                facts = inventory.target_facts[target]
+                facts = targets.target_facts[target]
 
                 if facts["packaging"]["format"] not in ["apk", "deb", "rpm"]:
                     continue
@@ -180,7 +163,9 @@ class Application(metaclass=Singleton):
         self._entrypoint_debug(args)
 
         facts = {}
-        inventory = Inventory()
+        config = Config()
+        targets = Targets()
+        inventory = Inventory(targets, config)
         host = args.host
         target = args.target
 
@@ -193,10 +178,10 @@ class Application(metaclass=Singleton):
                     "to your inventory or use '--target <target>'"
                 )
 
-            if target not in inventory.targets:
+            if target not in targets.targets:
                 raise ApplicationError(f"Unsupported target OS '{target}'")
 
-            facts = inventory.target_facts[target]
+            facts = targets.target_facts[target]
         else:
             if target is not None:
                 raise ApplicationError(
@@ -236,16 +221,19 @@ class Application(metaclass=Singleton):
     def _action_variables(self, args):
         self._entrypoint_debug(args)
 
-        projects_expanded = Projects().expand_names(args.projects)
+        targets = Targets()
+        packages = Packages()
+        projects = Projects()
+        projects_expanded = projects.expand_names(args.projects)
 
         if args.format == "shell":
-            formatter = ShellVariablesFormatter()
+            formatter = ShellVariablesFormatter(projects)
         else:
-            formatter = JSONVariablesFormatter()
+            formatter = JSONVariablesFormatter(projects)
 
-        variables = formatter.format(args.target,
-                                     projects_expanded,
-                                     args.cross_arch)
+        target = BuildTarget(targets, packages, args.target, args.cross_arch)
+        variables = formatter.format(target,
+                                     projects_expanded)
 
         # No comments in json !
         if args.format != "json":
@@ -262,12 +250,16 @@ class Application(metaclass=Singleton):
     def _action_dockerfile(self, args):
         self._entrypoint_debug(args)
 
-        projects_expanded = Projects().expand_names(args.projects)
+        targets = Targets()
+        packages = Packages()
+        projects = Projects()
+        projects_expanded = projects.expand_names(args.projects)
+        target = BuildTarget(targets, packages, args.target, args.cross_arch)
 
-        dockerfile = DockerfileFormatter(args.base,
-                                         args.layers).format(args.target,
-                                                             projects_expanded,
-                                                             args.cross_arch)
+        dockerfile = DockerfileFormatter(projects,
+                                         args.base,
+                                         args.layers).format(target,
+                                                             projects_expanded)
 
         cliargv = [args.action]
         if args.base is not None:
@@ -283,11 +275,14 @@ class Application(metaclass=Singleton):
     def _action_buildenvscript(self, args):
         self._entrypoint_debug(args)
 
-        projects_expanded = Projects().expand_names(args.projects)
+        targets = Targets()
+        packages = Packages()
+        projects = Projects()
+        projects_expanded = projects.expand_names(args.projects)
+        target = BuildTarget(targets, packages, args.target, args.cross_arch)
 
-        buildenvscript = ShellBuildEnvFormatter().format(args.target,
-                                                         projects_expanded,
-                                                         args.cross_arch)
+        buildenvscript = ShellBuildEnvFormatter(projects).format(target,
+                                                                 projects_expanded)
 
         cliargv = [args.action]
         if args.cross_arch:
@@ -302,7 +297,10 @@ class Application(metaclass=Singleton):
         if args.base_dir is not None:
             base_path = Path(args.base_dir)
         ci_path = Path(args.ci_dir)
-        manifest = Manifest(args.manifest, args.quiet, ci_path, base_path)
+        targets = Targets()
+        packages = Packages()
+        projects = Projects()
+        manifest = Manifest(targets, packages, projects, args.manifest, args.quiet, ci_path, base_path)
         manifest.generate(args.dry_run)
 
     def run(self, args):
diff --git a/tests/lcitool/libvirt-ci/lcitool/config.py b/tests/lcitool/libvirt-ci/lcitool/config.py
index b83899b4..d42cda8c 100644
--- a/tests/lcitool/libvirt-ci/lcitool/config.py
+++ b/tests/lcitool/libvirt-ci/lcitool/config.py
@@ -12,7 +12,6 @@ from pathlib import Path
 from pkg_resources import resource_filename
 
 from lcitool import util, LcitoolError
-from lcitool.singleton import Singleton
 
 log = logging.getLogger(__name__)
 
@@ -47,7 +46,7 @@ class ValidationError(ConfigError):
         super().__init__(message)
 
 
-class Config(metaclass=Singleton):
+class Config:
 
     @property
     def values(self):
@@ -60,11 +59,15 @@ class Config(metaclass=Singleton):
 
     def __init__(self):
         self._values = None
+        self._config_file_dir = util.get_config_dir()
         self._config_file_paths = [
-            Path(util.get_config_dir(), fname) for fname in
+            self.get_config_path(fname) for fname in
             ["config.yml", "config.yaml"]
         ]
 
+    def get_config_path(self, *args):
+        return Path(self._config_file_dir, *args)
+
     def _load_config(self):
         # Load the template config containing the defaults first, this must
         # always succeed.
@@ -149,7 +152,7 @@ class Config(metaclass=Singleton):
 
     def _validate(self):
         if self._values is None:
-            paths = ", ".join([str(p) for p in self._config_file_paths()])
+            paths = ", ".join([str(p) for p in self._config_file_paths])
             raise ValidationError(f"Missing or empty configuration file, tried {paths}")
 
         self._validate_section("install", ["root_password"])
diff --git a/tests/lcitool/libvirt-ci/lcitool/configs/kickstart.cfg b/tests/lcitool/libvirt-ci/lcitool/configs/kickstart.cfg
index cc3e103f..51db9963 100644
--- a/tests/lcitool/libvirt-ci/lcitool/configs/kickstart.cfg
+++ b/tests/lcitool/libvirt-ci/lcitool/configs/kickstart.cfg
@@ -38,7 +38,7 @@ rootpw --plaintext root
 # remaining space to the root partition
 ignoredisk --only-use=vda
 zerombr
-clearpart --none
+clearpart --drives=vda --all --disklabel=msdos
 part / --fstype=ext4 --size=2048 --grow
 part swap --fstype=swap --size=256
 
diff --git a/tests/lcitool/libvirt-ci/lcitool/facts/mappings.yml b/tests/lcitool/libvirt-ci/lcitool/facts/mappings.yml
index 06c8032a..c54241e4 100644
--- a/tests/lcitool/libvirt-ci/lcitool/facts/mappings.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/facts/mappings.yml
@@ -1291,7 +1291,6 @@ mappings:
     CentOSStream8: netcf-devel
     Debian10: libnetcf-dev
     Debian11: libnetcf-dev
-    Fedora35: netcf-devel
     Ubuntu1804: libnetcf-dev
     Ubuntu2004: libnetcf-dev
     cross-policy-default: skip
@@ -1639,7 +1638,6 @@ mappings:
 
   publican:
     deb: publican
-    Fedora35: publican
     Fedora36: publican
 
   pulseaudio:
@@ -2203,8 +2201,9 @@ pypi_mappings:
   python3-dbus:
     MacOS: dbus-python
 
+  # higher versions are rejected by python3-sphinx-rtd-theme
   python3-docutils:
-    default: docutils
+    default: docutils<0.18
 
   python3-gobject:
     MacOS: PyGObject
diff --git a/tests/lcitool/libvirt-ci/lcitool/facts/projects/libvirt-php.yml b/tests/lcitool/libvirt-ci/lcitool/facts/projects/libvirt-php.yml
index f10c6894..4dcfde49 100644
--- a/tests/lcitool/libvirt-ci/lcitool/facts/projects/libvirt-php.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/facts/projects/libvirt-php.yml
@@ -10,7 +10,6 @@ packages:
   - libxml2
   - make
   - php
-  - php-imagick
   - pkg-config
   - rpmbuild
   - xmllint
diff --git a/tests/lcitool/libvirt-ci/lcitool/facts/projects/qemu.yml b/tests/lcitool/libvirt-ci/lcitool/facts/projects/qemu.yml
index 425459c5..117307ee 100644
--- a/tests/lcitool/libvirt-ci/lcitool/facts/projects/qemu.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/facts/projects/qemu.yml
@@ -89,6 +89,7 @@ packages:
  - pulseaudio
  - python3
  - python3-PyYAML
+ - python3-docutils
  - python3-numpy
  - python3-opencv
  - python3-pillow
diff --git a/tests/lcitool/libvirt-ci/lcitool/facts/targets/fedora-35.yml b/tests/lcitool/libvirt-ci/lcitool/facts/targets/fedora-37.yml
similarity index 82%
rename from lcitool/facts/targets/fedora-35.yml
rename to lcitool/facts/targets/fedora-37.yml
index 2e8e320c..5513995c 100644
--- a/tests/lcitool/libvirt-ci/lcitool/facts/targets/fedora-35.yml
+++ b/tests/lcitool/libvirt-ci/lcitool/facts/targets/fedora-37.yml
@@ -1,7 +1,7 @@
 ---
 os:
   name: 'Fedora'
-  version: '35'
+  version: '37'
 
 packaging:
   format: 'rpm'
@@ -21,7 +21,7 @@ ansible_python_package: python3
 ansible_python_interpreter: /usr/bin/python3
 
 install:
-  url: https://download.fedoraproject.org/pub/fedora/linux/releases/35/Everything/x86_64/os
+  url: https://download.fedoraproject.org/pub/fedora/linux/releases/37/Everything/x86_64/os
 
 containers:
-  base: registry.fedoraproject.org/fedora:35
+  base: registry.fedoraproject.org/fedora:37
diff --git a/tests/lcitool/libvirt-ci/lcitool/formatters.py b/tests/lcitool/libvirt-ci/lcitool/formatters.py
index 114499a6..f799b466 100644
--- a/tests/lcitool/libvirt-ci/lcitool/formatters.py
+++ b/tests/lcitool/libvirt-ci/lcitool/formatters.py
@@ -7,13 +7,12 @@
 import abc
 import json
 import logging
+import shlex
 
 from pkg_resources import resource_filename
 
 from lcitool import util, LcitoolError
-from lcitool.inventory import Inventory
-from lcitool.projects import Projects
-from lcitool.package import package_names_by_type
+from lcitool.packages import package_names_by_type
 
 
 log = logging.getLogger(__name__)
@@ -50,6 +49,9 @@ class Formatter(metaclass=abc.ABCMeta):
     This an abstract base class that each formatter must subclass.
     """
 
+    def __init__(self, projects):
+        self._projects = projects
+
     @abc.abstractmethod
     def format(self):
         """
@@ -70,28 +72,27 @@ class Formatter(metaclass=abc.ABCMeta):
             return c.read().rstrip()
 
     def _generator_build_varmap(self,
-                                facts,
-                                selected_projects,
-                                cross_arch):
-        projects = Projects()
+                                target,
+                                selected_projects):
+        projects = self._projects
 
         # we need the 'base' internal project here, but packages for internal
         # projects are not resolved via the public API, so it requires special
         # handling
         pkgs = {}
-        pkgs.update(projects.internal_projects["base"].get_packages(facts, cross_arch))
+        pkgs.update(projects.internal["base"].get_packages(target))
 
         # we can now load packages for the rest of the projects
-        pkgs.update(projects.get_packages(selected_projects, facts, cross_arch))
+        pkgs.update(projects.get_packages(selected_projects, target))
         package_names = package_names_by_type(pkgs)
 
         varmap = {
-            "packaging_command": facts["packaging"]["command"],
-            "paths_ccache": facts["paths"]["ccache"],
-            "paths_make": facts["paths"]["make"],
-            "paths_ninja": facts["paths"]["ninja"],
-            "paths_python": facts["paths"]["python"],
-            "paths_pip3": facts["paths"]["pip3"],
+            "packaging_command": target.facts["packaging"]["command"],
+            "paths_ccache": target.facts["paths"]["ccache"],
+            "paths_make": target.facts["paths"]["make"],
+            "paths_ninja": target.facts["paths"]["ninja"],
+            "paths_python": target.facts["paths"]["python"],
+            "paths_pip3": target.facts["paths"]["pip3"],
 
             "cross_arch": None,
             "cross_abi": None,
@@ -104,44 +105,22 @@ class Formatter(metaclass=abc.ABCMeta):
             "cpan_pkgs": package_names["cpan"],
         }
 
-        if cross_arch:
-            varmap["cross_arch"] = cross_arch
-            varmap["cross_abi"] = util.native_arch_to_abi(cross_arch)
+        if target.cross_arch:
+            varmap["cross_arch"] = target.cross_arch
+            varmap["cross_abi"] = util.native_arch_to_abi(target.cross_arch)
 
-            if facts["packaging"]["format"] == "deb":
-                cross_arch_deb = util.native_arch_to_deb_arch(cross_arch)
+            if target.facts["packaging"]["format"] == "deb":
+                cross_arch_deb = util.native_arch_to_deb_arch(target.cross_arch)
                 varmap["cross_arch_deb"] = cross_arch_deb
 
         log.debug(f"Generated varmap: {varmap}")
         return varmap
 
-    def _generator_prepare(self, target, selected_projects, cross_arch):
-        log.debug(f"Generating varmap for "
-                  f"target='{target}', "
-                  f"projects='{selected_projects}', "
-                  f"cross_arch='{cross_arch}'")
-
-        name = self.__class__.__name__.lower()
-
-        try:
-            facts = Inventory().target_facts[target]
-        except KeyError:
-            raise FormatterError(f"Invalid target '{target}'")
-
-        # We can only generate Dockerfiles for Linux
-        if (name == "dockerfileformatter" and
-            facts["packaging"]["format"] not in ["apk", "deb", "rpm"]):
-            raise FormatterError(f"Target {target} doesn't support this generator")
-
-        varmap = self._generator_build_varmap(facts,
-                                              selected_projects,
-                                              cross_arch)
-        return facts, cross_arch, varmap
-
 
 class BuildEnvFormatter(Formatter):
 
-    def __init__(self, indent=0, pkgcleanup=False, nosync=False):
+    def __init__(self, inventory, indent=0, pkgcleanup=False, nosync=False):
+        super().__init__(inventory)
         self._indent = indent
         self._pkgcleanup = pkgcleanup
         self._nosync = nosync
@@ -151,23 +130,22 @@ class BuildEnvFormatter(Formatter):
             return strings[0]
 
         align = " \\\n" + (" " * (self._indent + len(command + " ")))
+        strings = [shlex.quote(x) for x in strings]
         return align[1:] + align.join(strings)
 
     def _generator_build_varmap(self,
-                                facts,
-                                selected_projects,
-                                cross_arch):
-        varmap = super()._generator_build_varmap(facts,
-                                                 selected_projects,
-                                                 cross_arch)
+                                target,
+                                selected_projects):
+        varmap = super()._generator_build_varmap(target,
+                                                 selected_projects)
 
         varmap["nosync"] = ""
         if self._nosync:
-            if facts["packaging"]["format"] == "deb":
+            if target.facts["packaging"]["format"] == "deb":
                 varmap["nosync"] = "eatmydata "
-            elif facts["packaging"]["format"] == "rpm" and facts["os"]["name"] == "Fedora":
+            elif target.facts["packaging"]["format"] == "rpm" and target.facts["os"]["name"] == "Fedora":
                 varmap["nosync"] = "nosync "
-            elif facts["packaging"]["format"] == "apk":
+            elif target.facts["packaging"]["format"] == "apk":
                 # TODO: 'libeatmydata' package is present in 'testing' repo
                 # for Alpine Edge. Once it graduates to 'main' repo we
                 # should use it here, and see later comment about adding
@@ -176,14 +154,14 @@ class BuildEnvFormatter(Formatter):
                 pass
 
         nosync = varmap["nosync"]
-        varmap["pkgs"] = self._align(nosync + facts["packaging"]["command"],
+        varmap["pkgs"] = self._align(nosync + target.facts["packaging"]["command"],
                                      varmap["pkgs"])
 
         if varmap["cross_pkgs"]:
-            varmap["cross_pkgs"] = self._align(nosync + facts["packaging"]["command"],
+            varmap["cross_pkgs"] = self._align(nosync + target.facts["packaging"]["command"],
                                                varmap["cross_pkgs"])
         if varmap["pypi_pkgs"]:
-            varmap["pypi_pkgs"] = self._align(nosync + facts["paths"]["pip3"],
+            varmap["pypi_pkgs"] = self._align(nosync + target.facts["paths"]["pip3"],
                                               varmap["pypi_pkgs"])
         if varmap["cpan_pkgs"]:
             varmap["cpan_pkgs"] = self._align(nosync + "cpanm",
@@ -191,7 +169,7 @@ class BuildEnvFormatter(Formatter):
 
         return varmap
 
-    def _format_commands_ccache(self, cross_arch, varmap):
+    def _format_commands_ccache(self, target, varmap):
         commands = []
         compilers = set()
 
@@ -213,14 +191,15 @@ class BuildEnvFormatter(Formatter):
             ])
 
             for compiler in sorted(compilers):
-                if cross_arch:
+                if target.cross_arch:
                     compiler = "{cross_abi}-" + compiler
                 commands.extend([
                     "ln -s {paths_ccache} /usr/libexec/ccache-wrappers/" + compiler,
                 ])
         return commands
 
-    def _format_commands_pkglist(self, facts):
+    def _format_commands_pkglist(self, target):
+        facts = target.facts
         commands = []
         if facts["packaging"]["format"] == "apk":
             commands.extend(["apk list | sort > /packages.txt"])
@@ -232,7 +211,8 @@ class BuildEnvFormatter(Formatter):
             commands.extend(["rpm -qa | sort > /packages.txt"])
         return commands
 
-    def _format_commands_native(self, facts, cross_arch, varmap):
+    def _format_commands_native(self, target, varmap):
+        facts = target.facts
         commands = []
         osname = facts["os"]["name"]
         osversion = facts["os"]["version"]
@@ -356,9 +336,9 @@ class BuildEnvFormatter(Formatter):
                         "{nosync}{packaging_command} clean all -y",
                     ])
 
-        if not cross_arch:
-            commands.extend(self._format_commands_pkglist(facts))
-            commands.extend(self._format_commands_ccache(None, varmap))
+        if not target.cross_arch:
+            commands.extend(self._format_commands_pkglist(target))
+            commands.extend(self._format_commands_ccache(target, varmap))
 
         commands = [c.format(**varmap) for c in commands]
 
@@ -386,7 +366,8 @@ class BuildEnvFormatter(Formatter):
 
         return env
 
-    def _format_commands_foreign(self, facts, cross_arch, varmap):
+    def _format_commands_foreign(self, target, varmap):
+        facts = target.facts
         cross_commands = []
 
         if facts["packaging"]["format"] == "deb":
@@ -394,7 +375,7 @@ class BuildEnvFormatter(Formatter):
                 "export DEBIAN_FRONTEND=noninteractive",
                 "dpkg --add-architecture {cross_arch_deb}",
             ])
-            if cross_arch == "riscv64":
+            if target.cross_arch == "riscv64":
                 cross_commands.extend([
                     "{nosync}{packaging_command} install debian-ports-archive-keyring",
                     "{nosync}echo 'deb http://ftp.ports.debian.org/debian-ports/ sid main' > /etc/apt/sources.list.d/ports.list",
@@ -420,7 +401,7 @@ class BuildEnvFormatter(Formatter):
                     "{nosync}{packaging_command} clean all -y",
                 ])
 
-        if not cross_arch.startswith("mingw"):
+        if not target.cross_arch.startswith("mingw"):
             cross_commands.extend([
                 "mkdir -p /usr/local/share/meson/cross",
                 "echo \"{cross_meson}\" > /usr/local/share/meson/cross/{cross_abi}",
@@ -429,14 +410,14 @@ class BuildEnvFormatter(Formatter):
             cross_meson = self._get_meson_cross(varmap["cross_abi"])
             varmap["cross_meson"] = cross_meson.replace("\n", "\\n\\\n")
 
-        cross_commands.extend(self._format_commands_pkglist(facts))
-        cross_commands.extend(self._format_commands_ccache(cross_arch, varmap))
+        cross_commands.extend(self._format_commands_pkglist(target))
+        cross_commands.extend(self._format_commands_ccache(target, varmap))
 
         cross_commands = [c.format(**varmap) for c in cross_commands]
 
         return cross_commands
 
-    def _format_env_foreign(self, cross_arch, varmap):
+    def _format_env_foreign(self, target, varmap):
         env = {}
         env["ABI"] = varmap["cross_abi"]
 
@@ -444,7 +425,7 @@ class BuildEnvFormatter(Formatter):
             env["CONFIGURE_OPTS"] = "--host=" + varmap["cross_abi"]
 
         if "meson" in varmap["mappings"]:
-            if cross_arch.startswith("mingw"):
+            if target.cross_arch.startswith("mingw"):
                 env["MESON_OPTS"] = "--cross-file=/usr/share/mingw/toolchain-" + varmap["cross_arch"] + ".meson"
             else:
                 env["MESON_OPTS"] = "--cross-file=" + varmap["cross_abi"]
@@ -454,8 +435,9 @@ class BuildEnvFormatter(Formatter):
 
 class DockerfileFormatter(BuildEnvFormatter):
 
-    def __init__(self, base=None, layers="all"):
-        super().__init__(indent=len("RUN "),
+    def __init__(self, inventory, base=None, layers="all"):
+        super().__init__(inventory,
+                         indent=len("RUN "),
                          pkgcleanup=True,
                          nosync=True)
         self._base = base
@@ -469,17 +451,17 @@ class DockerfileFormatter(BuildEnvFormatter):
             lines.append(f"\nENV {key} \"{val}\"")
         return "".join(lines)
 
-    def _format_section_base(self, facts):
+    def _format_section_base(self, target):
         strings = []
         if self._base:
             base = self._base
         else:
-            base = facts["containers"]["base"]
+            base = target.facts["containers"]["base"]
         strings.append(f"FROM {base}")
         return strings
 
-    def _format_section_native(self, facts, cross_arch, varmap):
-        groups = self._format_commands_native(facts, cross_arch, varmap)
+    def _format_section_native(self, target, varmap):
+        groups = self._format_commands_native(target, varmap)
 
         strings = []
         for commands in groups:
@@ -489,25 +471,25 @@ class DockerfileFormatter(BuildEnvFormatter):
         strings.append(self._format_env(env))
         return strings
 
-    def _format_section_foreign(self, facts, cross_arch, varmap):
-        commands = self._format_commands_foreign(facts, cross_arch, varmap)
+    def _format_section_foreign(self, target, varmap):
+        commands = self._format_commands_foreign(target, varmap)
 
         strings = ["\nRUN " + " && \\\n    ".join(commands)]
 
-        env = self._format_env_foreign(cross_arch, varmap)
+        env = self._format_env_foreign(target, varmap)
         strings.append(self._format_env(env))
         return strings
 
-    def _format_dockerfile(self, target, project, facts, cross_arch, varmap):
+    def _format_dockerfile(self, target, project, varmap):
         strings = []
-        strings.extend(self._format_section_base(facts))
+        strings.extend(self._format_section_base(target))
         if self._layers in ["all", "native"]:
-            strings.extend(self._format_section_native(facts, cross_arch, varmap))
-        if cross_arch and self._layers in ["all", "foreign"]:
-            strings.extend(self._format_section_foreign(facts, cross_arch, varmap))
+            strings.extend(self._format_section_native(target, varmap))
+        if target.cross_arch and self._layers in ["all", "foreign"]:
+            strings.extend(self._format_section_foreign(target, varmap))
         return strings
 
-    def format(self, target, selected_projects, cross_arch):
+    def format(self, target, selected_projects):
         """
         Generates and formats a Dockerfile.
 
@@ -521,17 +503,18 @@ class DockerfileFormatter(BuildEnvFormatter):
         """
 
         log.debug(f"Generating Dockerfile for projects '{selected_projects}' "
-                  f"on target '{target}' (cross_arch={cross_arch})")
+                  f"on target {target}")
+
+        # We can only generate Dockerfiles for Linux
+        if (target.facts["packaging"]["format"] not in ["apk", "deb", "rpm"]):
+            raise DockerfileError(f"Target {target} doesn't support this generator")
 
         try:
-            facts, cross_arch, varmap = self._generator_prepare(target,
-                                                                selected_projects,
-                                                                cross_arch)
+            varmap = self._generator_build_varmap(target, selected_projects)
         except FormatterError as ex:
             raise DockerfileError(str(ex))
 
-        return '\n'.join(self._format_dockerfile(target, selected_projects,
-                                                 facts, cross_arch, varmap))
+        return '\n'.join(self._format_dockerfile(target, selected_projects, varmap))
 
 
 class VariablesFormatter(Formatter):
@@ -559,7 +542,7 @@ class VariablesFormatter(Formatter):
     def _format_variables(varmap):
         pass
 
-    def format(self, target, selected_projects, cross_arch):
+    def format(self, target, selected_projects):
         """
         Generates and formats environment variables as KEY=VAL pairs.
 
@@ -572,12 +555,10 @@ class VariablesFormatter(Formatter):
         """
 
         log.debug(f"Generating variables for projects '{selected_projects} on "
-                  f"target '{target}' (cross_arch={cross_arch})")
+                  f"target {target}")
 
         try:
-            _, _, varmap = self._generator_prepare(target,
-                                                   selected_projects,
-                                                   cross_arch)
+            varmap = self._generator_build_varmap(target, selected_projects)
         except FormatterError as ex:
             raise VariablesError(str(ex))
 
@@ -608,8 +589,9 @@ class JSONVariablesFormatter(VariablesFormatter):
 
 class ShellBuildEnvFormatter(BuildEnvFormatter):
 
-    def __init__(self, base=None, layers="all"):
-        super().__init__(indent=len("    "),
+    def __init__(self, inventory, base=None, layers="all"):
+        super().__init__(inventory,
+                         indent=len("    "),
                          pkgcleanup=False,
                          nosync=False)
 
@@ -621,25 +603,25 @@ class ShellBuildEnvFormatter(BuildEnvFormatter):
             exp.append(f"export {key}=\"{val}\"")
         return "\n" + "\n".join(exp)
 
-    def _format_buildenv(self, target, project, facts, cross_arch, varmap):
+    def _format_buildenv(self, target, project, varmap):
         strings = [
             "function install_buildenv() {",
         ]
-        groups = self._format_commands_native(facts, cross_arch, varmap)
+        groups = self._format_commands_native(target, varmap)
         for commands in groups:
             strings.extend(["    " + c for c in commands])
-        if cross_arch:
-            for command in self._format_commands_foreign(facts, cross_arch, varmap):
+        if target.cross_arch:
+            for command in self._format_commands_foreign(target, varmap):
                 strings.append("    " + command)
         strings.append("}")
 
         strings.append(self._format_env(self._format_env_native(varmap)))
-        if cross_arch:
+        if target.cross_arch:
             strings.append(self._format_env(
-                self._format_env_foreign(cross_arch, varmap)))
+                self._format_env_foreign(target, varmap)))
         return strings
 
-    def format(self, target, selected_projects, cross_arch):
+    def format(self, target, selected_projects):
         """
         Generates and formats a Shell script for preparing a build env.
 
@@ -653,14 +635,11 @@ class ShellBuildEnvFormatter(BuildEnvFormatter):
         """
 
         log.debug(f"Generating Shell Build Env for projects '{selected_projects}' "
-                  f"on target '{target}' (cross_arch={cross_arch})")
+                  f"on target {target}")
 
         try:
-            facts, cross_arch, varmap = self._generator_prepare(target,
-                                                                selected_projects,
-                                                                cross_arch)
+            varmap = self._generator_build_varmap(target, selected_projects)
         except FormatterError as ex:
             raise ShellBuildEnvError(str(ex))
 
-        return '\n'.join(self._format_buildenv(target, selected_projects,
-                                               facts, cross_arch, varmap))
+        return '\n'.join(self._format_buildenv(target, selected_projects, varmap))
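
A rough sketch (not part of the patch) of how the refactored formatter API is
driven after this series: formatters are constructed with the Projects
database (the first constructor argument is still named "inventory" upstream,
but a Projects instance is what manifest.py passes), and format() now takes a
BuildTarget instead of a (target, cross_arch) pair.  The target and project
names below are illustrative:

    from lcitool.formatters import DockerfileFormatter
    from lcitool.packages import Packages
    from lcitool.projects import Projects
    from lcitool.targets import Targets, BuildTarget

    targets = Targets()
    packages = Packages()
    projects = Projects()

    # "debian-11" / "qemu" are examples; any target/project known to the
    # facts directory works the same way.
    target = BuildTarget(targets, packages, "debian-11", cross_arch="s390x")
    print(DockerfileFormatter(projects).format(target, ["qemu"]))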
diff --git a/tests/lcitool/libvirt-ci/lcitool/inventory.py b/tests/lcitool/libvirt-ci/lcitool/inventory.py
index 9752ca60..fe2b929b 100644
--- a/tests/lcitool/libvirt-ci/lcitool/inventory.py
+++ b/tests/lcitool/libvirt-ci/lcitool/inventory.py
@@ -4,15 +4,10 @@
 #
 # SPDX-License-Identifier: GPL-2.0-or-later
 
-import copy
 import logging
-import yaml
-
-from pathlib import Path
-from pkg_resources import resource_filename
 
 from lcitool import util, LcitoolError
-from lcitool.singleton import Singleton
+from lcitool.packages import package_names_by_type
 
 log = logging.getLogger(__name__)
 
@@ -24,7 +19,7 @@ class InventoryError(LcitoolError):
         super().__init__(message, "Inventory")
 
 
-class Inventory(metaclass=Singleton):
+class Inventory():
 
     @property
     def ansible_inventory(self):
@@ -32,16 +27,6 @@ class Inventory(metaclass=Singleton):
             self._ansible_inventory = self._get_ansible_inventory()
         return self._ansible_inventory
 
-    @property
-    def target_facts(self):
-        if self._target_facts is None:
-            self._target_facts = self._load_target_facts()
-        return self._target_facts
-
-    @property
-    def targets(self):
-        return list(self.target_facts.keys())
-
     @property
     def host_facts(self):
         if self._host_facts is None:
@@ -52,22 +37,18 @@ class Inventory(metaclass=Singleton):
     def hosts(self):
         return list(self.host_facts.keys())
 
-    def __init__(self):
-        self._target_facts = None
+    def __init__(self, targets, config):
+        self._targets = targets
+        self._config = config
         self._host_facts = None
         self._ansible_inventory = None
 
-    @staticmethod
-    def _read_facts_from_file(yaml_path):
-        log.debug(f"Loading facts from '{yaml_path}'")
-        with open(yaml_path, "r") as infile:
-            return yaml.safe_load(infile)
-
     def _get_ansible_inventory(self):
         from lcitool.ansible_wrapper import AnsibleWrapper, AnsibleWrapperError
 
         inventory_sources = []
-        inventory_path = Path(util.get_config_dir(), "inventory")
+        inventory_path = self._config.get_config_path("inventory")
+        log.debug(f"Using '{inventory_path}' for lcitool inventory")
         if inventory_path.exists():
             inventory_sources.append(inventory_path)
 
@@ -76,7 +57,7 @@ class Inventory(metaclass=Singleton):
 
         ansible_runner = AnsibleWrapper()
         ansible_runner.prepare_env(inventories=inventory_sources,
-                                   group_vars=self.target_facts)
+                                   group_vars=self._targets.target_facts)
 
         log.debug(f"Running ansible-inventory on '{inventory_sources}'")
         try:
@@ -100,56 +81,6 @@ class Inventory(metaclass=Singleton):
 
         return inventory
 
-    @staticmethod
-    def _validate_target_facts(target_facts, target):
-        fname = target + ".yml"
-
-        actual_osname = target_facts["os"]["name"].lower()
-        if not target.startswith(actual_osname + "-"):
-            raise InventoryError(f'OS name "{target_facts["os"]["name"]}" does not match file name {fname}')
-        target = target[len(actual_osname) + 1:]
-
-        actual_version = target_facts["os"]["version"].lower()
-        expected_version = target.replace("-", "")
-        if expected_version != actual_version:
-            raise InventoryError(f'OS version "{target_facts["os"]["version"]}" does not match version in file name {fname} ({expected_version})')
-
-    def _load_target_facts(self):
-        def merge_dict(source, dest):
-            for key in source.keys():
-                if key not in dest:
-                    dest[key] = copy.deepcopy(source[key])
-                    continue
-
-                if isinstance(source[key], list) or isinstance(dest[key], list):
-                    raise InventoryError("cannot merge lists")
-                if isinstance(source[key], dict) != isinstance(dest[key], dict):
-                    raise InventoryError("cannot merge dictionaries with non-dictionaries")
-                if isinstance(source[key], dict):
-                    merge_dict(source[key], dest[key])
-
-        facts = {}
-        targets_path = Path(resource_filename(__name__, "facts/targets/"))
-        targets_all_path = Path(targets_path, "all.yml")
-
-        # first load the shared facts from targets/all.yml
-        shared_facts = self._read_facts_from_file(targets_all_path)
-
-        # then load the rest of the facts
-        for entry in targets_path.iterdir():
-            if not entry.is_file() or entry.suffix != ".yml" or entry.name == "all.yml":
-                continue
-
-            target = entry.stem
-            facts[target] = self._read_facts_from_file(entry)
-            self._validate_target_facts(facts[target], target)
-            facts[target]["target"] = target
-
-            # missing per-distro facts fall back to shared facts
-            merge_dict(shared_facts, facts[target])
-
-        return facts
-
     def _load_host_facts(self):
         facts = {}
         groups = {}
@@ -184,7 +115,7 @@ class Inventory(metaclass=Singleton):
 
         _rec(self.ansible_inventory["all"], "all")
 
-        targets = set(self.targets)
+        targets = set(self._targets.targets)
         for host_name, host_groups in groups.items():
             host_targets = host_groups.intersection(targets)
 
@@ -209,3 +140,29 @@ class Inventory(metaclass=Singleton):
         except Exception as ex:
             log.debug(f"Failed to load expand '{pattern}'")
             raise InventoryError(f"Failed to expand '{pattern}': {ex}")
+
+    def get_host_target_name(self, host):
+        return self.host_facts[host]["target"]
+
+    def get_group_vars(self, target, projects, projects_expanded):
+        # resolve the package mappings to actual package names
+        internal_wanted_projects = ["base", "developer", "vm"]
+        if self._config.values["install"]["cloud_init"]:
+            internal_wanted_projects.append("cloud-init")
+
+        selected_projects = internal_wanted_projects + projects_expanded
+        pkgs_install = projects.get_packages(selected_projects, target)
+        pkgs_early_install = projects.get_packages(["early_install"], target)
+        pkgs_remove = projects.get_packages(["unwanted"], target)
+        package_names = package_names_by_type(pkgs_install)
+        package_names_remove = package_names_by_type(pkgs_remove)
+        package_names_early_install = package_names_by_type(pkgs_early_install)
+
+        # merge the package lists to the Ansible group vars
+        group_vars = dict(target.facts)
+        group_vars["packages"] = package_names["native"]
+        group_vars["pypi_packages"] = package_names["pypi"]
+        group_vars["cpan_packages"] = package_names["cpan"]
+        group_vars["unwanted_packages"] = package_names_remove["native"]
+        group_vars["early_install_packages"] = package_names_early_install["native"]
+        return group_vars
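
For reference, the slimmed-down Inventory is no longer a singleton owning the
target facts; it is constructed with the Targets and Config objects it
depends on, mirroring the conftest.py fixture at the end of this patch.  A
minimal sketch, assuming a working ansible-inventory setup:

    from lcitool.config import Config
    from lcitool.inventory import Inventory
    from lcitool.targets import Targets

    inventory = Inventory(Targets(), Config())
    print(inventory.hosts)      # host facts are still resolved lazily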
diff --git a/tests/lcitool/libvirt-ci/lcitool/manifest.py b/tests/lcitool/libvirt-ci/lcitool/manifest.py
index c4cb8c0a..2ad53582 100644
--- a/tests/lcitool/libvirt-ci/lcitool/manifest.py
+++ b/tests/lcitool/libvirt-ci/lcitool/manifest.py
@@ -9,8 +9,8 @@ import yaml
 from pathlib import Path
 
 from lcitool.formatters import DockerfileFormatter, ShellVariablesFormatter, ShellBuildEnvFormatter
-from lcitool.inventory import Inventory
 from lcitool import gitlab, util, LcitoolError
+from lcitool.targets import BuildTarget
 
 log = logging.getLogger(__name__)
 
@@ -24,7 +24,10 @@ class ManifestError(LcitoolError):
 
 class Manifest:
 
-    def __init__(self, configfp, quiet=False, cidir=Path("ci"), basedir=None):
+    def __init__(self, targets, packages, projects, configfp, quiet=False, cidir=Path("ci"), basedir=None):
+        self._targets = targets
+        self._packages = packages
+        self._projects = projects
         self.configpath = configfp.name
         self.values = yaml.safe_load(configfp)
         self.quiet = quiet
@@ -88,7 +91,6 @@ class Manifest:
             targets = self.values["targets"] = {}
         have_containers = False
         have_cirrus = False
-        inventory = Inventory()
         for target, targetinfo in targets.items():
             if type(targetinfo) == str:
                 targets[target] = {"jobs": [{"arch": targetinfo}]}
@@ -99,7 +101,7 @@ class Manifest:
             jobsinfo = targetinfo["jobs"]
 
             try:
-                facts = inventory.target_facts[target]
+                facts = self._targets.target_facts[target]
             except KeyError:
                 raise ValueError(f"Invalid target '{target}'")
 
@@ -205,27 +207,26 @@ class Manifest:
                 if not dryrun:
                     header = util.generate_file_header(["manifest",
                                                         self.configpath])
-                    payload = formatter.format(target,
-                                               wantprojects,
-                                               arch)
+                    payload = formatter.format(BuildTarget(self._targets, self._packages, target, arch),
+                                               wantprojects)
                     util.atomic_write(filename, header + payload + "\n")
 
         return generated
 
     def _generate_containers(self, dryrun):
-        formatter = DockerfileFormatter()
+        formatter = DockerfileFormatter(self._projects)
         return self._generate_formatter(dryrun,
                                         "containers", "Dockerfile",
                                         formatter, "containers")
 
     def _generate_cirrus(self, dryrun):
-        formatter = ShellVariablesFormatter()
+        formatter = ShellVariablesFormatter(self._projects)
         return self._generate_formatter(dryrun,
                                         "cirrus", "vars",
                                         formatter, "cirrus")
 
     def _generate_buildenv(self, dryrun):
-        formatter = ShellBuildEnvFormatter()
+        formatter = ShellBuildEnvFormatter(self._projects)
         return self._generate_formatter(dryrun,
                                         "buildenv", "sh",
                                         formatter, "containers")
@@ -416,7 +417,6 @@ class Manifest:
 
     def _generate_build_jobs(self, targettype, cross, jobfunc):
         jobs = []
-        inventory = Inventory()
         for target, targetinfo in self.values["targets"].items():
             if not targetinfo["enabled"]:
                 continue
@@ -424,7 +424,7 @@ class Manifest:
                 continue
 
             try:
-                facts = inventory.target_facts[target]
+                facts = self._targets.target_facts[target]
             except KeyError:
                 raise ManifestError(f"Invalid target '{target}'")
 
diff --git a/tests/lcitool/libvirt-ci/lcitool/package.py b/tests/lcitool/libvirt-ci/lcitool/packages.py
similarity index 61%
rename from lcitool/package.py
rename to lcitool/packages.py
index d267e2f2..6f3c3139 100644
--- a/tests/lcitool/libvirt-ci/lcitool/package.py
+++ b/tests/lcitool/libvirt-ci/lcitool/packages.py
@@ -30,7 +30,7 @@ Exported classes:
     - CrossPackage
     - PyPIPackage
     - CPANPackage
-    - PackageFactory
+    - Packages
 
 Exported functions:
     - package_names_by_type
@@ -39,6 +39,9 @@ Exported functions:
 
 import abc
 import logging
+import yaml
+
+from pkg_resources import resource_filename
 
 from lcitool import util, LcitoolError
 
@@ -90,8 +93,8 @@ class Package(metaclass=abc.ABCMeta):
         - PyPIPackage
         - CPANPackage
 
-    Do not instantiate any of the specific package subclasses, instead, use an
-    instance of the PackageFactory class which does that for you transparently.
+    Do not instantiate any of the specific package subclasses, instead, use
+    the Packages class which does that for you transparently.
     Then use this public interface to interact with the instance itself.
 
     Attributes:
@@ -99,7 +102,7 @@ class Package(metaclass=abc.ABCMeta):
         :ivar mapping: the generic package name that will resolve to @name
     """
 
-    def __init__(self, pkg_mapping):
+    def __init__(self, mappings, pkg_mapping, keys, target):
         """
         Initialize the package with a generic package name
 
@@ -107,9 +110,11 @@ class Package(metaclass=abc.ABCMeta):
         """
 
         self.mapping = pkg_mapping
-        self.name = None
+        self.name = self._eval(mappings, target, keys)
+        if self.name is None:
+            raise PackageEval(f"No mapping for '{pkg_mapping}'")
 
-    def _eval(self, mappings, keys=["default"]):
+    def _eval(self, mappings, target, keys):
         """
         Resolves package mapping to the actual name of the package.
 
@@ -138,32 +143,26 @@ class CrossPackage(Package):
     def __init__(self,
                  mappings,
                  pkg_mapping,
-                 pkg_format,
                  base_keys,
-                 cross_arch):
-
-        super().__init__(pkg_mapping)
-
-        self.name = self._eval(mappings, pkg_format, base_keys, cross_arch)
-        if self.name is None:
-            raise PackageEval(f"No mapping for '{pkg_mapping}'")
-
-    def _eval(self, mappings, pkg_format, base_keys, cross_arch):
-        cross_keys = ["cross-" + cross_arch + "-" + k for k in base_keys]
+                 target):
+        cross_keys = ["cross-" + target.cross_arch + "-" + k for k in base_keys]
 
-        if pkg_format == "deb":
+        if target.facts["packaging"]["format"] == "deb":
             # For Debian-based distros, the name of the foreign package
             # is usually the same as the native package, but there might
             # be architecture-specific overrides, so we have to look both
             # at the neutral keys and at the specific ones
-            arch_keys = [cross_arch + "-" + k for k in base_keys]
+            arch_keys = [target.cross_arch + "-" + k for k in base_keys]
             cross_keys.extend(arch_keys + base_keys)
 
-        pkg_name = super()._eval(mappings, keys=cross_keys)
+        super().__init__(mappings, pkg_mapping, cross_keys, target)
+
+    def _eval(self, mappings, target, keys):
+        pkg_name = super()._eval(mappings, target, keys)
         if pkg_name is None:
             return None
 
-        if pkg_format == "deb":
+        if target.facts["packaging"]["format"] == "deb":
             # For Debian-based distros, the name of the foreign package
             # is obtained by appending the foreign architecture (in
             # Debian format) to the name of the native package.
@@ -171,7 +170,7 @@ class CrossPackage(Package):
             # The exception to this is cross-compilers, where we have
             # to install the package for the native architecture in
             # order to be able to build for the foreign architecture
-            cross_arch_deb = util.native_arch_to_deb_arch(cross_arch)
+            cross_arch_deb = util.native_arch_to_deb_arch(target.cross_arch)
             if self.mapping not in ["gcc", "g++"]:
                 pkg_name = pkg_name + ":" + cross_arch_deb
         return pkg_name
@@ -182,154 +181,150 @@ class NativePackage(Package):
     def __init__(self,
                  mappings,
                  pkg_mapping,
-                 base_keys):
-
-        super().__init__(pkg_mapping)
-
-        self.name = self._eval(mappings, base_keys)
-        if self.name is None:
-            raise PackageEval(f"No mapping for '{pkg_mapping}'")
-
-    def _eval(self, mappings, base_keys):
+                 base_keys,
+                 target):
         native_arch = util.get_native_arch()
         native_keys = [native_arch + "-" + k for k in base_keys] + base_keys
-
-        return super()._eval(mappings, keys=native_keys)
+        super().__init__(mappings, pkg_mapping, native_keys, target)
 
 
 class PyPIPackage(Package):
-
-    def __init__(self,
-                 mappings,
-                 pkg_mapping,
-                 base_keys):
-
-        super().__init__(pkg_mapping)
-
-        self.name = self._eval(mappings, keys=base_keys)
-        if self.name is None:
-            raise PackageEval(f"No mapping for '{pkg_mapping}'")
+    pass
 
 
 class CPANPackage(Package):
-
-    def __init__(self,
-                 mappings,
-                 pkg_mapping,
-                 base_keys):
-
-        super().__init__(pkg_mapping)
-
-        self.name = self._eval(mappings, keys=base_keys)
-        if self.name is None:
-            raise PackageEval(f"No mapping for '{pkg_mapping}'")
+    pass
 
 
-class PackageFactory:
+class Packages:
     """
-    Factory producing Package instances.
-
-    Creates Package class instances based on the generic package mapping name
-    which will be resolved to the actual package name the moment a Package
-    instance is created by this factory.
+    Database of package mappings.  Package instances representing the actual
+    package names are created based on the generic package mappings.
 
     """
 
-    def __init__(self, mappings, facts):
-        """
-        Initialize package factory model.
-
-        :param mappings: dictionary of ALL existing package mappings, i.e.
-                         including Python and CPAN ones
-        :param facts: dictionary of target OS facts
-        """
-
-        def _generate_base_keys(facts):
-            base_keys = [
-                # keys are ordered by priority
-                facts["os"]["name"] + facts["os"]["version"],
-                facts["os"]["name"],
-                facts["packaging"]["format"],
-                "default"
-            ]
-            return base_keys
-
-        self._mappings = mappings["mappings"]
-        self._pypi_mappings = mappings["pypi_mappings"]
-        self._cpan_mappings = mappings["cpan_mappings"]
-        self._facts = facts
-        self._base_keys = _generate_base_keys(facts)
-
-    def _get_cross_policy(self, pkg_mapping):
-        for k in ["cross-policy-" + k for k in self._base_keys]:
-            if k in self._mappings[pkg_mapping]:
-                cross_policy = self._mappings[pkg_mapping][k]
+    def __init__(self):
+        self._mappings = None
+        self._pypi_mappings = None
+        self._cpan_mappings = None
+
+    @staticmethod
+    def _base_keys(target):
+        return [
+            target.facts["os"]["name"] + target.facts["os"]["version"],
+            target.facts["os"]["name"],
+            target.facts["packaging"]["format"],
+            "default"
+        ]
+
+    def _get_cross_policy(self, pkg_mapping, target):
+        base_keys = self._base_keys(target)
+        for k in ["cross-policy-" + k for k in base_keys]:
+            if k in self.mappings[pkg_mapping]:
+                cross_policy = self.mappings[pkg_mapping][k]
                 if cross_policy not in ["native", "foreign", "skip"]:
                     raise Exception(
                         f"Unexpected cross arch policy {cross_policy} for "
                         f"{pkg_mapping}"
                     )
                 return cross_policy
-        return None
+        return "native"
 
-    def _get_native_package(self, pkg_mapping):
-        return NativePackage(self._mappings, pkg_mapping, self._base_keys)
+    def _get_native_package(self, pkg_mapping, target):
+        base_keys = self._base_keys(target)
+        return NativePackage(self.mappings, pkg_mapping, base_keys, target)
 
-    def _get_pypi_package(self, pkg_mapping):
-        return PyPIPackage(self._pypi_mappings, pkg_mapping, self._base_keys)
+    def _get_pypi_package(self, pkg_mapping, target):
+        base_keys = self._base_keys(target)
+        return PyPIPackage(self.pypi_mappings, pkg_mapping, base_keys, target)
 
-    def _get_cpan_package(self, pkg_mapping):
-        return CPANPackage(self._cpan_mappings, pkg_mapping, self._base_keys)
+    def _get_cpan_package(self, pkg_mapping, target):
+        base_keys = self._base_keys(target)
+        return CPANPackage(self.cpan_mappings, pkg_mapping, base_keys, target)
 
-    def _get_noncross_package(self, pkg_mapping):
+    def _get_noncross_package(self, pkg_mapping, target):
         package_resolvers = [self._get_native_package,
                              self._get_pypi_package,
                              self._get_cpan_package]
 
         for resolver in package_resolvers:
             try:
-                return resolver(pkg_mapping)
+                return resolver(pkg_mapping, target)
             except PackageEval:
                 continue
 
         # This package doesn't exist on the given platform
         return None
 
-    def _get_cross_package(self, pkg_mapping, cross_arch):
+    def _get_cross_package(self, pkg_mapping, target):
 
         # query the cross policy for the mapping to see whether we need
         # a cross- or non-cross version of a package
-        cross_policy = self._get_cross_policy(pkg_mapping)
+        cross_policy = self._get_cross_policy(pkg_mapping, target)
         if cross_policy == "skip":
             return None
 
-        elif cross_policy == "native" or cross_policy is None:
-            return self._get_noncross_package(pkg_mapping)
+        elif cross_policy == "native":
+            return self._get_noncross_package(pkg_mapping, target)
 
         try:
-            return CrossPackage(self._mappings, pkg_mapping,
-                                self._facts["packaging"]["format"],
-                                self._base_keys, cross_arch)
+            base_keys = self._base_keys(target)
+            return CrossPackage(self.mappings, pkg_mapping, base_keys, target)
         except PackageEval:
             pass
 
         # This package doesn't exist on the given platform
         return None
 
-    def get_package(self, pkg_mapping, cross_arch=None):
+    @property
+    def mappings(self):
+        if self._mappings is None:
+            self._load_mappings()
+
+        return self._mappings
+
+    @property
+    def pypi_mappings(self):
+        if self._mappings is None:
+            self._load_mappings()
+
+        return self._pypi_mappings
+
+    @property
+    def cpan_mappings(self):
+        if self._mappings is None:
+            self._load_mappings()
+
+        return self._cpan_mappings
+
+    def get_package(self, pkg_mapping, target):
         """
         Resolves the generic mapping name and returns a Package instance.
 
         :param pkg_mapping: generic package mapping name
-        :param cross_arch: cross architecture string (if needed)
+        :param target: target to resolve the package for
         :return: instance of Package subclass or None if package mapping could
                  not be resolved
         """
 
-        if pkg_mapping not in self._mappings:
+        if pkg_mapping not in self.mappings:
             raise PackageMissing(f"Package {pkg_mapping} not present in mappings")
 
-        if cross_arch is None:
-            return self._get_noncross_package(pkg_mapping)
+        if target.cross_arch is None:
+            return self._get_noncross_package(pkg_mapping, target)
         else:
-            return self._get_cross_package(pkg_mapping, cross_arch)
+            return self._get_cross_package(pkg_mapping, target)
+
+    def _load_mappings(self):
+        mappings_path = resource_filename(__name__,
+                                          "facts/mappings.yml")
+
+        try:
+            with open(mappings_path, "r") as infile:
+                mappings = yaml.safe_load(infile)
+                self._mappings = mappings["mappings"]
+                self._pypi_mappings = mappings["pypi_mappings"]
+                self._cpan_mappings = mappings["cpan_mappings"]
+        except Exception as ex:
+            log.debug("Can't load mappings")
+            raise PackageError(f"Can't load mappings: {ex}")
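
With PackageFactory folded into the new Packages class, mapping resolution is
keyed off a BuildTarget rather than raw facts plus a cross_arch string.  A
hedged sketch ("make" is assumed to be one of the entries in
facts/mappings.yml):

    from lcitool.packages import Packages
    from lcitool.targets import Targets, BuildTarget

    packages = Packages()
    target = BuildTarget(Targets(), packages, "fedora-37")

    pkg = packages.get_package("make", target)   # or target.get_package("make")
    if pkg is not None:
        print(pkg.mapping, "->", pkg.name)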
diff --git a/tests/lcitool/libvirt-ci/lcitool/projects.py b/tests/lcitool/libvirt-ci/lcitool/projects.py
index 06d34803..28e83a27 100644
--- a/tests/lcitool/libvirt-ci/lcitool/projects.py
+++ b/tests/lcitool/libvirt-ci/lcitool/projects.py
@@ -11,8 +11,7 @@ from pathlib import Path
 from pkg_resources import resource_filename
 
 from lcitool import util, LcitoolError
-from lcitool.package import PackageFactory, PyPIPackage, CPANPackage
-from lcitool.singleton import Singleton
+from lcitool.packages import PyPIPackage, CPANPackage
 
 log = logging.getLogger(__name__)
 
@@ -29,79 +28,58 @@ class ProjectError(LcitoolError):
         super().__init__(message, "Project")
 
 
-class Projects(metaclass=Singleton):
+class Projects:
     """
     Attributes:
         :ivar names: list of all project names
+        :ivar public: dictionary from project names to ``Project`` objects for public projects
+        :ivar internal: dictionary from project names to ``Project`` objects for internal projects
     """
 
     @property
-    def projects(self):
-        if self._projects is None:
-            self._projects = self._load_projects()
-        return self._projects
+    def public(self):
+        if self._public is None:
+            self._load_public()
+        return self._public
 
     @property
     def names(self):
-        return list(self.projects.keys())
+        return list(self.public.keys())
 
     @property
-    def internal_projects(self):
-        if self._internal_projects is None:
-            self._internal_projects = self._load_internal_projects()
-        return self._internal_projects
-
-    @property
-    def mappings(self):
-
-        # lazy load mappings
-        if self._mappings is None:
-            self._mappings = self._load_mappings()
-        return self._mappings
+    def internal(self):
+        if self._internal is None:
+            self._load_internal()
+        return self._internal
 
     def __init__(self):
-        self._projects = None
-        self._internal_projects = None
-        self._mappings = None
+        self._public = None
+        self._internal = None
 
-    @staticmethod
-    def _load_projects_from_path(path):
+    def _load_projects_from_path(self, path):
         projects = {}
 
         for item in path.iterdir():
             if not item.is_file() or item.suffix != ".yml":
                 continue
 
-            projects[item.stem] = Project(item.stem, item)
+            projects[item.stem] = Project(self, item.stem, item)
 
         return projects
 
-    @staticmethod
-    def _load_projects():
+    def _load_public(self):
         source = Path(resource_filename(__name__, "facts/projects"))
-        projects = Projects._load_projects_from_path(source)
+        projects = self._load_projects_from_path(source)
 
         if util.get_extra_data_dir() is not None:
             source = Path(util.get_extra_data_dir()).joinpath("projects")
-            projects.update(Projects._load_projects_from_path(source))
+            projects.update(self._load_projects_from_path(source))
 
-        return projects
+        self._public = projects
 
-    @staticmethod
-    def _load_internal_projects():
+    def _load_internal(self):
         source = Path(resource_filename(__name__, "facts/projects/internal"))
-        return Projects._load_projects_from_path(source)
-
-    def _load_mappings(self):
-        mappings_path = resource_filename(__name__,
-                                          "facts/mappings.yml")
-
-        try:
-            with open(mappings_path, "r") as infile:
-                return yaml.safe_load(infile)
-        except Exception as ex:
-            log.debug("Can't load mappings")
-            raise ProjectError(f"Can't load mappings: {ex}")
+        self._internal = self._load_projects_from_path(source)
 
     def expand_names(self, pattern):
         try:
@@ -110,18 +88,46 @@ class Projects(metaclass=Singleton):
             log.debug(f"Failed to expand '{pattern}'")
             raise ProjectError(f"Failed to expand '{pattern}': {ex}")
 
-    def get_packages(self, projects, facts, cross_arch=None):
+    def get_packages(self, projects, target):
         packages = {}
 
         for proj in projects:
             try:
-                obj = self.projects[proj]
+                obj = self.public[proj]
             except KeyError:
-                obj = self.internal_projects[proj]
-            packages.update(obj.get_packages(facts, cross_arch))
+                obj = self.internal[proj]
+            packages.update(obj.get_packages(target))
 
         return packages
 
+    def eval_generic_packages(self, target, generic_packages):
+        pkgs = {}
+        needs_pypi = False
+        needs_cpan = False
+
+        for mapping in generic_packages:
+            pkg = target.get_package(mapping)
+            if pkg is None:
+                continue
+            pkgs[pkg.mapping] = pkg
+
+            if isinstance(pkg, PyPIPackage):
+                needs_pypi = True
+            elif isinstance(pkg, CPANPackage):
+                needs_cpan = True
+
+        # The get_packages / eval_generic_packages cycle is deliberate and
+        # harmless since we'll only ever hit it with the following internal
+        # projects
+        if needs_pypi:
+            proj = self.internal["python-pip"]
+            pkgs.update(proj.get_packages(target))
+        if needs_cpan:
+            proj = self.internal["perl-cpan"]
+            pkgs.update(proj.get_packages(target))
+
+        return pkgs
+
 
 class Project:
     """
@@ -129,6 +135,7 @@ class Project:
         :ivar name: project name
         :ivar generic_packages: list of generic packages needed by the project
                                 to build successfully
+        :ivar projects: parent ``Projects`` instance
     """
 
     @property
@@ -139,7 +146,8 @@ class Project:
             self._generic_packages = self._load_generic_packages()
         return self._generic_packages
 
-    def __init__(self, name, path):
+    def __init__(self, projects, name, path):
+        self.projects = projects
         self.name = name
         self.path = path
         self._generic_packages = None
@@ -156,49 +164,21 @@ class Project:
             log.debug(f"Can't load pacakges for '{self.name}'")
             raise ProjectError(f"Can't load packages for '{self.name}': {ex}")
 
-    def _eval_generic_packages(self, facts, cross_arch=None):
-        pkgs = {}
-        factory = PackageFactory(Projects().mappings, facts)
-        needs_pypi = False
-        needs_cpan = False
-
-        for mapping in self.generic_packages:
-            pkg = factory.get_package(mapping, cross_arch)
-            if pkg is None:
-                continue
-            pkgs[pkg.mapping] = pkg
-
-            if isinstance(pkg, PyPIPackage):
-                needs_pypi = True
-            elif isinstance(pkg, CPANPackage):
-                needs_cpan = True
-
-        # The get_packages _eval_generic_packages cycle is deliberate and
-        # harmless since we'll only ever hit it with the following internal
-        # projects
-        if needs_pypi:
-            proj = Projects().internal_projects["python-pip"]
-            pkgs.update(proj.get_packages(facts, cross_arch))
-        if needs_cpan:
-            proj = Projects().internal_projects["perl-cpan"]
-            pkgs.update(proj.get_packages(facts, cross_arch))
-
-        return pkgs
-
-    def get_packages(self, facts, cross_arch=None):
-        osname = facts["os"]["name"]
-        osversion = facts["os"]["version"]
+    def get_packages(self, target):
+        osname = target.facts["os"]["name"]
+        osversion = target.facts["os"]["version"]
         target_name = f"{osname.lower()}-{osversion.lower()}"
-        if cross_arch is None:
+        if target.cross_arch is None:
             target_name = f"{target_name}-x86_64"
         else:
             try:
-                util.validate_cross_platform(cross_arch, osname)
+                util.validate_cross_platform(target.cross_arch, osname)
             except ValueError as ex:
                 raise ProjectError(ex)
-            target_name = f"{target_name}-{cross_arch}"
+            target_name = f"{target_name}-{target.cross_arch}"
 
         # lazy evaluation + caching of package names for a given distro
         if self._target_packages.get(target_name) is None:
-            self._target_packages[target_name] = self._eval_generic_packages(facts, cross_arch)
+            self._target_packages[target_name] = self.projects.eval_generic_packages(target,
+                                                                                     self.generic_packages)
         return self._target_packages[target_name]
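
Project package resolution now goes through the parent Projects instance and
a BuildTarget; the PyPI/CPAN fall-back lives in eval_generic_packages().  A
small sketch using the "python-pip" internal project referenced above (the
target name is only an example):

    from lcitool.packages import Packages
    from lcitool.projects import Projects
    from lcitool.targets import Targets, BuildTarget

    projects = Projects()
    target = BuildTarget(Targets(), Packages(), "alpine-316")

    pkgs = projects.get_packages(["python-pip"], target)
    print(sorted(p.name for p in pkgs.values()))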
diff --git a/tests/lcitool/libvirt-ci/lcitool/singleton.py b/tests/lcitool/libvirt-ci/lcitool/singleton.py
deleted file mode 100644
index 46d1379f..00000000
--- a/tests/lcitool/libvirt-ci/lcitool/singleton.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# singleton.py - module singleton class definition
-#
-# Copyright (C) 2021 Red Hat, Inc.
-#
-# SPDX-License-Identifier: GPL-2.0-or-later
-
-class Singleton(type):
-    _instances = {}
-
-    def __call__(cls, *args, **kwargs):
-        if cls not in cls._instances:
-            instance = super(Singleton, cls).__call__(*args, **kwargs)
-            cls._instances[cls] = instance
-        return cls._instances[cls]
diff --git a/tests/lcitool/libvirt-ci/lcitool/targets.py b/tests/lcitool/libvirt-ci/lcitool/targets.py
new file mode 100644
index 00000000..ef6deab1
--- /dev/null
+++ b/tests/lcitool/libvirt-ci/lcitool/targets.py
@@ -0,0 +1,108 @@
+# targets.py - module containing accessors to per-target information
+#
+# Copyright (C) 2022 Red Hat, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+
+import logging
+import yaml
+
+from pathlib import Path
+from pkg_resources import resource_filename
+
+from lcitool import util, LcitoolError
+
+
+log = logging.getLogger(__name__)
+
+
+class TargetsError(LcitoolError):
+    """Global exception type for the targets module."""
+
+    def __init__(self, message):
+        super().__init__(message, "Targets")
+
+
+class Targets():
+
+    @property
+    def target_facts(self):
+        if self._target_facts is None:
+            self._target_facts = self._load_target_facts()
+        return self._target_facts
+
+    @property
+    def targets(self):
+        return list(self.target_facts.keys())
+
+    def __init__(self):
+        self._target_facts = None
+
+    @staticmethod
+    def _read_facts_from_file(yaml_path):
+        log.debug(f"Loading facts from '{yaml_path}'")
+        with open(yaml_path, "r") as infile:
+            return yaml.safe_load(infile)
+
+    @staticmethod
+    def _validate_target_facts(target_facts, target):
+        fname = target + ".yml"
+
+        actual_osname = target_facts["os"]["name"].lower()
+        if not target.startswith(actual_osname + "-"):
+            raise TargetsError(f'OS name "{target_facts["os"]["name"]}" does not match file name {fname}')
+        target = target[len(actual_osname) + 1:]
+
+        actual_version = target_facts["os"]["version"].lower()
+        expected_version = target.replace("-", "")
+        if expected_version != actual_version:
+            raise TargetsError(f'OS version "{target_facts["os"]["version"]}" does not match version in file name {fname} ({expected_version})')
+
+    def _load_target_facts(self):
+        facts = {}
+        targets_path = Path(resource_filename(__name__, "facts/targets/"))
+        targets_all_path = Path(targets_path, "all.yml")
+
+        # first load the shared facts from targets/all.yml
+        shared_facts = self._read_facts_from_file(targets_all_path)
+
+        # then load the rest of the facts
+        for entry in targets_path.iterdir():
+            if not entry.is_file() or entry.suffix != ".yml" or entry.name == "all.yml":
+                continue
+
+            target = entry.stem
+            facts[target] = self._read_facts_from_file(entry)
+            self._validate_target_facts(facts[target], target)
+            facts[target]["target"] = target
+
+            # missing per-distro facts fall back to shared facts
+            util.merge_dict(shared_facts, facts[target])
+
+        return facts
+
+
+class BuildTarget:
+    """
+    Attributes:
+        :ivar name: target name
+        :ivar cross_arch: cross compilation architecture
+        :ivar facts: dictionary of facts for this target
+    """
+
+    def __init__(self, targets, packages, name, cross_arch=None):
+        if name not in targets.target_facts:
+            raise TargetsError(f"Target not found: {name}")
+        self._packages = packages
+        self.name = name
+        self.cross_arch = cross_arch
+        self.facts = targets.target_facts[self.name]
+
+    def __str__(self):
+        if self.cross_arch:
+            return f"{self.name} (cross_arch={self.cross_arch})"
+        else:
+            return self.name
+
+    def get_package(self, name):
+        return self._packages.get_package(name, self)
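
The new Targets class takes over what used to be Inventory.target_facts: it
parses facts/targets/*.yml, validates each file name against the embedded OS
name/version, and merges in the shared all.yml defaults.  Roughly (the
"debian-11" key is only an example):

    from lcitool.targets import Targets

    targets = Targets()
    print(targets.targets)     # all target names parsed from facts/targets/
    print(targets.target_facts["debian-11"]["packaging"]["format"])   # "deb"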
diff --git a/tests/lcitool/libvirt-ci/lcitool/util.py b/tests/lcitool/libvirt-ci/lcitool/util.py
index d2577917..c231df00 100644
--- a/tests/lcitool/libvirt-ci/lcitool/util.py
+++ b/tests/lcitool/libvirt-ci/lcitool/util.py
@@ -4,6 +4,7 @@
 #
 # SPDX-License-Identifier: GPL-2.0-or-later
 
+import copy
 import fnmatch
 import logging
 import os
@@ -202,6 +203,20 @@ def get_config_dir():
     return Path(config_dir, "lcitool")
 
 
+def merge_dict(source, dest):
+    for key in source.keys():
+        if key not in dest:
+            dest[key] = copy.deepcopy(source[key])
+            continue
+
+        if isinstance(source[key], list) or isinstance(dest[key], list):
+            raise ValueError("cannot merge lists")
+        if isinstance(source[key], dict) != isinstance(dest[key], dict):
+            raise ValueError("cannot merge dictionaries with non-dictionaries")
+        if isinstance(source[key], dict):
+            merge_dict(source[key], dest[key])
+
+
 extra_data_dir = None
 
 
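
util.merge_dict() copies keys that are missing from dest and recurses into
nested dictionaries while leaving existing values alone, which is how
per-target facts override the shared targets/all.yml defaults.  For example:

    from lcitool.util import merge_dict

    shared = {"packaging": {"format": "rpm", "command": "dnf"}}
    per_target = {"os": {"name": "Fedora", "version": "37"}}

    merge_dict(shared, per_target)
    # per_target now also carries the whole "packaging" block from shared,
    # while the keys it already defined are left untouched.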
diff --git a/tests/lcitool/libvirt-ci/test-requirements.txt b/tests/lcitool/libvirt-ci/test-requirements.txt
index 8e3fd1fd..b816c69f 100644
--- a/tests/lcitool/libvirt-ci/test-requirements.txt
+++ b/tests/lcitool/libvirt-ci/test-requirements.txt
@@ -1,5 +1,7 @@
 -r requirements.txt
--r vm-requirements.txt
+
+ansible
+ansible-runner
 
 flake8
 pytest
diff --git a/tests/lcitool/libvirt-ci/tests/conftest.py b/tests/lcitool/libvirt-ci/tests/conftest.py
index 751eb405..61a7b436 100644
--- a/tests/lcitool/libvirt-ci/tests/conftest.py
+++ b/tests/lcitool/libvirt-ci/tests/conftest.py
@@ -1,5 +1,16 @@
 import pytest
 
+from pathlib import Path
+
+from lcitool.config import Config
+from lcitool.inventory import Inventory
+from lcitool.packages import Packages
+from lcitool.projects import Projects
+from lcitool.targets import Targets
+from lcitool import util
+
+import test_utils.utils as test_utils
+
 
 def pytest_addoption(parser):
     parser.addoption(
@@ -13,3 +24,54 @@ def pytest_addoption(parser):
 def pytest_configure(config):
     opts = ["regenerate_output"]
     pytest.custom_args = {opt: config.getoption(opt) for opt in opts}
+
+
+# These need to be globals in order to compute ALL_PROJECTS and ALL_TARGETS
+# at collection time.  Tests do not access them directly and use the fixtures below.
+_PROJECTS = Projects()
+_TARGETS = Targets()
+
+ALL_PROJECTS = sorted(_PROJECTS.names + list(_PROJECTS.internal.keys()))
+ALL_TARGETS = sorted(_TARGETS.targets)
+
+
+@pytest.fixture
+def config(monkeypatch, request):
+    if 'config_filename' in request.fixturenames:
+        config_filename = request.getfixturevalue('config_filename')
+        actual_path = Path(test_utils.test_data_indir(request.module.__file__), config_filename)
+
+        # we have to monkeypatch the '_config_file_paths' attribute, since we don't
+        # support custom config file paths
+        config = Config()
+        monkeypatch.setattr(config, "_config_file_paths", [actual_path])
+    else:
+        actual_dir = Path(test_utils.test_data_indir(request.module.__file__))
+        monkeypatch.setattr(util, "get_config_dir", lambda: actual_dir)
+        config = Config()
+
+    return config
+
+
+@pytest.fixture
+def inventory(monkeypatch, targets, config):
+    inventory = Inventory(targets, config)
+
+    monkeypatch.setattr(inventory, "_get_libvirt_inventory",
+                        lambda: {"all": {"children": {}}})
+    return inventory
+
+
+@pytest.fixture(scope="module")
+def packages():
+    return Packages()
+
+
+@pytest.fixture
+def projects():
+    return _PROJECTS
+
+
+@pytest.fixture
+def targets():
+    return _TARGETS
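
A hypothetical test built on the new fixtures (the project name passed to the
formatter is only an example):

    def test_dockerfile_generation(targets, packages, projects):
        from lcitool.formatters import DockerfileFormatter
        from lcitool.targets import BuildTarget

        target = BuildTarget(targets, packages, "fedora-37")
        content = DockerfileFormatter(projects).format(target, ["python-pip"])
        assert content.startswith("FROM ")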
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-315-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-315-all-projects.Dockerfile
index 13e1a542..5fabdb95 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-315-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-315-all-projects.Dockerfile
@@ -156,7 +156,6 @@ RUN apk update && \
         perl-xml-xpath \
         perl-yaml \
         php8-dev \
-        php8-pecl-imagick \
         pixman-dev \
         pkgconf \
         polkit \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-316-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-316-all-projects.Dockerfile
index 7eef4d51..077eacee 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-316-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-316-all-projects.Dockerfile
@@ -156,7 +156,6 @@ RUN apk update && \
         perl-xml-xpath \
         perl-yaml \
         php8-dev \
-        php8-pecl-imagick \
         pixman-dev \
         pkgconf \
         polkit \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-edge-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-edge-all-projects.Dockerfile
index 67b591ea..0d0f7527 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-edge-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/alpine-edge-all-projects.Dockerfile
@@ -156,7 +156,6 @@ RUN apk update && \
         perl-xml-xpath \
         perl-yaml \
         php8-dev \
-        php8-pecl-imagick \
         pixman-dev \
         pkgconf \
         polkit \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-10-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-10-all-projects.Dockerfile
index 0d0b3e57..9d784d24 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-10-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-10-all-projects.Dockerfile
@@ -221,7 +221,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
                       perl \
                       perl-base \
                       php-dev \
-                      php-imagick \
                       pkgconf \
                       policykit-1 \
                       publican \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-11-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-11-all-projects.Dockerfile
index 7ab4c2ca..ee5166c6 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-11-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-11-all-projects.Dockerfile
@@ -224,7 +224,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
                       perl \
                       perl-base \
                       php-dev \
-                      php-imagick \
                       pkgconf \
                       policykit-1 \
                       publican \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-sid-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-sid-all-projects.Dockerfile
index 8ca21e51..09dac3eb 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-sid-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/debian-sid-all-projects.Dockerfile
@@ -224,7 +224,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
                       perl \
                       perl-base \
                       php-dev \
-                      php-imagick \
                       pkgconf \
                       policykit-1 \
                       publican \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-36-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-36-all-projects.Dockerfile
index 51b2b96b..cb416150 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-36-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-36-all-projects.Dockerfile
@@ -214,7 +214,6 @@ exec "$@"' > /usr/bin/nosync && \
                perl-generators \
                perl-podlators \
                php-devel \
-               php-pecl-imagick \
                pixman-devel \
                pkgconfig \
                polkit \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-35-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-37-all-projects.Dockerfile
similarity index 98%
rename from tests/data/formatters/out/fedora-35-all-projects.Dockerfile
rename to tests/data/formatters/out/fedora-37-all-projects.Dockerfile
index 35789b6d..e477d3f3 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-35-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-37-all-projects.Dockerfile
@@ -1,4 +1,4 @@
-FROM registry.fedoraproject.org/fedora:35
+FROM registry.fedoraproject.org/fedora:37
 
 RUN dnf install -y nosync && \
     echo -e '#!/bin/sh\n\
@@ -163,7 +163,6 @@ exec "$@"' > /usr/bin/nosync && \
                nbdkit \
                ncurses-devel \
                net-snmp-devel \
-               netcf-devel \
                nettle-devel \
                nfs-utils \
                ninja-build \
@@ -215,11 +214,9 @@ exec "$@"' > /usr/bin/nosync && \
                perl-generators \
                perl-podlators \
                php-devel \
-               php-pecl-imagick \
                pixman-devel \
                pkgconfig \
                polkit \
-               publican \
                pulseaudio-libs-devel \
                python3 \
                python3-PyYAML \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-rawhide-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-rawhide-all-projects.Dockerfile
index bc47cf9f..35852443 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-rawhide-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/fedora-rawhide-all-projects.Dockerfile
@@ -215,7 +215,6 @@ exec "$@"' > /usr/bin/nosync && \
                perl-generators \
                perl-podlators \
                php-devel \
-               php-pecl-imagick \
                pixman-devel \
                pkgconfig \
                polkit \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-153-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-153-all-projects.Dockerfile
index 16bb069b..c6f116cc 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-153-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-153-all-projects.Dockerfile
@@ -193,7 +193,6 @@ RUN zypper update -y && \
            perl-YAML \
            perl-base \
            php-devel \
-           php-imagick \
            pkgconfig \
            polkit \
            python3-Pillow \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-154-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-154-all-projects.Dockerfile
index 499ec816..23749f23 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-154-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-leap-154-all-projects.Dockerfile
@@ -194,7 +194,6 @@ RUN zypper update -y && \
            perl-YAML \
            perl-base \
            php-devel \
-           php-imagick \
            pkgconfig \
            polkit \
            python3-Pillow \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-tumbleweed-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-tumbleweed-all-projects.Dockerfile
index 84e898ae..1e357bfe 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-tumbleweed-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/opensuse-tumbleweed-all-projects.Dockerfile
@@ -194,7 +194,6 @@ RUN zypper dist-upgrade -y && \
            perl-YAML \
            perl-base \
            php-devel \
-           php-imagick \
            pkgconfig \
            polkit \
            python3-Pillow \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-1804-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-1804-all-projects.Dockerfile
index 9adad777..2b8715e8 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-1804-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-1804-all-projects.Dockerfile
@@ -217,7 +217,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
                       perl \
                       perl-base \
                       php-dev \
-                      php-imagick \
                       pkgconf \
                       policykit-1 \
                       publican \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2004-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2004-all-projects.Dockerfile
index 2e26434d..ab976b62 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2004-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2004-all-projects.Dockerfile
@@ -221,7 +221,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
                       perl \
                       perl-base \
                       php-dev \
-                      php-imagick \
                       pkgconf \
                       policykit-1 \
                       publican \
diff --git a/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2204-all-projects.Dockerfile b/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2204-all-projects.Dockerfile
index 560b45b9..b9c457b1 100644
--- a/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2204-all-projects.Dockerfile
+++ b/tests/lcitool/libvirt-ci/tests/data/formatters/out/ubuntu-2204-all-projects.Dockerfile
@@ -224,7 +224,6 @@ RUN export DEBIAN_FRONTEND=noninteractive && \
                       perl \
                       perl-base \
                       php-dev \
-                      php-imagick \
                       pkgconf \
                       policykit-1 \
                       publican \
diff --git a/tests/lcitool/libvirt-ci/tests/data/inventory/in/config.yml b/tests/lcitool/libvirt-ci/tests/data/inventory/in/config.yml
new file mode 100644
index 00000000..0eafdbee
--- /dev/null
+++ b/tests/lcitool/libvirt-ci/tests/data/inventory/in/config.yml
@@ -0,0 +1,3 @@
+install:
+  cloud_init: true
+  root_password: foo
diff --git a/tests/lcitool/libvirt-ci/tests/data/inventory/in/inventory/sample b/tests/lcitool/libvirt-ci/tests/data/inventory/in/inventory/sample
new file mode 100644
index 00000000..83b103cd
--- /dev/null
+++ b/tests/lcitool/libvirt-ci/tests/data/inventory/in/inventory/sample
@@ -0,0 +1,12 @@
+[centos-stream-8]
+centos-stream-8-1
+centos-stream-8-2
+some-other-centos-stream-8
+
+[fedora-37]
+fedora-test-1
+fedora-test-2   fully_managed=True
+
+[debian-10]
+192.168.1.30
+
diff --git a/tests/lcitool/libvirt-ci/tests/data/packages/out/fedora-35.yml b/tests/lcitool/libvirt-ci/tests/data/packages/out/fedora-37.yml
similarity index 99%
rename from tests/data/packages/out/fedora-35.yml
rename to tests/data/packages/out/fedora-37.yml
index 2ad108ae..465fbe19 100644
--- a/tests/lcitool/libvirt-ci/tests/data/packages/out/fedora-35.yml
+++ b/tests/lcitool/libvirt-ci/tests/data/packages/out/fedora-37.yml
@@ -159,7 +159,6 @@ native:
 - ncurses-devel
 - net-snmp-devel
 - net-tools
-- netcf-devel
 - nettle-devel
 - nfs-utils
 - ninja-build
@@ -216,7 +215,6 @@ native:
 - pixman-devel
 - pkgconfig
 - polkit
-- publican
 - pulseaudio-libs-devel
 - python3
 - python3-PyYAML
diff --git a/tests/lcitool/libvirt-ci/tests/test_config.py b/tests/lcitool/libvirt-ci/tests/test_config.py
index 049340f1..9a2fa7fc 100644
--- a/tests/lcitool/libvirt-ci/tests/test_config.py
+++ b/tests/lcitool/libvirt-ci/tests/test_config.py
@@ -9,20 +9,11 @@ import pytest
 import test_utils.utils as test_utils
 
 from pathlib import Path
-from lcitool.config import Config, ValidationError
-from lcitool.singleton import Singleton
-
-
-@pytest.fixture(autouse=True)
-def destroy_config():
-    # The following makes sure the Config singleton is deleted after each test
-    # See https://docs.pytest.org/en/6.2.x/fixture.html#teardown-cleanup-aka-fixture-finalization
-    yield
-    del Singleton._instances[Config]
+from lcitool.config import ValidationError
 
 
 @pytest.mark.parametrize(
-    "filename",
+    "config_filename",
     [
         "full.yml",
         "minimal.yml",
@@ -30,21 +21,15 @@ def destroy_config():
         "unknown_key.yml",
     ],
 )
-def test_config(monkeypatch, filename):
-    actual_path = Path(test_utils.test_data_indir(__file__), filename)
-    expected_path = Path(test_utils.test_data_outdir(__file__), filename)
-
-    config = Config()
+def test_config(config, config_filename):
+    expected_path = Path(test_utils.test_data_outdir(__file__), config_filename)
 
-    # we have to monkeypatch the '_config_file_paths' attribute, since we don't
-    # support custom inventory paths
-    monkeypatch.setattr(config, "_config_file_paths", [actual_path])
     actual = config.values
     test_utils.assert_yaml_matches_file(actual, expected_path)
 
 
 @pytest.mark.parametrize(
-    "filename",
+    "config_filename",
     [
         "empty.yml",
         "missing_mandatory_section.yml",
@@ -52,11 +37,6 @@ def test_config(monkeypatch, filename):
         "missing_gitlab_section_with_gitlab_flavor.yml",
     ],
 )
-def test_config_invalid(monkeypatch, filename):
-    actual_path = Path(test_utils.test_data_indir(__file__), filename)
-
-    config = Config()
-    monkeypatch.setattr(config, "_config_file_paths", [actual_path])
-
+def test_config_invalid(config, config_filename):
     with pytest.raises(ValidationError):
         config.values
diff --git a/tests/lcitool/libvirt-ci/tests/test_formatters.py b/tests/lcitool/libvirt-ci/tests/test_formatters.py
index 8abb4b3c..49b4a3c0 100644
--- a/tests/lcitool/libvirt-ci/tests/test_formatters.py
+++ b/tests/lcitool/libvirt-ci/tests/test_formatters.py
@@ -9,8 +9,7 @@ import pytest
 import test_utils.utils as test_utils
 from pathlib import Path
 
-from lcitool.inventory import Inventory
-from lcitool.projects import Projects
+from lcitool.targets import BuildTarget
 from lcitool.formatters import ShellVariablesFormatter, JSONVariablesFormatter, DockerfileFormatter, ShellBuildEnvFormatter
 
 
@@ -41,56 +40,62 @@ layer_scenarios = [
 
 
 @pytest.mark.parametrize("project,target,arch", scenarios)
-def test_dockerfiles(project, target, arch, request):
-    gen = DockerfileFormatter()
-    actual = gen.format(target, [project], arch)
+def test_dockerfiles(packages, projects, targets, project, target, arch, request):
+    gen = DockerfileFormatter(projects)
+    target_obj = BuildTarget(targets, packages, target, arch)
+    actual = gen.format(target_obj, [project])
     expected_path = Path(test_utils.test_data_outdir(__file__), request.node.callspec.id + ".Dockerfile")
     test_utils.assert_matches_file(actual, expected_path)
 
 
 @pytest.mark.parametrize("project,target,arch,base,layers", layer_scenarios)
-def test_dockerfile_layers(project, target, arch, base, layers, request):
-    gen = DockerfileFormatter(base, layers)
-    actual = gen.format(target, [project], arch)
+def test_dockerfile_layers(packages, projects, targets, project, target, arch, base, layers, request):
+    gen = DockerfileFormatter(projects, base, layers)
+    target_obj = BuildTarget(targets, packages, target, arch)
+    actual = gen.format(target_obj, [project])
     expected_path = Path(test_utils.test_data_outdir(__file__), request.node.callspec.id + ".Dockerfile")
     test_utils.assert_matches_file(actual, expected_path)
 
 
 @pytest.mark.parametrize("project,target,arch", scenarios)
-def test_variables_shell(project, target, arch, request):
-    gen = ShellVariablesFormatter()
-    actual = gen.format(target, [project], arch)
+def test_variables_shell(packages, projects, targets, project, target, arch, request):
+    gen = ShellVariablesFormatter(projects)
+    target_obj = BuildTarget(targets, packages, target, arch)
+    actual = gen.format(target_obj, [project])
     expected_path = Path(test_utils.test_data_outdir(__file__), request.node.callspec.id + ".vars")
     test_utils.assert_matches_file(actual, expected_path)
 
 
 @pytest.mark.parametrize("project,target,arch", scenarios)
-def test_variables_json(project, target, arch, request):
-    gen = JSONVariablesFormatter()
-    actual = gen.format(target, [project], arch)
+def test_variables_json(packages, projects, targets, project, target, arch, request):
+    gen = JSONVariablesFormatter(projects)
+    target_obj = BuildTarget(targets, packages, target, arch)
+    actual = gen.format(target_obj, [project])
     expected_path = Path(test_utils.test_data_outdir(__file__), request.node.callspec.id + ".json")
     test_utils.assert_matches_file(actual, expected_path)
 
 
 @pytest.mark.parametrize("project,target,arch", scenarios)
-def test_prepbuildenv(project, target, arch, request):
-    gen = ShellBuildEnvFormatter()
-    actual = gen.format(target, [project], arch)
+def test_prepbuildenv(packages, projects, targets, project, target, arch, request):
+    gen = ShellBuildEnvFormatter(projects)
+    target_obj = BuildTarget(targets, packages, target, arch)
+    actual = gen.format(target_obj, [project])
     expected_path = Path(test_utils.test_data_outdir(__file__), request.node.callspec.id + ".sh")
     test_utils.assert_matches_file(actual, expected_path)
 
 
-def test_all_projects_dockerfiles():
-    inventory = Inventory()
-    all_projects = Projects().names
+def test_all_projects_dockerfiles(packages, projects, targets):
+    all_projects = projects.names
 
-    for target in sorted(inventory.targets):
-        facts = inventory.target_facts[target]
+    for target in sorted(targets.targets):
+        target_obj = BuildTarget(targets, packages, target)
+
+        facts = target_obj.facts
 
         if facts["packaging"]["format"] not in ["apk", "deb", "rpm"]:
             continue
 
-        gen = DockerfileFormatter()
-        actual = gen.format(target, all_projects, None)
+        gen = DockerfileFormatter(projects)
+        actual = gen.format(target_obj, all_projects)
         expected_path = Path(test_utils.test_data_outdir(__file__), f"{target}-all-projects.Dockerfile")
         test_utils.assert_matches_file(actual, expected_path)
diff --git a/tests/lcitool/libvirt-ci/tests/test_inventory.py b/tests/lcitool/libvirt-ci/tests/test_inventory.py
new file mode 100644
index 00000000..f8e6e21f
--- /dev/null
+++ b/tests/lcitool/libvirt-ci/tests/test_inventory.py
@@ -0,0 +1,50 @@
+# test_inventory: test lcitool Ansible inventory
+#
+# Copyright (C) 2022 Red Hat, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+
+import pytest
+
+from lcitool.inventory import InventoryError
+from lcitool.targets import BuildTarget
+
+
+pytestmark = pytest.mark.filterwarnings("ignore:'pipes' is deprecated:DeprecationWarning")
+
+
+@pytest.mark.parametrize("host,target,fully_managed", [
+    pytest.param("centos-stream-8-1", "centos-stream-8", False, id="centos-stream-8-1"),
+    pytest.param("192.168.1.30", "debian-10", False, id="debian-10"),
+    pytest.param("fedora-test-2", "fedora-37", True, id="fedora-test-2"),
+])
+def test_host_facts(inventory, targets, host, target, fully_managed):
+    host_facts = inventory.host_facts[host]
+    assert host_facts["target"] == target
+    for key, value in targets.target_facts[target].items():
+        assert host_facts[key] == value
+    assert host_facts.get("fully_managed", False) == fully_managed
+
+
+def test_expand_hosts(inventory):
+    assert sorted(inventory.expand_hosts("*centos*")) == [
+        "centos-stream-8-1",
+        "centos-stream-8-2",
+        "some-other-centos-stream-8"
+    ]
+    with pytest.raises(InventoryError):
+        inventory.expand_hosts("debian-10")
+
+
+def test_host_target_name(inventory):
+    assert inventory.get_host_target_name("fedora-test-1") == "fedora-37"
+
+
+def test_group_vars(inventory, targets, packages, projects):
+    target = BuildTarget(targets, packages, "fedora-37")
+    group_vars = inventory.get_group_vars(target, projects, ["nbdkit"])
+    assert "nano" in group_vars["unwanted_packages"]
+    assert "python3-libselinux" in group_vars["early_install_packages"]
+
+    for key, value in target.facts.items():
+        assert group_vars[key] == value
diff --git a/tests/lcitool/libvirt-ci/tests/test_manifest.py b/tests/lcitool/libvirt-ci/tests/test_manifest.py
index 7c923e0b..33ef4e3e 100644
--- a/tests/lcitool/libvirt-ci/tests/test_manifest.py
+++ b/tests/lcitool/libvirt-ci/tests/test_manifest.py
@@ -14,7 +14,7 @@ from lcitool import util
 from lcitool.manifest import Manifest
 
 
-def test_generate(monkeypatch):
+def test_generate(targets, packages, projects, monkeypatch):
     manifest_path = Path(test_utils.test_data_indir(__file__), "manifest.yml")
 
     # Squish the header that contains argv with paths we don't
@@ -67,7 +67,7 @@ def test_generate(monkeypatch):
         m.setattr(Path, 'glob', fake_glob)
 
         with open(manifest_path, "r") as fp:
-            manifest = Manifest(fp, quiet=True)
+            manifest = Manifest(targets, packages, projects, fp, quiet=True)
 
         manifest.generate()
 
@@ -134,7 +134,7 @@ def test_generate(monkeypatch):
     finally:
         if pytest.custom_args["regenerate_output"]:
             with open(manifest_path, "r") as fp:
-                manifest = Manifest(fp, quiet=True,
+                manifest = Manifest(targets, packages, projects, fp, quiet=True,
                                     basedir=Path(test_utils.test_data_outdir(__file__)))
 
             manifest.generate()
diff --git a/tests/lcitool/libvirt-ci/tests/test_packages.py b/tests/lcitool/libvirt-ci/tests/test_packages.py
index dad37611..ec4492e3 100644
--- a/tests/lcitool/libvirt-ci/tests/test_packages.py
+++ b/tests/lcitool/libvirt-ci/tests/test_packages.py
@@ -13,12 +13,11 @@ from functools import total_ordering
 
 from pathlib import Path
 from lcitool import util
-from lcitool.inventory import Inventory
-from lcitool.projects import Project, Projects, ProjectError
-from lcitool.package import NativePackage, CrossPackage, PyPIPackage, CPANPackage
+from lcitool.projects import Project, ProjectError
+from lcitool.packages import NativePackage, CrossPackage, PyPIPackage, CPANPackage
+from lcitool.targets import BuildTarget
 
-
-ALL_TARGETS = sorted(Inventory().targets)
+from conftest import ALL_TARGETS
 
 
 def get_non_cross_targets():
@@ -43,14 +42,14 @@ def packages_as_dict(raw_pkgs):
 
 
 @pytest.fixture
-def test_project():
-    return Project("packages",
+def test_project(projects):
+    return Project(projects, "packages",
                    Path(test_utils.test_data_indir(__file__), "packages.yml"))
 
 
-def test_verify_all_mappings_and_packages():
+def test_verify_all_mappings_and_packages(packages):
     expected_path = Path(test_utils.test_data_indir(__file__), "packages.yml")
-    actual = {"packages": sorted(Projects().mappings["mappings"].keys())}
+    actual = {"packages": sorted(packages.mappings.keys())}
 
     test_utils.assert_yaml_matches_file(actual, expected_path)
 
@@ -66,14 +65,14 @@ cross_params = [
 
 
 @pytest.mark.parametrize("target,arch", native_params + cross_params)
-def test_package_resolution(test_project, target, arch):
+def test_package_resolution(targets, packages, test_project, target, arch):
     if arch is None:
         outfile = f"{target}.yml"
     else:
         outfile = f"{target}-cross-{arch}.yml"
     expected_path = Path(test_utils.test_data_outdir(__file__), outfile)
-    pkgs = test_project.get_packages(Inventory().target_facts[target],
-                                     cross_arch=arch)
+    target_obj = BuildTarget(targets, packages, target, arch)
+    pkgs = test_project.get_packages(target_obj)
     actual = packages_as_dict(pkgs)
 
     test_utils.assert_yaml_matches_file(actual, expected_path)
@@ -83,10 +82,10 @@ def test_package_resolution(test_project, target, arch):
     "target",
     [pytest.param(target, id=target) for target in get_non_cross_targets()],
 )
-def test_unsupported_cross_platform(test_project, target):
+def test_unsupported_cross_platform(targets, packages, test_project, target):
     with pytest.raises(ProjectError):
-        test_project.get_packages(Inventory().target_facts[target],
-                                  cross_arch="s390x")
+        target_obj = BuildTarget(targets, packages, target, "s390x")
+        test_project.get_packages(target_obj)
 
 
 @pytest.mark.parametrize(
@@ -96,10 +95,10 @@ def test_unsupported_cross_platform(test_project, target):
         pytest.param("fedora-rawhide", "s390x", id="fedora-rawhide-cross-s390x"),
     ],
 )
-def test_cross_platform_arch_mismatch(test_project, target, arch):
+def test_cross_platform_arch_mismatch(targets, packages, test_project, target, arch):
     with pytest.raises(ProjectError):
-        test_project.get_packages(Inventory().target_facts[target],
-                                  cross_arch=arch)
+        target_obj = BuildTarget(targets, packages, target, arch)
+        test_project.get_packages(target_obj)
 
 
 @total_ordering
@@ -124,11 +123,11 @@ class MappingKey(namedtuple('MappingKey', ['components', 'priority'])):
         return self.components < other.components
 
 
-def mapping_keys_product():
+def mapping_keys_product(targets):
     basekeys = set()
 
     basekeys.add(MappingKey(("default", ), 0))
-    for target, facts in Inventory().target_facts.items():
+    for target, facts in targets.target_facts.items():
         fmt = facts["packaging"]["format"]
         name = facts["os"]["name"]
         ver = facts["os"]["version"]
@@ -148,10 +147,11 @@ def mapping_keys_product():
     return basekeys + archkeys + crossarchkeys + crosspolicykeys
 
 
-def test_project_mappings_sorting():
-    mappings = Projects().mappings["mappings"]
+@pytest.mark.parametrize("key", ["mappings", "pypi_mappings", "cpan_mappings"])
+def test_project_mappings_sorting(targets, packages, key):
+    mappings = getattr(packages, key)
 
-    all_expect_keys = mapping_keys_product()
+    all_expect_keys = mapping_keys_product(targets)
     for package, entries in mappings.items():
         got_keys = list(entries.keys())
         expect_keys = list(filter(lambda k: k in got_keys, all_expect_keys))
diff --git a/tests/lcitool/libvirt-ci/tests/test_projects.py b/tests/lcitool/libvirt-ci/tests/test_projects.py
index 760d331c..84493834 100644
--- a/tests/lcitool/libvirt-ci/tests/test_projects.py
+++ b/tests/lcitool/libvirt-ci/tests/test_projects.py
@@ -6,37 +6,25 @@
 
 import pytest
 
-from lcitool.projects import Projects
-from lcitool.inventory import Inventory
+from lcitool.targets import BuildTarget
 
+from conftest import ALL_PROJECTS
 
-projects = Projects()
-ALL_PROJECTS = sorted(projects.names + list(projects.internal_projects.keys()))
 
-
-@pytest.mark.parametrize(
-    "name",
-    ALL_PROJECTS
-)
-def test_project_packages(name):
+@pytest.fixture(params=ALL_PROJECTS)
+def project(request, projects):
     try:
-        project = projects.projects[name]
+        return projects.public[request.param]
     except KeyError:
-        project = projects.internal_projects[name]
-    target = Inventory().targets[0]
-    facts = Inventory().target_facts[target]
-    project.get_packages(facts)
+        return projects.internal[request.param]
 
 
-@pytest.mark.parametrize(
-    "name",
-    ALL_PROJECTS
-)
-def test_project_package_sorting(name):
-    try:
-        project = projects.projects[name]
-    except KeyError:
-        project = projects.internal_projects[name]
+def test_project_packages(targets, packages, project):
+    target = BuildTarget(targets, packages, targets.targets[0])
+    project.get_packages(target)
+
+
+def test_project_package_sorting(project):
     pkgs = project._load_generic_packages()
 
     otherpkgs = sorted(pkgs)
diff --git a/tests/lcitool/libvirt-ci/tests/test_misc.py b/tests/lcitool/libvirt-ci/tests/test_targets.py
similarity index 77%
rename from tests/test_misc.py
rename to tests/test_targets.py
index 5f1af1e9..61985f7f 100644
--- a/tests/lcitool/libvirt-ci/tests/test_misc.py
+++ b/tests/lcitool/libvirt-ci/tests/test_targets.py
@@ -1,4 +1,4 @@
-# test_misc: test uncategorized aspects of lcitool
+# test_targets: test lcitool target facts
 #
 # Copyright (C) 2022 Red Hat, Inc.
 #
@@ -6,17 +6,14 @@
 
 import pytest
 
-from lcitool.inventory import Inventory
-
-inventory = Inventory()
-ALL_TARGETS = sorted(inventory.targets)
+from conftest import ALL_TARGETS
 
 
 @pytest.mark.parametrize("target", ALL_TARGETS)
-def test_group_vars(target):
+def test_group_vars(targets, target):
     """Check selected group_vars fields for correctness."""
 
-    facts = inventory.target_facts[target]
+    facts = targets.target_facts[target]
     split = target.split('-', maxsplit=1)
     target_os = split[0]
     target_version = split[1].replace("-", "")
diff --git a/tests/lcitool/mappings.yml b/tests/lcitool/mappings.yml
new file mode 100644
index 0000000000..4b4b44adf1
--- /dev/null
+++ b/tests/lcitool/mappings.yml
@@ -0,0 +1,60 @@
+mappings:
+  flake8:
+    OpenSUSELeap153:
+
+  meson:
+    OpenSUSELeap153:
+
+  python3:
+    OpenSUSELeap153: python39-base
+
+  python3-PyYAML:
+    OpenSUSELeap153:
+
+  python3-devel:
+    OpenSUSELeap153: python39-devel
+
+  python3-docutils:
+    OpenSUSELeap153:
+
+  python3-numpy:
+    OpenSUSELeap153:
+
+  python3-opencv:
+    OpenSUSELeap153:
+
+  python3-pillow:
+    OpenSUSELeap153:
+
+  python3-pip:
+    OpenSUSELeap153: python39-pip
+
+  python3-pillow:
+    OpenSUSELeap153:
+
+  python3-selinux:
+    OpenSUSELeap153:
+
+  python3-setuptools:
+    OpenSUSELeap153: python39-setuptools
+
+  python3-sphinx:
+    OpenSUSELeap153:
+
+  python3-sphinx-rtd-theme:
+    OpenSUSELeap153:
+
+  python3-venv:
+    OpenSUSELeap153: python39-base
+
+  python3-wheel:
+    OpenSUSELeap153: python39-pip
+
+pypi_mappings:
+  # Request more recent version
+  meson:
+    default: meson==0.63.2
+
+  # Drop packages that need devel headers
+  python3-numpy:
+    OpenSUSELeap153:
diff --git a/tests/lcitool/refresh b/tests/lcitool/refresh
index fa966e4009..7a4cd6fd32 100755
--- a/tests/lcitool/refresh
+++ b/tests/lcitool/refresh
@@ -108,10 +108,10 @@ try:
     # Standard native builds
     #
     generate_dockerfile("alpine", "alpine-316")
-    generate_dockerfile("centos8", "centos-stream-8")
+    generate_dockerfile("centos9", "centos-stream-9")
     generate_dockerfile("debian-amd64", "debian-11",
                         trailer="".join(debian11_extras))
-    generate_dockerfile("fedora", "fedora-35")
+    generate_dockerfile("fedora", "fedora-37")
     generate_dockerfile("opensuse-leap", "opensuse-leap-153")
     generate_dockerfile("ubuntu2004", "ubuntu-2004",
                         trailer="".join(ubuntu2004_tsanhack))
@@ -161,12 +161,12 @@ try:
                         trailer=cross_build("s390x-linux-gnu-",
                                             "s390x-softmmu,s390x-linux-user"))
 
-    generate_dockerfile("fedora-win32-cross", "fedora-35",
+    generate_dockerfile("fedora-win32-cross", "fedora-37",
                         cross="mingw32",
                         trailer=cross_build("i686-w64-mingw32-",
                                             "i386-softmmu"))
 
-    generate_dockerfile("fedora-win64-cross", "fedora-35",
+    generate_dockerfile("fedora-win64-cross", "fedora-37",
                         cross="mingw64",
                         trailer=cross_build("x86_64-w64-mingw32-",
                                             "x86_64-softmmu"))
diff --git a/tests/lcitool/targets/centos-stream-8.yml b/tests/lcitool/targets/centos-stream-8.yml
new file mode 100644
index 0000000000..6b11160fd1
--- /dev/null
+++ b/tests/lcitool/targets/centos-stream-8.yml
@@ -0,0 +1,3 @@
+paths:
+  pip3: /usr/bin/pip3.8
+  python: /usr/bin/python3.8
diff --git a/tests/lcitool/targets/opensuse-leap-153.yml b/tests/lcitool/targets/opensuse-leap-153.yml
new file mode 100644
index 0000000000..683016e007
--- /dev/null
+++ b/tests/lcitool/targets/opensuse-leap-153.yml
@@ -0,0 +1,3 @@
+paths:
+  pip3: /usr/bin/pip3.9
+  python: /usr/bin/python3.9
diff --git a/tests/qemu-iotests/061 b/tests/qemu-iotests/061
index 509ad247cd..168a5831dd 100755
--- a/tests/qemu-iotests/061
+++ b/tests/qemu-iotests/061
@@ -326,12 +326,14 @@ $QEMU_IMG amend -o "data_file=foo" "$TEST_IMG"
 echo
 _make_test_img -o "compat=1.1,data_file=$TEST_IMG.data" 64M
 $QEMU_IMG amend -o "data_file=foo" "$TEST_IMG"
-_img_info --format-specific
+$QEMU_IO -c "read 0 4k" "$TEST_IMG" 2>&1 | _filter_testdir | _filter_imgfmt
+$QEMU_IO -c "open -o data-file.filename=$TEST_IMG.data,file.filename=$TEST_IMG" -c "read 0 4k" | _filter_qemu_io
 TEST_IMG="data-file.filename=$TEST_IMG.data,file.filename=$TEST_IMG" _img_info --format-specific --image-opts
 
 echo
 $QEMU_IMG amend -o "data_file=" --image-opts "data-file.filename=$TEST_IMG.data,file.filename=$TEST_IMG"
-_img_info --format-specific
+$QEMU_IO -c "read 0 4k" "$TEST_IMG" 2>&1 | _filter_testdir | _filter_imgfmt
+$QEMU_IO -c "open -o data-file.filename=$TEST_IMG.data,file.filename=$TEST_IMG" -c "read 0 4k" | _filter_qemu_io
 TEST_IMG="data-file.filename=$TEST_IMG.data,file.filename=$TEST_IMG" _img_info --format-specific --image-opts
 
 echo
diff --git a/tests/qemu-iotests/061.out b/tests/qemu-iotests/061.out
index 139fc68177..24c33add7c 100644
--- a/tests/qemu-iotests/061.out
+++ b/tests/qemu-iotests/061.out
@@ -545,7 +545,9 @@ Formatting 'TEST_DIR/t.IMGFMT', fmt=IMGFMT size=67108864
 qemu-img: data-file can only be set for images that use an external data file
 
 Formatting 'TEST_DIR/t.IMGFMT', fmt=IMGFMT size=67108864 data_file=TEST_DIR/t.IMGFMT.data
-qemu-img: Could not open 'TEST_DIR/t.IMGFMT': Could not open 'foo': No such file or directory
+qemu-io: can't open device TEST_DIR/t.IMGFMT: Could not open 'foo': No such file or directory
+read 4096/4096 bytes at offset 0
+4 KiB, X ops; XX:XX:XX.X (XXX YYY/sec and XXX ops/sec)
 image: TEST_DIR/t.IMGFMT
 file format: IMGFMT
 virtual size: 64 MiB (67108864 bytes)
@@ -560,7 +562,9 @@ Format specific information:
     corrupt: false
     extended l2: false
 
-qemu-img: Could not open 'TEST_DIR/t.IMGFMT': 'data-file' is required for this image
+qemu-io: can't open device TEST_DIR/t.IMGFMT: 'data-file' is required for this image
+read 4096/4096 bytes at offset 0
+4 KiB, X ops; XX:XX:XX.X (XXX YYY/sec and XXX ops/sec)
 image: TEST_DIR/t.IMGFMT
 file format: IMGFMT
 virtual size: 64 MiB (67108864 bytes)
diff --git a/tests/qemu-iotests/244 b/tests/qemu-iotests/244
index 3e61fa25bb..bb9cc6512f 100755
--- a/tests/qemu-iotests/244
+++ b/tests/qemu-iotests/244
@@ -215,9 +215,22 @@ $QEMU_IMG convert -f $IMGFMT -O $IMGFMT -n -C "$TEST_IMG.src" "$TEST_IMG"
 $QEMU_IMG compare -f $IMGFMT -F $IMGFMT "$TEST_IMG.src" "$TEST_IMG"
 
 # blkdebug doesn't support copy offloading, so this tests the error path
-$QEMU_IMG amend -f $IMGFMT -o "data_file=blkdebug::$TEST_IMG.data" "$TEST_IMG"
-$QEMU_IMG convert -f $IMGFMT -O $IMGFMT -n -C "$TEST_IMG.src" "$TEST_IMG"
-$QEMU_IMG compare -f $IMGFMT -F $IMGFMT "$TEST_IMG.src" "$TEST_IMG"
+test_img_with_blkdebug="json:{
+    'driver': 'qcow2',
+    'file': {
+        'driver': 'file',
+        'filename': '$TEST_IMG'
+    },
+    'data-file': {
+        'driver': 'blkdebug',
+        'image': {
+            'driver': 'file',
+            'filename': '$TEST_IMG.data'
+        }
+    }
+}"
+$QEMU_IMG convert -f $IMGFMT -O $IMGFMT -n -C "$TEST_IMG.src" "$test_img_with_blkdebug"
+$QEMU_IMG compare -f $IMGFMT -F $IMGFMT "$TEST_IMG.src" "$test_img_with_blkdebug"
 
 echo
 echo "=== Flushing should flush the data file ==="
diff --git a/tests/qemu-iotests/270 b/tests/qemu-iotests/270
index 74352342db..c37b674aa2 100755
--- a/tests/qemu-iotests/270
+++ b/tests/qemu-iotests/270
@@ -60,8 +60,16 @@ _make_test_img -o cluster_size=2M,data_file="$TEST_IMG.orig" \
 # "write" 2G of data without using any space.
 # (qemu-img create does not like it, though, because null-co does not
 # support image creation.)
-$QEMU_IMG amend -o data_file="json:{'driver':'null-co',,'size':'4294967296'}" \
-    "$TEST_IMG"
+test_img_with_null_data="json:{
+    'driver': '$IMGFMT',
+    'file': {
+        'filename': '$TEST_IMG'
+    },
+    'data-file': {
+        'driver': 'null-co',
+        'size':'4294967296'
+    }
+}"
 
 # This gives us a range of:
 #   2^31 - 512 + 768 - 1 = 2^31 + 255 > 2^31
@@ -74,7 +82,7 @@ $QEMU_IMG amend -o data_file="json:{'driver':'null-co',,'size':'4294967296'}" \
 # on L2 boundaries, we need large L2 tables; hence the cluster size of
 # 2 MB.  (Anything from 256 kB should work, though, because then one L2
 # table covers 8 GB.)
-$QEMU_IO -c "write 768 $((2 ** 31 - 512))" "$TEST_IMG" | _filter_qemu_io
+$QEMU_IO -c "write 768 $((2 ** 31 - 512))" "$test_img_with_null_data" | _filter_qemu_io
 
 _check_test_img
 
diff --git a/tests/vm/centos b/tests/vm/centos
index 097a9ca14d..d25c8f8b5b 100755
--- a/tests/vm/centos
+++ b/tests/vm/centos
@@ -26,8 +26,8 @@ class CentosVM(basevm.BaseVM):
         export SRC_ARCHIVE=/dev/vdb;
         sudo chmod a+r $SRC_ARCHIVE;
         tar -xf $SRC_ARCHIVE;
-        make docker-test-block@centos8 {verbose} J={jobs} NETWORK=1;
-        make docker-test-quick@centos8 {verbose} J={jobs} NETWORK=1;
+        make docker-test-block@centos9 {verbose} J={jobs} NETWORK=1;
+        make docker-test-quick@centos9 {verbose} J={jobs} NETWORK=1;
     """
 
     def build_image(self, img):
