Fix init script duplication and CPIO creation issues

- Remove duplicate /sbin/init copying from initramfs_setup_zinit()
- Only /init should be config/init (initramfs setup script)
- No /sbin/init needed - config/init calls 'switch_root /mnt/root /sbin/zinit init'
- Remove unsupported cpio --owner option that broke CPIO creation
- Fix validation to not expect /sbin/init file
- Correct boot flow: /init → switch_root → /sbin/zinit init
- Remove strip and UPX compression from zinit binary copying
- UPX compression was corrupting the zinit binary causing segfaults after switch_root
- Keep zinit unmodified as it's a statically linked binary that must stay intact
This commit is contained in:
2025-09-05 11:43:25 +02:00
parent 38dee2de74
commit 8c3868b242
102 changed files with 589 additions and 375 deletions

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="alpine_configure"
COMPLETED_AT="2025-09-05T08:50:10+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_alpine_configure"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="alpine_extract"
COMPLETED_AT="2025-09-05T08:50:10+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_alpine_extract"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="alpine_firmware"
COMPLETED_AT="2025-09-05T08:50:17+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 3s, Function: stage_alpine_firmware"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="alpine_packages"
COMPLETED_AT="2025-09-05T08:50:14+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 4s, Function: stage_alpine_packages"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="boot_tests"
COMPLETED_AT="2025-09-05T09:00:10+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_boot_tests"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="cleanup"
COMPLETED_AT="2025-09-05T09:35:22+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_cleanup"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="components_build"
COMPLETED_AT="2025-09-05T08:52:40+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 143s, Function: stage_components_build"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="components_copy"
COMPLETED_AT="2025-09-05T09:35:21+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 82s, Function: stage_components_copy"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="components_verify"
COMPLETED_AT="2025-09-05T08:52:40+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_components_verify"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="init_script"
COMPLETED_AT="2025-09-05T09:33:59+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_init_script"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="initramfs_create"
COMPLETED_AT="2025-09-05T09:35:47+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 25s, Function: stage_initramfs_create"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="initramfs_test"
COMPLETED_AT="2025-09-05T09:35:48+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 1s, Function: stage_initramfs_test"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="kernel_build"
COMPLETED_AT="2025-09-05T09:36:19+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 31s, Function: stage_kernel_build"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="kernel_modules"
COMPLETED_AT="2025-09-05T08:57:54+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 314s, Function: stage_kernel_modules"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="modules_copy"
COMPLETED_AT="2025-09-05T09:35:22+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_modules_copy"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="modules_setup"
COMPLETED_AT="2025-09-05T09:35:22+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 1s, Function: stage_modules_setup"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="validation"
COMPLETED_AT="2025-09-05T09:35:22+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_validation"

View File

@@ -0,0 +1,5 @@
# Stage completion marker
STAGE_NAME="zinit_setup"
COMPLETED_AT="2025-09-05T09:35:21+00:00"
COMPLETED_BY="root"
DETAILS="Duration: 0s, Function: stage_zinit_setup"

302
build.log
View File

@@ -1,302 +0,0 @@
==================================================
== ZERO-OS ALPINE INITRAMFS BUILDER ==
== ThreeFold Edition ==
==================================================
==================================================
SECTION: Setting up build environment
==================================================
[INFO] 2025-08-31 15:40:53 - Starting section: Setting up build environment
[INFO] 2025-08-31 15:40:53 - Project root: /home/delandtj/Threefold/zosbuilder
[INFO] 2025-08-31 15:40:53 - Alpine version: 3.22
[INFO] 2025-08-31 15:40:53 - Kernel version: 6.12.44
[INFO] 2025-08-31 15:40:53 - Rust target: x86_64-unknown-linux-musl
[INFO] 2025-08-31 15:40:53 - Optimization level: max
[INFO] 2025-08-31 15:40:53 - Skipping directory creation on host (container will create them)
[INFO] 2025-08-31 15:40:53 - All dependencies satisfied
==================================================
SECTION: Verifying Configuration Files
==================================================
[INFO] 2025-08-31 15:40:53 - Starting section: Verifying Configuration Files
[INFO] 2025-08-31 15:40:53 - ✓ Configuration found: packages.list
[INFO] 2025-08-31 15:40:53 - ✓ Configuration found: sources.conf
[INFO] 2025-08-31 15:40:53 - ✓ Configuration found: modules.conf
[INFO] 2025-08-31 15:40:53 - ✓ Configuration found: kernel.config
[INFO] 2025-08-31 15:40:53 - ✓ Configuration found: firmware.conf
[INFO] 2025-08-31 15:40:53 - All configuration files verified
[INFO] 2025-08-31 15:40:53 - Build environment setup complete
[INFO] 2025-08-31 15:40:53 - Starting container build
==================================================
SECTION: Detecting Container Runtime
==================================================
[INFO] 2025-08-31 15:40:53 - Starting section: Detecting Container Runtime
[INFO] 2025-08-31 15:40:53 - Using Podman as container runtime
==================================================
SECTION: Verifying Rootless Container Setup
==================================================
[INFO] 2025-08-31 15:40:53 - Starting section: Verifying Rootless Container Setup
[INFO] 2025-08-31 15:40:53 - Checking podman rootless configuration
[INFO] 2025-08-31 15:40:53 - Executing: podman system info
[INFO] 2025-08-31 15:40:53 - Testing rootless container execution
[INFO] 2025-08-31 15:40:53 - Executing: podman run --rm alpine:3.22 echo Rootless container test successful
[INFO] 2025-08-31 15:40:54 - Rootless container setup verified
==================================================
SECTION: Building Container Image
==================================================
[INFO] 2025-08-31 15:40:54 - Starting section: Building Container Image
[INFO] 2025-08-31 15:40:54 - Building container image: zero-os-builder:latest
[INFO] 2025-08-31 15:40:54 - Executing: podman build -t zero-os-builder:latest -f /home/delandtj/Threefold/zosbuilder/Dockerfile /home/delandtj/Threefold/zosbuilder
[INFO] 2025-08-31 15:40:54 - Container image built successfully: zero-os-builder:latest
==================================================
SECTION: Running Build in Container
==================================================
[INFO] 2025-08-31 15:40:54 - Starting section: Running Build in Container
[INFO] 2025-08-31 15:40:54 - Executing: chmod +x /home/delandtj/Threefold/zosbuilder/./scripts/build.sh
[INFO] 2025-08-31 15:40:54 - Executing build command in container: ./scripts/build.sh --skip-tests
[INFO] 2025-08-31 15:40:54 - Executing: podman run --rm --privileged -v /home/delandtj/Threefold/zosbuilder:/workspace -w /workspace zero-os-builder:latest ./scripts/build.sh --skip-tests
[ERROR] 2025-08-31 15:41:01 - Command failed: podman run --rm --privileged -v /home/delandtj/Threefold/zosbuilder:/workspace -w /workspace zero-os-builder:latest ./scripts/build.sh --skip-tests
[ERROR] 2025-08-31 15:41:01 - Output:
==================================================
== ZERO-OS ALPINE INITRAMFS BUILDER ==
== ThreeFold Edition ==
==================================================
==================================================
SECTION: Setting up build environment
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Setting up build environment
[INFO] 2025-08-31 13:40:54 - Project root: /workspace
[INFO] 2025-08-31 13:40:54 - Alpine version: 3.22
[INFO] 2025-08-31 13:40:54 - Kernel version: 6.12.44
[INFO] 2025-08-31 13:40:54 - Rust target: x86_64-unknown-linux-musl
[INFO] 2025-08-31 13:40:54 - Optimization level: max
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/components
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/kernel
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/dist
[INFO] 2025-08-31 13:40:54 - All dependencies satisfied
==================================================
SECTION: Verifying Configuration Files
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Verifying Configuration Files
[INFO] 2025-08-31 13:40:54 - ✓ Configuration found: packages.list
[INFO] 2025-08-31 13:40:54 - ✓ Configuration found: sources.conf
[INFO] 2025-08-31 13:40:54 - ✓ Configuration found: modules.conf
[INFO] 2025-08-31 13:40:54 - ✓ Configuration found: kernel.config
[INFO] 2025-08-31 13:40:54 - ✓ Configuration found: firmware.conf
[INFO] 2025-08-31 13:40:54 - All configuration files verified
[INFO] 2025-08-31 13:40:54 - Build environment setup complete
[INFO] 2025-08-31 13:40:54 - Already in container, proceeding with build
==================================================
SECTION: Starting Zero OS Alpine Initramfs Build
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Starting Zero OS Alpine Initramfs Build
==================================================
SECTION: Extracting Alpine Miniroot
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Extracting Alpine Miniroot
[INFO] 2025-08-31 13:40:54 - Alpine version: 3.22
[INFO] 2025-08-31 13:40:54 - Architecture: x86_64
[INFO] 2025-08-31 13:40:54 - Target directory: /workspace/initramfs
[INFO] 2025-08-31 13:40:54 - Cleaning existing target directory
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs
[INFO] 2025-08-31 13:40:54 - Downloading Alpine miniroot from: https://dl-cdn.alpinelinux.org/alpine/v3.22/releases/x86_64/alpine-minirootfs-3.22.0-x86_64.tar.gz
[INFO] 2025-08-31 13:40:54 - Executing: wget --progress=dot:giga -O /tmp/alpine-miniroot-3.22-x86_64.tar.gz https://dl-cdn.alpinelinux.org/alpine/v3.22/releases/x86_64/alpine-minirootfs-3.22.0-x86_64.tar.gz
[INFO] 2025-08-31 13:40:54 - Downloaded miniroot size: 3.5M
[INFO] 2025-08-31 13:40:54 - Extracting miniroot to: /workspace/initramfs
[INFO] 2025-08-31 13:40:54 - Executing: tar -xzf /tmp/alpine-miniroot-3.22-x86_64.tar.gz -C /workspace/initramfs
[INFO] 2025-08-31 13:40:54 - Executing: rm /tmp/alpine-miniroot-3.22-x86_64.tar.gz
[INFO] 2025-08-31 13:40:54 - Extracted Alpine release: 3.22.0
[INFO] 2025-08-31 13:40:54 - Alpine miniroot extraction complete
==================================================
SECTION: Configuring Alpine Repositories
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Configuring Alpine Repositories
[INFO] 2025-08-31 13:40:54 - Configured Alpine repositories for version 3.22
==================================================
SECTION: Configuring Alpine System Settings
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Configuring Alpine System Settings
[INFO] 2025-08-31 13:40:54 - chsh not available in minimal Alpine, skipping shell change
[INFO] 2025-08-31 13:40:54 - Root shell defaults to /bin/sh in Alpine
[INFO] 2025-08-31 13:40:54 - Alpine system configuration complete
==================================================
SECTION: Installing Alpine Packages
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Installing Alpine Packages
==================================================
SECTION: Setting Up Alpine Chroot Environment
==================================================
[INFO] 2025-08-31 13:40:54 - Starting section: Setting Up Alpine Chroot Environment
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs/proc
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs/sys
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs/dev
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs/tmp
[INFO] 2025-08-31 13:40:54 - Executing: mkdir -p /workspace/initramfs/run
[INFO] 2025-08-31 13:40:54 - Mounting essential filesystems in chroot
[INFO] 2025-08-31 13:40:54 - Executing: mount --bind /proc /workspace/initramfs/proc
[INFO] 2025-08-31 13:40:54 - Executing: mount --bind /sys /workspace/initramfs/sys
[INFO] 2025-08-31 13:40:54 - Executing: mount --bind /dev /workspace/initramfs/dev
[INFO] 2025-08-31 13:40:54 - Executing: mount --bind /dev/pts /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:40:54 - Executing: cp -r /etc/resolv.conf /workspace/initramfs/etc/resolv.conf
[INFO] 2025-08-31 13:40:54 - Chroot environment setup complete
[INFO] 2025-08-31 13:40:54 - Updating package repositories
[INFO] 2025-08-31 13:40:54 - Executing: chroot /workspace/initramfs apk update
[INFO] 2025-08-31 13:40:55 - Installing 20 packages:
[INFO] 2025-08-31 13:40:55 - - alpine-baselayout
[INFO] 2025-08-31 13:40:55 - - busybox
[INFO] 2025-08-31 13:40:55 - - musl
[INFO] 2025-08-31 13:40:55 - - eudev
[INFO] 2025-08-31 13:40:55 - - eudev-hwids
[INFO] 2025-08-31 13:40:55 - - eudev-libs
[INFO] 2025-08-31 13:40:55 - - eudev-netifnames
[INFO] 2025-08-31 13:40:55 - - kmod
[INFO] 2025-08-31 13:40:55 - - util-linux
[INFO] 2025-08-31 13:40:55 - - iproute2
[INFO] 2025-08-31 13:40:55 - - ethtool
[INFO] 2025-08-31 13:40:55 - - btrfs-progs
[INFO] 2025-08-31 13:40:55 - - dosfstools
[INFO] 2025-08-31 13:40:55 - - zlib
[INFO] 2025-08-31 13:40:55 - - dhcpcd
[INFO] 2025-08-31 13:40:55 - - tcpdump
[INFO] 2025-08-31 13:40:55 - - bmon
[INFO] 2025-08-31 13:40:55 - - haveged
[INFO] 2025-08-31 13:40:55 - - openssh-server
[INFO] 2025-08-31 13:40:55 - - zellij
[INFO] 2025-08-31 13:40:55 - Installing packages with apk
[INFO] 2025-08-31 13:40:55 - Executing: chroot /workspace/initramfs apk add --no-cache --no-scripts --clean-protected alpine-baselayout busybox musl eudev eudev-hwids eudev-libs eudev-netifnames kmod util-linux iproute2 ethtool btrfs-progs dosfstools zlib dhcpcd tcpdump bmon haveged openssh-server zellij
==================================================
SECTION: Cleaning Up Alpine Chroot Environment
==================================================
[INFO] 2025-08-31 13:40:58 - Starting section: Cleaning Up Alpine Chroot Environment
[INFO] 2025-08-31 13:40:58 - Unmounting: /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:40:58 - Executing: umount /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:40:58 - Unmounting: /workspace/initramfs/dev
[INFO] 2025-08-31 13:40:58 - Executing: umount /workspace/initramfs/dev
[INFO] 2025-08-31 13:40:58 - Unmounting: /workspace/initramfs/sys
[INFO] 2025-08-31 13:40:58 - Executing: umount /workspace/initramfs/sys
[INFO] 2025-08-31 13:40:58 - Unmounting: /workspace/initramfs/proc
[INFO] 2025-08-31 13:40:58 - Executing: umount /workspace/initramfs/proc
[INFO] 2025-08-31 13:40:58 - Chroot cleanup complete
[INFO] 2025-08-31 13:40:58 - Package installation complete
==================================================
SECTION: Installing Alpine Firmware Packages
==================================================
[INFO] 2025-08-31 13:40:58 - Starting section: Installing Alpine Firmware Packages
==================================================
SECTION: Setting Up Alpine Chroot Environment
==================================================
[INFO] 2025-08-31 13:40:58 - Starting section: Setting Up Alpine Chroot Environment
[INFO] 2025-08-31 13:40:58 - Executing: mkdir -p /workspace/initramfs/proc
[INFO] 2025-08-31 13:40:58 - Executing: mkdir -p /workspace/initramfs/sys
[INFO] 2025-08-31 13:40:58 - Executing: mkdir -p /workspace/initramfs/dev
[INFO] 2025-08-31 13:40:58 - Executing: mkdir -p /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:40:58 - Executing: mkdir -p /workspace/initramfs/tmp
[INFO] 2025-08-31 13:40:58 - Executing: mkdir -p /workspace/initramfs/run
[INFO] 2025-08-31 13:40:58 - Mounting essential filesystems in chroot
[INFO] 2025-08-31 13:40:58 - Executing: mount --bind /proc /workspace/initramfs/proc
[INFO] 2025-08-31 13:40:58 - Executing: mount --bind /sys /workspace/initramfs/sys
[INFO] 2025-08-31 13:40:58 - Executing: mount --bind /dev /workspace/initramfs/dev
[INFO] 2025-08-31 13:40:58 - Executing: mount --bind /dev/pts /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:40:58 - Executing: cp -r /etc/resolv.conf /workspace/initramfs/etc/resolv.conf
[INFO] 2025-08-31 13:40:58 - Chroot environment setup complete
[INFO] 2025-08-31 13:40:58 - - linux-firmware-bnx2: Broadcom NetXtreme firmware
[INFO] 2025-08-31 13:40:58 - - linux-firmware-e100: Intel PRO/100 firmware
[INFO] 2025-08-31 13:40:58 - - linux-firmware-intel: Intel network and WiFi firmware (includes e1000e, igb, ixgbe, i40e, ice)
[INFO] 2025-08-31 13:40:58 - - linux-firmware-realtek: Realtek network firmware (r8169, etc.)
[INFO] 2025-08-31 13:40:58 - - linux-firmware-mellanox: Mellanox network firmware
[INFO] 2025-08-31 13:40:58 - - linux-firmware-qlogic: QLogic network firmware
[INFO] 2025-08-31 13:40:58 - Installing 6 firmware packages
[INFO] 2025-08-31 13:40:58 - Executing: chroot /workspace/initramfs apk add --no-cache linux-firmware-bnx2 linux-firmware-e100 linux-firmware-intel linux-firmware-realtek linux-firmware-mellanox linux-firmware-qlogic
[INFO] 2025-08-31 13:41:00 - Checking installed firmware files:
[INFO] 2025-08-31 13:41:00 - Firmware files: 196 (115M)
==================================================
SECTION: Cleaning Up Alpine Chroot Environment
==================================================
[INFO] 2025-08-31 13:41:00 - Starting section: Cleaning Up Alpine Chroot Environment
[INFO] 2025-08-31 13:41:00 - Unmounting: /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:41:00 - Executing: umount /workspace/initramfs/dev/pts
[INFO] 2025-08-31 13:41:00 - Unmounting: /workspace/initramfs/dev
[INFO] 2025-08-31 13:41:00 - Executing: umount /workspace/initramfs/dev
[INFO] 2025-08-31 13:41:00 - Unmounting: /workspace/initramfs/sys
[INFO] 2025-08-31 13:41:00 - Executing: umount /workspace/initramfs/sys
[INFO] 2025-08-31 13:41:00 - Unmounting: /workspace/initramfs/proc
[INFO] 2025-08-31 13:41:00 - Executing: umount /workspace/initramfs/proc
[INFO] 2025-08-31 13:41:00 - Chroot cleanup complete
[INFO] 2025-08-31 13:41:00 - Firmware installation complete: 196 files
==================================================
SECTION: Parsing Sources Configuration
==================================================
[INFO] 2025-08-31 13:41:00 - Starting section: Parsing Sources Configuration
[INFO] 2025-08-31 13:41:00 - Executing: mkdir -p /workspace/components
[INFO] 2025-08-31 13:41:00 - Processing components from: /workspace/config/sources.conf
[INFO] 2025-08-31 13:41:00 - Components directory: /workspace/components
[INFO] 2025-08-31 13:41:00 - Install directory: /workspace/initramfs
[INFO] 2025-08-31 13:41:00 - Building ThreeFold components (hardcoded for reliability)
[INFO] 2025-08-31 13:41:00 - Processing component 1: zinit (git)
==================================================
SECTION: Downloading Git Component: zinit
==================================================
[INFO] 2025-08-31 13:41:00 - Starting section: Downloading Git Component: zinit
[INFO] 2025-08-31 13:41:00 - Repository: https://github.com/threefoldtech/zinit
[INFO] 2025-08-31 13:41:00 - Version/Branch: master
[INFO] 2025-08-31 13:41:00 - Target directory: /workspace/components/zinit
[INFO] 2025-08-31 13:41:00 - Cloning zinit from https://github.com/threefoldtech/zinit
[INFO] 2025-08-31 13:41:00 - Executing: git clone --depth 1 --branch master https://github.com/threefoldtech/zinit /workspace/components/zinit
[INFO] 2025-08-31 13:41:00 - Executing: cd /workspace/components/zinit
[INFO] 2025-08-31 13:41:00 - Current commit: 1852135945d3db78a8a15c9df60adbccf275220e
[INFO] 2025-08-31 13:41:00 - Git component download complete: zinit
==================================================
SECTION: Building Component: zinit
==================================================
[INFO] 2025-08-31 13:41:00 - Starting section: Building Component: zinit
[INFO] 2025-08-31 13:41:00 - Executing: cd /workspace/components/zinit
[INFO] 2025-08-31 13:41:00 - Build function: build_zinit
[INFO] 2025-08-31 13:41:00 - Working directory: /workspace
[INFO] 2025-08-31 13:41:00 - Executing build function: build_zinit
==================================================
SECTION: Building zinit with musl target
==================================================
[INFO] 2025-08-31 13:41:00 - Starting section: Building zinit with musl target
==================================================
SECTION: Setting Up Rust Environment
==================================================
[INFO] 2025-08-31 13:41:00 - Starting section: Setting Up Rust Environment
[INFO] 2025-08-31 13:41:00 - Using system Rust (Alpine) with musl
[INFO] 2025-08-31 13:41:00 - Rust environment configured for musl builds
[INFO] 2025-08-31 13:41:00 - RUST_TARGET: x86_64-unknown-linux-musl
[INFO] 2025-08-31 13:41:00 - RUSTFLAGS: -C target-feature=+crt-static -C linker=musl-gcc
[INFO] 2025-08-31 13:41:00 - CC: musl-gcc
[INFO] 2025-08-31 13:41:00 - Building zinit from: /workspace/components/zinit
[INFO] 2025-08-31 13:41:00 - Executing: cd /workspace/components/zinit
[INFO] 2025-08-31 13:41:00 - Current directory: /workspace
[INFO] 2025-08-31 13:41:00 - Executing: cargo build --release --target x86_64-unknown-linux-musl
[ERROR] 2025-08-31 13:41:00 - Command failed: cargo build --release --target x86_64-unknown-linux-musl
[ERROR] 2025-08-31 13:41:00 - Output: error: could not find `Cargo.toml` in `/workspace` or any parent directory
[INFO] 2025-08-31 13:41:00 - Build process exiting with code: 1
[INFO] 2025-08-31 15:41:01 - Build process exiting with code: 1

View File

@@ -7,7 +7,6 @@ linux-firmware-bnx2:Broadcom NetXtreme firmware
linux-firmware-e100:Intel PRO/100 firmware
linux-firmware-intel:Intel network and WiFi firmware (includes e1000e, igb, ixgbe, i40e, ice)
linux-firmware-realtek:Realtek network firmware (r8169, etc.)
linux-firmware-mellanox:Mellanox network firmware
linux-firmware-qlogic:QLogic network firmware
# Storage firmware (if needed)

View File

@@ -1,20 +1,31 @@
#!/bin/sh
#!/bin/sh -x
# Alpine-based Zero-OS Init Script
# Maintains identical flow to original busybox version
if [ ! -c /dev/console ]; then
mknod /dev/console c 5 1
fi
# Redirect output to console explicitly
exec > /dev/console 2>&1
echo ""
echo "============================================"
echo "== ZERO-OS ALPINE INITRAMFS =="
echo "============================================"
echo "[+] creating ram filesystem"
target="/mnt/root"
mkdir -p $target
mount -t proc proc /proc
mount -t sysfs sysfs /sys
mount -t tmpfs tmpfs /mnt/root -o size=1536M
mount -t devtmpfs devtmpfs /dev
mkdir -p /dev/pts
mount -t devpts devpts /dev/pts
echo "[+] building ram filesystem"
target="/mnt/root"
# Copy Alpine filesystem to tmpfs (same as original)
echo " copying /bin..."
@@ -102,3 +113,5 @@ fi
echo "[+] switching root"
echo " exec switch_root /mnt/root /sbin/zinit init"
exec switch_root /mnt/root /sbin/zinit init
##

View File

@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/sh -x
# Alpine-based Zero-OS Init Script
# Maintains identical flow to original busybox version
@@ -8,6 +8,8 @@ echo "== ZERO-OS ALPINE INITRAMFS =="
echo "============================================"
echo "[+] creating ram filesystem"
target="/mnt/root"
mkdir -p $target
mount -t proc proc /proc
mount -t sysfs sysfs /sys
mount -t tmpfs tmpfs /mnt/root -o size=1536M
@@ -101,3 +103,5 @@ fi
echo "[+] switching root"
echo " exec switch_root /mnt/root /sbin/zinit init"
exec switch_root /mnt/root /sbin/zinit init
##

View File

@@ -20,7 +20,6 @@ libc-utils
linux-firmware-bnx2
linux-firmware-e100
linux-firmware-intel
linux-firmware-mellanox
linux-firmware-qlogic
linux-firmware-realtek
musl

View File

@@ -1 +0,0 @@
lc_ini_bundle_2010_1006.bin.zst

View File

@@ -1 +0,0 @@
lc_ini_bundle_2010_1006.bin.zst

View File

@@ -1 +0,0 @@
lc_ini_bundle_2010_1006.bin.zst

219
scripts/dev-container.sh Executable file
View File

@@ -0,0 +1,219 @@
#!/bin/bash
# Efficient development container workflow - persistent container for debugging
set -euo pipefail
# Resolve this script's own directory and the repository root above it.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
# Container configuration
# Name of the persistent dev container and the image it runs.
CONTAINER_NAME="zero-os-dev"
BUILDER_IMAGE="zero-os-builder:latest"
# Source common functions
# Provides log_info / log_error / section_header / safe_execute used below.
source "${SCRIPT_DIR}/lib/common.sh"
# Print the CLI help text to stdout; $0 expands to the invoked script path.
show_usage() {
  cat <<USAGE
Zero OS Development Container Manager
Usage: $0 [COMMAND]
Commands:
start Start persistent development container
stop Stop development container
shell Enter development container shell
build Run build in persistent container
clean Clean and restart container
status Show container status
logs Show container logs
Environment Variables:
DEBUG Enable debug output (default: 1 for dev)
Examples:
$0 start # Start persistent container
$0 shell # Enter container for debugging
$0 build # Run build in persistent container
USAGE
}
# Ensure the persistent dev container exists and is running.
# Reuses a stopped container; creates a new one only when none exists.
dev_container_start() {
  section_header "Starting Development Container"

  # Reuse an existing container when present.
  if podman container exists "$CONTAINER_NAME" 2>/dev/null; then
    local state
    state="$(podman container inspect "$CONTAINER_NAME" --format '{{.State.Status}}')"
    if [[ "$state" == *running* ]]; then
      log_info "Development container already running"
    else
      log_info "Starting existing development container"
      safe_execute podman start "$CONTAINER_NAME"
    fi
    return 0
  fi

  log_info "Creating new development container: ${CONTAINER_NAME}"
  # Create persistent container with all necessary mounts and environment;
  # 'sleep infinity' keeps it alive for later 'podman exec' sessions.
  safe_execute podman run -d \
    --name "$CONTAINER_NAME" \
    --privileged \
    -v "${PROJECT_ROOT}:/workspace" \
    -w /workspace \
    -e DEBUG=1 \
    -e ALPINE_VERSION=3.22 \
    -e KERNEL_VERSION=6.12.44 \
    -e RUST_TARGET=x86_64-unknown-linux-musl \
    -e OPTIMIZATION_LEVEL=max \
    "$BUILDER_IMAGE" \
    sleep infinity

  log_info "Development container started successfully"
  log_info "Container name: ${CONTAINER_NAME}"
  log_info "Access with: $0 shell"
}
# Stop the persistent dev container if it exists (no-op otherwise).
dev_container_stop() {
  section_header "Stopping Development Container"

  # Guard clause: nothing to do when the container was never created.
  if ! podman container exists "$CONTAINER_NAME" 2>/dev/null; then
    log_info "Development container not found"
    return 0
  fi

  log_info "Stopping development container: ${CONTAINER_NAME}"
  safe_execute podman stop "$CONTAINER_NAME"
  log_info "Development container stopped"
}
# Open an interactive bash shell inside the dev container, creating and/or
# starting the container first when necessary. Replaces the current process.
dev_container_shell() {
  section_header "Entering Development Container Shell"

  # Create on demand.
  podman container exists "$CONTAINER_NAME" 2>/dev/null || {
    log_info "Development container not found, starting..."
    dev_container_start
  }

  # Start it if it exists but is stopped.
  local state
  state="$(podman container inspect "$CONTAINER_NAME" --format '{{.State.Status}}')"
  if [[ "$state" != *running* ]]; then
    log_info "Starting stopped development container"
    safe_execute podman start "$CONTAINER_NAME"
  fi

  log_info "Entering container shell (exit with 'exit' or Ctrl+D)"
  # Use direct execution for interactive shell (don't use safe_execute).
  exec podman exec -it "$CONTAINER_NAME" /bin/bash
}
# Run the build script inside the persistent dev container with real-time
# output, creating/starting the container first when necessary.
# Arguments: forwarded verbatim to ./scripts/build.sh
# Returns:   the build's exit code
function dev_container_build() {
    section_header "Running Build in Development Container"

    if ! podman container exists "$CONTAINER_NAME" 2>/dev/null; then
        log_info "Development container not found, starting..."
        dev_container_start
    fi

    if ! podman container inspect "$CONTAINER_NAME" --format '{{.State.Status}}' | grep -q "running"; then
        log_info "Starting stopped development container"
        safe_execute podman start "$CONTAINER_NAME"
    fi

    log_info "Running build in persistent container (real-time output)"
    log_info "Command: podman exec $CONTAINER_NAME ./scripts/build.sh $*"

    # Use direct execution to show real-time output (bypass safe_execute).
    # Capture the status via '|| exit_code=$?' so 'set -euo pipefail' does not
    # abort the whole script on a failed build — with the previous
    # 'podman exec ...; local exit_code=$?' form the failure branch below was
    # unreachable because set -e exited before the assignment ran.
    local exit_code=0
    podman exec "$CONTAINER_NAME" ./scripts/build.sh "$@" || exit_code=$?

    if [[ $exit_code -eq 0 ]]; then
        log_info "Build completed successfully in container"
    else
        log_error "Build failed in container with exit code: $exit_code"
    fi

    return $exit_code
}
# Remove any existing dev container and bring up a fresh one.
dev_container_clean() {
  section_header "Cleaning Development Container"

  # Force-remove the old container if present, then recreate.
  if podman container exists "$CONTAINER_NAME" 2>/dev/null; then
    log_info "Removing existing development container"
    safe_execute podman rm -f "$CONTAINER_NAME"
  fi

  log_info "Starting fresh development container"
  dev_container_start
}
# Report whether the dev container exists, its state, and creation time.
function dev_container_status() {
    section_header "Development Container Status"

    if podman container exists "$CONTAINER_NAME" 2>/dev/null; then
        # Declare and assign separately so a failing 'podman inspect' is not
        # masked by 'local' always returning 0 (ShellCheck SC2155).
        local status created
        status=$(podman container inspect "$CONTAINER_NAME" --format '{{.State.Status}}')
        created=$(podman container inspect "$CONTAINER_NAME" --format '{{.Created}}')

        log_info "Container: ${CONTAINER_NAME}"
        log_info "Status: ${status}"
        log_info "Created: ${created}"

        if [[ "$status" == "running" ]]; then
            log_info "✓ Ready for development"
        else
            log_info "⚠ Container stopped - use '$0 start' to start"
        fi
    else
        log_info "Development container not found"
        log_info "Use '$0 start' to create"
    fi
}
# Show the dev container's logs; fails with status 1 when it does not exist.
dev_container_logs() {
  section_header "Development Container Logs"

  # Guard clause: cannot show logs for a container that was never created.
  if ! podman container exists "$CONTAINER_NAME" 2>/dev/null; then
    log_error "Development container not found"
    return 1
  fi

  safe_execute podman logs "$CONTAINER_NAME"
}
# Main function: dispatch the first CLI argument to the matching subcommand.
main() {
  local cmd="${1:-help}"

  case "$cmd" in
    start)  dev_container_start ;;
    stop)   dev_container_stop ;;
    shell)  dev_container_shell ;;
    build)
      # Drop the subcommand itself; forward the rest to the build.
      shift
      dev_container_build "$@"
      ;;
    clean)  dev_container_clean ;;
    status) dev_container_status ;;
    logs)   dev_container_logs ;;
    help|--help|-h)
      show_usage
      ;;
    *)
      log_error "Unknown command: $cmd"
      show_usage
      exit 1
      ;;
  esac
}

main "$@"

View File

@@ -374,17 +374,18 @@ function alpine_install_firmware() {
local initramfs_dir="$1"
local firmware_conf="$2"
section_header "Installing Required Firmware Packages"
section_header "Installing Required Firmware Packages (Selective)"
# Use smart firmware selection from module analysis if available
local firmware_packages=()
if [[ -n "${REQUIRED_FIRMWARE_PACKAGES:-}" ]]; then
log_info "Using intelligent firmware selection based on required modules"
log_info "Using intelligent firmware selection based on COPIED modules only"
read -ra firmware_packages <<< "$REQUIRED_FIRMWARE_PACKAGES"
log_info "Required firmware packages (${#firmware_packages[@]}):"
for package in "${firmware_packages[@]}"; do
log_info " Required by modules: ${package}"
log_info " ${package}"
done
else
log_info "Falling back to firmware configuration file"

View File

@@ -205,20 +205,41 @@ function docker_commit_builder() {
log_info "Container committed successfully: ${new_tag}"
}
# Clean up container images
# Clean up container images and running containers
function docker_cleanup() {
local keep_builder="${1:-false}"
section_header "Cleaning Up Container Images"
section_header "Cleaning Up Containers and Images"
if [[ "$keep_builder" != "true" ]]; then
log_info "Cleaning up builder containers and images"
# Stop and remove any containers using the builder image
local containers_using_image=$(${CONTAINER_RUNTIME} ps -a --filter "ancestor=${BUILDER_IMAGE}" --format "{{.ID}}" 2>/dev/null || true)
if [[ -n "$containers_using_image" ]]; then
log_info "Stopping containers using builder image"
for container_id in $containers_using_image; do
log_info "Stopping container: $container_id"
${CONTAINER_RUNTIME} stop "$container_id" 2>/dev/null || true
${CONTAINER_RUNTIME} rm "$container_id" 2>/dev/null || true
done
fi
# Stop and remove development container if it exists
local dev_container="zero-os-dev"
if ${CONTAINER_RUNTIME} container exists "$dev_container" 2>/dev/null; then
log_info "Removing development container: $dev_container"
${CONTAINER_RUNTIME} rm -f "$dev_container" 2>/dev/null || true
fi
# Now remove the images
log_info "Removing builder images"
safe_execute ${CONTAINER_RUNTIME} rmi "${BUILDER_IMAGE}" || true
safe_execute ${CONTAINER_RUNTIME} rmi "${BUILDER_IMAGE}-cached" || true
${CONTAINER_RUNTIME} rmi "${BUILDER_IMAGE}" 2>/dev/null || log_warn "Could not remove ${BUILDER_IMAGE} (may not exist)"
${CONTAINER_RUNTIME} rmi "${BUILDER_IMAGE}-cached" 2>/dev/null || log_warn "Could not remove ${BUILDER_IMAGE}-cached (may not exist)"
fi
log_info "Pruning unused containers and images"
safe_execute ${CONTAINER_RUNTIME} system prune -f
${CONTAINER_RUNTIME} system prune -f 2>/dev/null || log_warn "Container prune failed"
log_info "Container cleanup complete"
}

View File

@@ -27,18 +27,9 @@ function initramfs_setup_zinit() {
return 1
fi
# Copy config/init as /sbin/init (not a symlink to zinit)
log_info "Installing config/init as /sbin/init"
safe_execute rm -f "${initramfs_dir}/sbin/init"
local config_init="${PROJECT_ROOT}/config/init"
if [[ -f "$config_init" ]]; then
safe_execute cp "$config_init" "${initramfs_dir}/sbin/init"
safe_execute chmod 755 "${initramfs_dir}/sbin/init"
log_info "✓ Installed config/init as /sbin/init"
else
log_error "config/init not found: $config_init"
return 1
fi
# Note: /sbin/init is not needed - config/init calls "switch_root /mnt/root /sbin/zinit init"
# So after switch_root, /sbin/zinit is the init system (not /sbin/init)
log_info "zinit will be called directly via '/sbin/zinit init' after switch_root"
# Copy zinit configuration (all YAML and scripts)
log_info "Installing zinit configuration"
@@ -117,29 +108,16 @@ function initramfs_copy_components() {
local copied_count=0
local missing_count=0
# Copy zinit to /sbin
# Copy zinit to /sbin (NO stripping/UPX - critical init system)
local zinit_binary="${components_dir}/zinit/target/x86_64-unknown-linux-musl/release/zinit"
if [[ -f "$zinit_binary" ]]; then
safe_mkdir "${initramfs_dir}/sbin"
safe_execute cp "$zinit_binary" "${initramfs_dir}/sbin/zinit"
safe_execute chmod +x "${initramfs_dir}/sbin/zinit"
# Strip and UPX compress zinit
local original_size=$(get_file_size "${initramfs_dir}/sbin/zinit")
if strip "${initramfs_dir}/sbin/zinit" 2>/dev/null || true; then
log_debug "Stripped zinit"
else
log_debug "zinit already stripped or strip failed"
fi
if command_exists "upx" && upx --best --force "${initramfs_dir}/sbin/zinit" >/dev/null 2>&1 || true; then
log_debug "UPX compressed zinit"
else
log_debug "UPX failed or already compressed"
fi
local final_size=$(get_file_size "${initramfs_dir}/sbin/zinit")
log_info "✓ Copied zinit ${original_size} → ${final_size} to /sbin/zinit"
# Keep zinit unmodified to prevent segfaults after switch_root
local size=$(get_file_size "${initramfs_dir}/sbin/zinit")
log_info "✓ Copied zinit (${size}) to /sbin/zinit (no optimization - critical binary)"
((copied_count++))
else
log_error "✗ zinit binary not found: ${zinit_binary}"
@@ -669,7 +647,6 @@ function initramfs_validate() {
# Check essential files and directories
local essential_items=(
"init"
"sbin/init"
"sbin/zinit"
"bin/busybox"
"etc/zinit"
@@ -699,12 +676,11 @@ function initramfs_validate() {
((errors++))
fi
# Check that /sbin/init is a script
if [[ -f "${initramfs_dir}/sbin/init" && -x "${initramfs_dir}/sbin/init" ]]; then
log_info " /sbin/init script found"
# Check that /sbin/init does NOT exist (zinit called directly)
if [[ -e "${initramfs_dir}/sbin/init" ]]; then
log_warn " /sbin/init exists but should not (zinit called directly)"
else
log_error " /sbin/init is missing or not executable"
((errors++))
log_info " /sbin/init correctly absent (zinit called directly)"
fi
# Check zinit configuration

View File

@@ -10,6 +10,20 @@ KERNEL_VERSION="${KERNEL_VERSION:-6.12.44}"
KERNEL_SOURCE_URL="${KERNEL_SOURCE_URL:-https://cdn.kernel.org/pub/linux/kernel}"
KERNEL_CONFIG_SOURCE="${KERNEL_CONFIG_SOURCE:-${PROJECT_ROOT}/configs/kernel-config-generic}"
# Compute the effective kernel version string: the base version with the
# CONFIG_LOCALVERSION suffix (if any) from the kernel config appended.
# Arguments: $1 - base version (default: $KERNEL_VERSION)
#            $2 - kernel config path (default: $PROJECT_ROOT/config/kernel.config)
# Outputs:   full version string on stdout
function kernel_get_full_version() {
  local base="${1:-$KERNEL_VERSION}"
  local cfg="${2:-${PROJECT_ROOT}/config/kernel.config}"
  local suffix=""
  local line=""
  if [[ -f "$cfg" ]]; then
    # Read the CONFIG_LOCALVERSION line once; tolerate absence under set -e.
    line=$(grep "^CONFIG_LOCALVERSION=" "$cfg" || true)
    if [[ -n "$line" ]]; then
      # Value is double-quoted in .config files: take the quoted field.
      suffix=$(printf '%s\n' "$line" | cut -d'"' -f2)
    fi
  fi
  printf '%s\n' "${base}${suffix}"
}
# Download kernel source
function kernel_download_source() {
local kernel_dir="$1"
@@ -176,10 +190,6 @@ function kernel_build_with_initramfs() {
local source_dir="${kernel_dir}/current"
safe_execute cd "$source_dir"
# Clean previous build
log_info "Cleaning previous kernel build"
safe_execute make clean
# Determine number of cores for parallel build
local cores=$(nproc)
local jobs=$((cores > 1 ? cores - 1 : 1)) # Leave one core free
@@ -215,13 +225,13 @@ function kernel_build_with_initramfs() {
fi
}
# Build modules for initramfs
# Build and install modules in container for proper dependency resolution
function kernel_build_modules() {
local kernel_dir="$1"
local modules_install_dir="$2"
local version="${3:-$KERNEL_VERSION}"
local initramfs_dir="$2"
local base_version="${3:-$KERNEL_VERSION}"
section_header "Building Kernel Modules"
section_header "Building Kernel Modules in Container"
local source_dir="${kernel_dir}/current"
@@ -232,6 +242,11 @@ function kernel_build_modules() {
safe_execute cd "$source_dir"
# Get the full kernel version including LOCALVERSION
local full_version=$(kernel_get_full_version "$base_version" "${PROJECT_ROOT}/config/kernel.config")
log_info "Base kernel version: ${base_version}"
log_info "Full kernel version: ${full_version}"
# Build modules
local cores=$(nproc)
local jobs=$((cores > 1 ? cores - 1 : 1))
@@ -239,19 +254,30 @@ function kernel_build_modules() {
log_info "Building kernel modules with ${jobs} parallel jobs"
safe_execute make -j${jobs} modules
# Install modules to staging area
log_info "Installing modules to: ${modules_install_dir}"
safe_mkdir "$modules_install_dir"
safe_execute make modules_install INSTALL_MOD_PATH="$modules_install_dir"
# Install modules in container for proper modinfo/depmod access
local container_modules_dir="/lib/modules"
log_info "Installing modules in container: ${container_modules_dir}"
safe_execute make modules_install INSTALL_MOD_PATH=/
# Run depmod to create module dependencies
local modules_dir="${modules_install_dir}/lib/modules/${version}"
if [[ -d "$modules_dir" ]]; then
log_info "Running depmod for module dependencies"
safe_execute depmod -a -b "$modules_install_dir" "$version"
# Run depmod in container context for proper dependency resolution
log_info "Running depmod in container for ${full_version}"
safe_execute depmod -a "$full_version"
# Verify module installation in container
if [[ -d "/lib/modules/${full_version}" ]]; then
local module_count=$(find "/lib/modules/${full_version}" -name "*.ko*" | wc -l)
log_info "Container modules installed: ${module_count} modules in /lib/modules/${full_version}"
# Export the container modules path for dependency resolution
export CONTAINER_MODULES_PATH="/lib/modules/${full_version}"
export KERNEL_FULL_VERSION="$full_version"
log_info "Module dependency resolution will use: ${CONTAINER_MODULES_PATH}"
else
log_error "Module installation in container failed"
return 1
fi
log_info "Kernel modules build complete"
log_info "Kernel modules build and container installation complete"
}
# Clean kernel build artifacts
@@ -278,4 +304,4 @@ function kernel_cleanup() {
# Export functions
export -f kernel_download_source kernel_apply_config kernel_modify_config_for_initramfs
export -f kernel_build_with_initramfs kernel_build_modules kernel_cleanup
export -f kernel_build_with_initramfs kernel_build_modules kernel_cleanup kernel_get_full_version

Some files were not shown because too many files have changed in this diff Show More