Compare commits

..

4 Commits

Author SHA1 Message Date
e57a6fb457 ci-test: restore tempfile as dev-dep for tests
Some checks failed
CI / build (push) Has been cancelled
CI / build (pull_request) Has been cancelled
2026-01-11 01:29:52 +01:00
42ef14e17a ci-test: make deb tests sequential 2026-01-11 01:29:52 +01:00
d89606ded2 ci-test: ensure unique directory name for parallel testing 2026-01-11 01:29:52 +01:00
acb8a6657a ci-test: test running only cross test 2026-01-11 01:29:52 +01:00
22 changed files with 385 additions and 1672 deletions

View File

@@ -23,7 +23,7 @@ jobs:
- uses: actions/checkout@v6 - uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@stable - uses: dtolnay/rust-toolchain@stable
with: with:
components: rustfmt, clippy components: rustfmt
- name: Check format - name: Check format
run: cargo fmt --check run: cargo fmt --check
- name: Install build dependencies - name: Install build dependencies
@@ -32,12 +32,6 @@ jobs:
sudo apt-get install -y pkg-config libssl-dev libgpg-error-dev libgpgme-dev sudo apt-get install -y pkg-config libssl-dev libgpg-error-dev libgpgme-dev
- name: Build - name: Build
run: cargo build run: cargo build
env:
RUSTFLAGS: -Dwarnings
- name: Lint
run: cargo clippy --all-targets --all-features
env:
RUSTFLAGS: -Dwarnings
- name: Install runtime system dependencies - name: Install runtime system dependencies
run: | run: |
sudo apt-get update sudo apt-get update
@@ -49,17 +43,3 @@ jobs:
env: env:
RUST_LOG: debug RUST_LOG: debug
run: timeout 30m cargo test -- --nocapture run: timeout 30m cargo test -- --nocapture
snap:
needs: build
runs-on: ubuntu-latest
outputs:
snap-file: ${{ steps.build-snap.outputs.snap }}
steps:
- uses: actions/checkout@v4
- uses: snapcore/action-build@v1
id: build-snap
- uses: actions/upload-artifact@v3
with:
name: snap
path: ${{ steps.build-snap.outputs.snap }}

View File

@@ -9,7 +9,6 @@ clap = { version = "4.5.51", features = ["cargo"] }
cmd_lib = "2.0.0" cmd_lib = "2.0.0"
flate2 = "1.1.5" flate2 = "1.1.5"
serde = { version = "1.0.228", features = ["derive"] } serde = { version = "1.0.228", features = ["derive"] }
libc = "0.2"
csv = "1.3.0" csv = "1.3.0"
reqwest = { version = "0.12.9", features = ["blocking", "json", "stream"] } reqwest = { version = "0.12.9", features = ["blocking", "json", "stream"] }
git2 = "0.20.2" git2 = "0.20.2"
@@ -29,8 +28,6 @@ serde_json = "1.0.145"
directories = "6.0.0" directories = "6.0.0"
ssh2 = "0.9.5" ssh2 = "0.9.5"
gpgme = "0.11" gpgme = "0.11"
serde_yaml = "0.9"
lazy_static = "1.4.0"
[dev-dependencies] [dev-dependencies]
test-log = "0.2.19" test-log = "0.2.19"

View File

@@ -24,10 +24,8 @@ Options:
Commands and workflows include: Commands and workflows include:
``` ```
Commands: Commands:
pull Pull a source package from the archive or git pull Get a source package from the archive or git
chlog Auto-generate changelog entry, editing it, committing it afterwards chlog Auto-generate changelog entry, editing it, committing it afterwards
build Build the source package (into a .dsc)
deb Build the source package into binary package (.deb)
help Print this message or the help of the given subcommand(s) help Print this message or the help of the given subcommand(s)
``` ```
@@ -82,7 +80,7 @@ Missing features:
- [ ] `pkh pull` - [ ] `pkh pull`
- [x] Obtain package sources from git - [x] Obtain package sources from git
- [x] Obtain package sources from the archive (fallback) - [x] Obtain package sources from the archive (fallback)
- [x] Obtain package source from PPA (--ppa) - [ ] Obtain package source from PPA (--ppa)
- [ ] Obtain a specific version of the package - [ ] Obtain a specific version of the package
- [x] Fetch the correct git branch for series on Ubuntu - [x] Fetch the correct git branch for series on Ubuntu
- [ ] Try to fetch the correct git branch for series on Debian, or fallback to the archive - [ ] Try to fetch the correct git branch for series on Debian, or fallback to the archive
@@ -98,7 +96,7 @@ Missing features:
- [ ] Three build modes: - [ ] Three build modes:
- [ ] Build locally (discouraged) - [ ] Build locally (discouraged)
- [x] Build using sbuild+unshare, with binary emulation (default) - [x] Build using sbuild+unshare, with binary emulation (default)
- [x] Cross-compilation - [ ] Cross-compilation
- [ ] Async build - [ ] Async build
- [ ] `pkh status` - [ ] `pkh status`
- [ ] Show build status - [ ] Show build status

View File

@@ -1,28 +0,0 @@
## Static data needed for pkh operations
## Instead of hardcoding the data in code, data files allow to quickly
## update and maintain such data in one unique place
## The goal is to have the minimal possible set of data necessary
## to grab the actual data. For example we don't want to store every Ubuntu
## or Debian series, but rather an URL where we can properly access that data.
dist_info:
local: /usr/share/distro-info/{dist}
network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/
dist:
debian:
base_url: http://deb.debian.org/debian
archive_keyring: https://ftp-master.debian.org/keys/archive-key-{series_num}.asc
pockets:
- proposed-updates
- updates
series:
local: /usr/share/distro-info/debian.csv
network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/debian.csv
ubuntu:
base_url: http://archive.ubuntu.com/ubuntu
archive_keyring: http://archive.ubuntu.com/ubuntu/project/ubuntu-archive-keyring.gpg
pockets:
- proposed
- updates
series:
local: /usr/share/distro-info/ubuntu.csv
network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/ubuntu.csv

View File

@@ -1,14 +0,0 @@
# Quirks configuration for package-specific workarounds
# This file defines package-specific quirks that are applied during pull and deb operations
quirks:
# Add more packages and their quirks as needed
# example-package:
# pull:
# method: archive
# deb:
# extra_dependencies:
# - another-dependency
# parameters:
# key: value

View File

@@ -1,42 +0,0 @@
name: pkh
base: core24
summary: pkh is a packaging helper for Debian/Ubuntu packages
description: |
pkh aims at wrapping the different debian tools and workflows
into one tool, that would have the same interface for everything,
while being smarter at integrating all workflows.
adopt-info: pkh-part
confinement: devmode
apps:
pkh:
command: bin/pkh
parts:
pkh-part:
plugin: rust
source: .
override-pull: |
craftctl default
craftctl set version=$(git rev-parse --short=11 HEAD)
craftctl set grade="devel"
build-packages:
- pkg-config
- libssl-dev
- libgpg-error-dev
- libgpgme-dev
stage-packages:
- libgpgme11t64
- git
- curl
- pristine-tar
- sbuild
- mmdebstrap
- util-linux
- dpkg-dev
stage:
- -usr/lib/x86_64-linux-gnu/libicuio.so.74.2
- -usr/lib/x86_64-linux-gnu/libicutest.so.74.2
- -usr/lib/x86_64-linux-gnu/libicutu.so.74.2
- -usr/lib/x86_64-linux-gnu/libicui18n.so.74.2

View File

@@ -1,216 +0,0 @@
//! APT keyring management for mmdebstrap and PPA packages
//!
//! Provides functions to ensure that archive keyrings are available
//! for mmdebstrap operations and for PPA packages by downloading them.
use crate::context;
use crate::distro_info;
use serde::Deserialize;
use std::error::Error;
use std::path::{Path, PathBuf};
use std::sync::Arc;
/// Launchpad API response structure for PPA information
#[derive(Deserialize)]
struct LaunchpadPpaResponse {
signing_key_fingerprint: String,
}
/// Download a keyring to the application cache directory and return the path
///
/// This function downloads the keyring to a user-writable cache directory
/// instead of the system apt keyring directory, allowing non-root usage.
/// The returned path can be passed to mmdebstrap via --keyring.
///
/// For Debian keyrings (which are ASCII-armored .asc files), the key is
/// converted to binary GPG format using gpg --dearmor.
///
/// # Arguments
/// * `ctx` - Optional context to use
/// * `series` - The distribution series (e.g., "noble", "sid")
///
/// # Returns
/// The path to the downloaded keyring file (in binary GPG format)
pub async fn download_cache_keyring(
ctx: Option<Arc<context::Context>>,
series: &str,
) -> Result<PathBuf, Box<dyn Error>> {
let ctx = ctx.unwrap_or_else(context::current);
// Obtain keyring URL from distro_info
let keyring_url = distro_info::get_keyring_url(series).await?;
log::debug!("Downloading keyring from: {}", keyring_url);
// Get the application cache directory
let proj_dirs = directories::ProjectDirs::from("com", "pkh", "pkh")
.ok_or("Could not determine project directories")?;
let cache_dir = proj_dirs.cache_dir();
// Create cache directory if it doesn't exist
if !ctx.exists(cache_dir)? {
ctx.command("mkdir").arg("-p").arg(cache_dir).status()?;
}
// Extract the original filename from the keyring URL
let filename = keyring_url
.split('/')
.next_back()
.unwrap_or("pkh-{}.gpg")
.replace("{}", series);
let download_path = cache_dir.join(&filename);
// Download the keyring using curl
let mut curl_cmd = ctx.command("curl");
curl_cmd
.arg("-s")
.arg("-f")
.arg("-L")
.arg(&keyring_url)
.arg("--output")
.arg(&download_path);
let status = curl_cmd.status()?;
if !status.success() {
return Err(format!("Failed to download keyring from {}", keyring_url).into());
}
// If the downloaded file is an ASCII-armored key (.asc), convert it to binary GPG format
// mmdebstrap's --keyring option expects binary GPG keyrings
let keyring_path = if filename.ends_with(".asc") {
let binary_filename = filename.strip_suffix(".asc").unwrap_or(&filename);
let binary_path = cache_dir.join(format!("{}.gpg", binary_filename));
log::debug!("Converting ASCII-armored key to binary GPG format");
let mut gpg_cmd = ctx.command("gpg");
gpg_cmd
.arg("--dearmor")
.arg("--output")
.arg(&binary_path)
.arg(&download_path);
let status = gpg_cmd.status()?;
if !status.success() {
return Err("Failed to convert keyring to binary format"
.to_string()
.into());
}
// Remove the original .asc file
let _ = ctx.command("rm").arg("-f").arg(&download_path).status();
binary_path
} else {
download_path
};
log::info!(
"Successfully downloaded keyring for {} to {}",
series,
keyring_path.display()
);
Ok(keyring_path)
}
/// Download and import a PPA key using Launchpad API
///
/// # Arguments
/// * `ctx` - Optional context to use
/// * `ppa_owner` - PPA owner (username)
/// * `ppa_name` - PPA name
///
/// # Returns
/// Result indicating success or failure
pub async fn download_trust_ppa_key(
ctx: Option<Arc<context::Context>>,
ppa_owner: &str,
ppa_name: &str,
) -> Result<(), Box<dyn Error>> {
let ctx = ctx.unwrap_or_else(context::current);
// Create trusted.gpg.d directory if it doesn't exist
let trusted_gpg_d = "/etc/apt/trusted.gpg.d";
if !ctx.exists(Path::new(trusted_gpg_d))? {
ctx.command("mkdir").arg("-p").arg(trusted_gpg_d).status()?;
}
let key_filename = format!("{}-{}.asc", ppa_owner, ppa_name);
let key_path = format!("{}/{}", trusted_gpg_d, key_filename);
log::debug!(
"Retrieving PPA key for {}/{} using Launchpad API",
ppa_owner,
ppa_name
);
// Get PPA information from Launchpad API to get signing key fingerprint
// Use the correct devel API endpoint
let api_url = format!(
"https://api.launchpad.net/1.0/~{}/+archive/ubuntu/{}",
ppa_owner, ppa_name
);
log::debug!("Querying Launchpad API: {}", api_url);
let api_response = ctx
.command("curl")
.arg("-s")
.arg("-f")
.arg("-H")
.arg("Accept: application/json")
.arg(&api_url)
.output()?;
if !api_response.status.success() {
return Err(format!(
"Failed to query Launchpad API for PPA {}/{}",
ppa_owner, ppa_name
)
.into());
}
// Parse the JSON response to extract the signing key fingerprint
let api_response_str = String::from_utf8_lossy(&api_response.stdout);
let ppa_response: LaunchpadPpaResponse =
serde_json::from_str(&api_response_str).map_err(|e| {
format!(
"Failed to parse JSON response from Launchpad API for {}/{}: {}",
ppa_owner, ppa_name, e
)
})?;
let fingerprint = ppa_response.signing_key_fingerprint;
log::debug!("Found PPA signing key fingerprint: {}", fingerprint);
// Download the actual key from the keyserver using the fingerprint
let keyserver_url = format!(
"https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x{}",
fingerprint
);
log::debug!("Downloading key from keyserver: {}", keyserver_url);
let mut curl_cmd = ctx.command("curl");
curl_cmd
.arg("-s")
.arg("-f")
.arg("-L")
.arg(&keyserver_url)
.arg("--output")
.arg(&key_path);
let status = curl_cmd.status()?;
if !status.success() {
return Err(format!(
"Failed to download PPA key from keyserver for fingerprint {}",
fingerprint
)
.into());
}
log::info!(
"Successfully downloaded and installed PPA key for {}/{} (fingerprint: {}) to {}",
ppa_owner,
ppa_name,
fingerprint,
key_path
);
Ok(())
}

View File

@@ -1,2 +1 @@
pub mod keyring;
pub mod sources; pub mod sources;

View File

@@ -208,9 +208,9 @@ impl UnshareDriver {
} }
cmd.arg("--").arg("bash").arg("-c").arg(format!( cmd.arg("--").arg("bash").arg("-c").arg(format!(
"mount -t proc proc /proc; mkdir /dev/pts; mount -t devpts devpts /dev/pts; touch /dev/ptmx; mount --bind /dev/pts/ptmx /dev/ptmx; {} {}", "mount -t proc proc /proc; mount -t devpts devpts /dev/pts; mount --bind /dev/pts/ptmx /dev/ptmx; {} {}",
program, program,
args.iter().map(|a| format!("\"{a}\"")).collect::<Vec<_>>().join(" ") args.join(" ")
)); ));
cmd cmd

View File

@@ -28,6 +28,7 @@ pub fn setup_environment(
} }
} }
env.insert("DEB_BUILD_PROFILES".to_string(), "cross".to_string()); env.insert("DEB_BUILD_PROFILES".to_string(), "cross".to_string());
env.insert("DEB_BUILD_OPTIONS".to_string(), "nocheck".to_string());
Ok(()) Ok(())
} }

View File

@@ -12,17 +12,11 @@ use xz2::read::XzDecoder;
pub struct EphemeralContextGuard { pub struct EphemeralContextGuard {
previous_context: String, previous_context: String,
chroot_path: PathBuf, chroot_path: PathBuf,
build_succeeded: bool,
} }
impl EphemeralContextGuard { impl EphemeralContextGuard {
/// Create a new ephemeral unshare context for the specified series /// Create a new ephemeral unshare context for the specified series
/// pub fn new(series: &str) -> Result<Self, Box<dyn Error>> {
/// # Arguments
/// * `series` - The distribution series (e.g., "noble", "sid")
/// * `arch` - Optional target architecture. If provided and different from host,
/// downloads a chroot for that architecture (uses qemu_binfmt transparently)
pub async fn new(series: &str, arch: Option<&str>) -> Result<Self, Box<dyn Error>> {
let current_context_name = context::manager().current_name(); let current_context_name = context::manager().current_name();
// Create a temporary directory for the chroot // Create a temporary directory for the chroot
@@ -30,14 +24,13 @@ impl EphemeralContextGuard {
let chroot_path = PathBuf::from(chroot_path_str); let chroot_path = PathBuf::from(chroot_path_str);
log::debug!( log::debug!(
"Creating new chroot for {} (arch: {:?}) at {}...", "Creating new chroot for {} at {}...",
series, series,
arch,
chroot_path.display() chroot_path.display()
); );
// Download and extract the chroot tarball // Download and extract the chroot tarball
Self::download_and_extract_chroot(series, arch, &chroot_path).await?; Self::download_and_extract_chroot(series, &chroot_path)?;
// Switch to an ephemeral context to build the package in the chroot // Switch to an ephemeral context to build the package in the chroot
context::manager().set_current_ephemeral(Context::new(ContextConfig::Unshare { context::manager().set_current_ephemeral(Context::new(ContextConfig::Unshare {
@@ -48,13 +41,11 @@ impl EphemeralContextGuard {
Ok(Self { Ok(Self {
previous_context: current_context_name, previous_context: current_context_name,
chroot_path, chroot_path,
build_succeeded: false,
}) })
} }
async fn download_and_extract_chroot( fn download_and_extract_chroot(
series: &str, series: &str,
arch: Option<&str>,
chroot_path: &PathBuf, chroot_path: &PathBuf,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
// Get project directories for caching // Get project directories for caching
@@ -63,12 +54,8 @@ impl EphemeralContextGuard {
let cache_dir = proj_dirs.cache_dir(); let cache_dir = proj_dirs.cache_dir();
fs::create_dir_all(cache_dir)?; fs::create_dir_all(cache_dir)?;
// Create tarball filename based on series and architecture // Create tarball filename based on series
let tarball_filename = if let Some(a) = arch { let tarball_filename = format!("{}-buildd.tar.xz", series);
format!("{}-{}-buildd.tar.xz", series, a)
} else {
format!("{}-buildd.tar.xz", series)
};
let tarball_path = cache_dir.join(&tarball_filename); let tarball_path = cache_dir.join(&tarball_filename);
// Check for existing lockfile, and wait for a timeout if it exists // Check for existing lockfile, and wait for a timeout if it exists
@@ -105,36 +92,20 @@ impl EphemeralContextGuard {
// Download tarball if it doesn't exist // Download tarball if it doesn't exist
if !tarball_path.exists() { if !tarball_path.exists() {
log::debug!( log::debug!("Downloading chroot tarball for {}...", series);
"Downloading chroot tarball for {} (arch: {:?})...", Self::download_chroot_tarball(series, &tarball_path)?;
series,
arch
);
Self::download_chroot_tarball(series, arch, &tarball_path).await?;
} else { } else {
log::debug!( log::debug!("Using cached chroot tarball for {}", series);
"Using cached chroot tarball for {} (arch: {:?})",
series,
arch
);
} }
// Extract tarball to chroot directory // Extract tarball to chroot directory
log::debug!("Extracting chroot tarball to {}...", chroot_path.display()); log::debug!("Extracting chroot tarball to {}...", chroot_path.display());
Self::extract_tarball(&tarball_path, chroot_path)?; Self::extract_tarball(&tarball_path, chroot_path)?;
// Create device nodes in the chroot
log::debug!("Creating device nodes in chroot...");
Self::create_device_nodes(chroot_path)?;
Ok(()) Ok(())
} }
async fn download_chroot_tarball( fn download_chroot_tarball(series: &str, tarball_path: &Path) -> Result<(), Box<dyn Error>> {
series: &str,
arch: Option<&str>,
tarball_path: &Path,
) -> Result<(), Box<dyn Error>> {
let ctx = context::current(); let ctx = context::current();
// Create a lock file to make sure that noone tries to use the file while it's not fully downloaded // Create a lock file to make sure that noone tries to use the file while it's not fully downloaded
@@ -143,27 +114,16 @@ impl EphemeralContextGuard {
.arg(lockfile_path.to_string_lossy().to_string()) .arg(lockfile_path.to_string_lossy().to_string())
.status()?; .status()?;
// Download the keyring to the cache directory
let keyring_path =
crate::apt::keyring::download_cache_keyring(Some(ctx.clone()), series).await?;
// Use mmdebstrap to download the tarball to the cache directory // Use mmdebstrap to download the tarball to the cache directory
let mut cmd = ctx.command("mmdebstrap"); let status = ctx
cmd.arg("--variant=buildd") .command("mmdebstrap")
.arg("--variant=buildd")
.arg("--mode=unshare") .arg("--mode=unshare")
.arg("--include=mount,curl,ca-certificates") .arg("--include=mount")
.arg("--format=tar") .arg("--format=tar")
.arg(format!("--keyring={}", keyring_path.display())); .arg(series)
.arg(tarball_path.to_string_lossy().to_string())
// Add architecture if specified .status()?;
if let Some(a) = arch {
cmd.arg(format!("--arch={}", a));
}
cmd.arg(series)
.arg(tarball_path.to_string_lossy().to_string());
let status = cmd.status()?;
if !status.success() { if !status.success() {
// Remove file on error // Remove file on error
@@ -177,11 +137,7 @@ impl EphemeralContextGuard {
.arg("-f") .arg("-f")
.arg(lockfile_path.to_string_lossy().to_string()) .arg(lockfile_path.to_string_lossy().to_string())
.status(); .status();
return Err(format!( return Err(format!("Failed to download chroot tarball for series {}", series).into());
"Failed to download chroot tarball for series {} (arch: {:?})",
series, arch
)
.into());
} }
// Remove lockfile: tarball is fully downloaded // Remove lockfile: tarball is fully downloaded
@@ -211,69 +167,6 @@ impl EphemeralContextGuard {
Ok(()) Ok(())
} }
fn create_device_nodes(chroot_path: &Path) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
let dev_null_path = chroot_path.join("dev/null");
let dev_zero_path = chroot_path.join("dev/zero");
// Ensure /dev directory exists
fs::create_dir_all(chroot_path.join("dev"))?;
// Remove existing device nodes if they exist
let _ = ctx
.command("rm")
.arg("-f")
.arg(dev_null_path.to_string_lossy().to_string())
.status();
let _ = ctx
.command("rm")
.arg("-f")
.arg(dev_zero_path.to_string_lossy().to_string())
.status();
// Check if we're running as root
let is_root = crate::utils::root::is_root()?;
// Create new device nodes using mknod (with sudo if not root)
let mut cmd_null = ctx.command(if is_root { "mknod" } else { "sudo" });
if !is_root {
cmd_null.arg("mknod");
}
let status_null = cmd_null
.arg("-m")
.arg("666")
.arg(dev_null_path.to_string_lossy().to_string())
.arg("c")
.arg("1")
.arg("3")
.status()?;
let mut cmd_zero = ctx.command(if is_root { "mknod" } else { "sudo" });
if !is_root {
cmd_zero.arg("mknod");
}
let status_zero = cmd_zero
.arg("-m")
.arg("666")
.arg(dev_zero_path.to_string_lossy().to_string())
.arg("c")
.arg("1")
.arg("5")
.status()?;
if !status_null.success() || !status_zero.success() {
return Err("Failed to create device nodes".into());
}
Ok(())
}
/// Mark the build as successful, which will trigger chroot cleanup on drop
pub fn mark_build_successful(&mut self) {
self.build_succeeded = true;
}
} }
impl Drop for EphemeralContextGuard { impl Drop for EphemeralContextGuard {
@@ -284,30 +177,14 @@ impl Drop for EphemeralContextGuard {
log::error!("Failed to restore context {}: {}", self.previous_context, e); log::error!("Failed to restore context {}: {}", self.previous_context, e);
} }
// Remove chroot directory only if build succeeded // Remove chroot directory
if self.build_succeeded { // We use the restored context to execute the cleanup command
log::debug!( let result = context::current()
"Build succeeded, removing chroot directory: {}",
self.chroot_path.display()
);
// Check if we're running as root to avoid unnecessary sudo
let is_root = crate::utils::root::is_root().unwrap_or(false);
let result = if is_root {
context::current()
.command("rm")
.arg("-rf")
.arg(&self.chroot_path)
.status()
} else {
context::current()
.command("sudo") .command("sudo")
.arg("rm") .arg("rm")
.arg("-rf") .arg("-rf")
.arg(&self.chroot_path) .arg(&self.chroot_path)
.status() .status();
};
match result { match result {
Ok(status) => { Ok(status) => {
@@ -316,11 +193,6 @@ impl Drop for EphemeralContextGuard {
"Failed to remove chroot directory {}", "Failed to remove chroot directory {}",
self.chroot_path.display() self.chroot_path.display()
); );
} else {
log::debug!(
"Successfully removed chroot directory: {}",
self.chroot_path.display()
);
} }
} }
Err(e) => { Err(e) => {
@@ -331,11 +203,5 @@ impl Drop for EphemeralContextGuard {
); );
} }
} }
} else {
log::debug!(
"Build did not succeed or was not marked as successful, keeping chroot directory: {}",
self.chroot_path.display()
);
}
} }
} }

View File

@@ -2,7 +2,6 @@
/// Directly calling 'debian/rules' in current context /// Directly calling 'debian/rules' in current context
use crate::context; use crate::context;
use crate::deb::find_dsc_file; use crate::deb::find_dsc_file;
use log::warn;
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::path::Path; use std::path::Path;
@@ -10,16 +9,13 @@ use std::path::Path;
use crate::apt; use crate::apt;
use crate::deb::cross; use crate::deb::cross;
#[allow(clippy::too_many_arguments)] pub fn build(
pub async fn build(
package: &str, package: &str,
version: &str, version: &str,
arch: &str, arch: &str,
series: &str, series: &str,
build_root: &str, build_root: &str,
cross: bool, cross: bool,
ppa: Option<&[&str]>,
inject_packages: Option<&[&str]>,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
// Environment // Environment
let mut env = HashMap::<String, String>::new(); let mut env = HashMap::<String, String>::new();
@@ -28,107 +24,23 @@ pub async fn build(
let ctx = context::current(); let ctx = context::current();
// Parallel building: find local number of cores, and use that
let num_cores = ctx
.command("nproc")
.output()
.map(|output| {
if output.status.success() {
String::from_utf8_lossy(&output.stdout)
.trim()
.parse::<usize>()
.unwrap_or(1)
} else {
1 // Default to 1 if nproc fails
}
})
.unwrap_or(1); // Default to 1 if we can't execute the command
// Build options: parallel, disable tests by default
env.insert(
"DEB_BUILD_OPTIONS".to_string(),
format!("parallel={} nocheck", num_cores),
);
if cross { if cross {
log::debug!("Setting up environment for local cross build..."); log::debug!("Setting up environment for local cross build...");
cross::setup_environment(&mut env, arch)?; cross::setup_environment(&mut env, arch)?;
cross::ensure_repositories(arch, series)?; cross::ensure_repositories(arch, series)?;
} }
// UBUNTU: Ensure 'universe' repository is enabled
let mut sources = apt::sources::load(None)?; let mut sources = apt::sources::load(None)?;
let mut modified = false; let mut modified = false;
let mut added_ppas: Vec<(&str, &str)> = Vec::new();
// Add PPA repositories if specified
if let Some(ppas) = ppa {
for ppa_str in ppas {
// PPA format: user/ppa_name
let parts: Vec<&str> = ppa_str.split('/').collect();
if parts.len() == 2 {
let base_url = crate::package_info::ppa_to_base_url(parts[0], parts[1]);
// Add new PPA source if not found
if !sources.iter().any(|s| s.uri.contains(&base_url)) {
// Get host and target architectures
let host_arch = crate::get_current_arch();
let target_arch = arch;
// Create architectures list with both host and target if different
let mut architectures = vec![host_arch.clone()];
if host_arch != *target_arch {
architectures.push(target_arch.to_string());
}
// Create suite list with all Ubuntu series
let suites = vec![format!("{}", series)];
let new_source = crate::apt::sources::SourceEntry {
enabled: true,
components: vec!["main".to_string()],
architectures: architectures.clone(),
suite: suites,
uri: base_url,
};
sources.push(new_source);
modified = true;
added_ppas.push((parts[0], parts[1]));
log::info!(
"Added PPA: {} for series {} with architectures {:?}",
ppa_str,
series,
architectures
);
}
} else {
return Err(
format!("Invalid PPA format: '{}'. Expected: user/ppa_name", ppa_str).into(),
);
}
}
}
// UBUNTU: Ensure 'universe' repository is enabled
for source in &mut sources { for source in &mut sources {
if source.uri.contains("ubuntu") && !source.components.contains(&"universe".to_string()) { if source.uri.contains("ubuntu") && !source.components.contains(&"universe".to_string()) {
source.components.push("universe".to_string()); source.components.push("universe".to_string());
modified = true; modified = true;
} }
} }
if modified { if modified {
apt::sources::save_legacy(None, sources, "/etc/apt/sources.list")?; apt::sources::save_legacy(None, sources, "/etc/apt/sources.list")?;
// Download and import PPA keys for all added PPAs
for (user, ppa_name) in added_ppas {
if let Err(e) = crate::apt::keyring::download_trust_ppa_key(None, user, ppa_name).await
{
warn!(
"Failed to download PPA key for {}/{}: {}",
user, ppa_name, e
);
}
}
} }
// Update package lists // Update package lists
@@ -168,38 +80,16 @@ pub async fn build(
return Err("Could not install essential packages for the build".into()); return Err("Could not install essential packages for the build".into());
} }
// Find the actual package directory // Install build dependencies
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?; log::debug!("Installing build dependencies...");
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
// Install injected packages if specified
if let Some(packages) = inject_packages {
log::info!("Installing injected packages: {:?}", packages);
let mut cmd = ctx.command("apt-get"); let mut cmd = ctx.command("apt-get");
cmd.envs(env.clone()) cmd.current_dir(format!("{build_root}/{package}"))
.arg("-y")
.arg("--allow-downgrades")
.arg("install")
.args(packages);
let status = cmd.status()?;
if !status.success() {
return Err(format!("Could not install injected packages: {:?}", packages).into());
}
}
// Install arch-specific build dependencies
log::debug!("Installing arch-specific build dependencies...");
let mut cmd = ctx.command("apt-get");
cmd.current_dir(package_dir_str)
.envs(env.clone()) .envs(env.clone())
.arg("-y") .arg("-y")
.arg("build-dep"); .arg("build-dep");
if cross { if cross {
cmd.arg(format!("--host-architecture={arch}")); cmd.arg(format!("--host-architecture={arch}"));
} }
cmd.arg("--arch-only");
let status = cmd.arg("./").status()?; let status = cmd.arg("./").status()?;
// If build-dep fails, we try to explain the failure using dose-debcheck // If build-dep fails, we try to explain the failure using dose-debcheck
@@ -208,28 +98,11 @@ pub async fn build(
return Err("Could not install build-dependencies for the build".into()); return Err("Could not install build-dependencies for the build".into());
} }
// Install arch-independant build dependencies
log::debug!("Installing arch-independant build dependencies...");
let status = ctx
.command("apt-get")
.current_dir(package_dir_str)
.envs(env.clone())
.arg("-y")
.arg("build-dep")
.arg("./")
.status()?;
// If build-dep fails, we try to explain the failure using dose-debcheck
if !status.success() {
dose3_explain_dependencies(package, version, arch, build_root, cross)?;
return Err("Could not install build-dependencies for the build".into());
}
// Run the build step // Run the build step
log::debug!("Building (debian/rules build) package..."); log::debug!("Building (debian/rules build) package...");
let status = ctx let status = ctx
.command("debian/rules") .command("debian/rules")
.current_dir(package_dir_str) .current_dir(format!("{build_root}/{package}"))
.envs(env.clone()) .envs(env.clone())
.arg("build") .arg("build")
.status()?; .status()?;
@@ -240,7 +113,7 @@ pub async fn build(
// Run the 'binary' step to produce deb // Run the 'binary' step to produce deb
let status = ctx let status = ctx
.command("fakeroot") .command("fakeroot")
.current_dir(package_dir_str) .current_dir(format!("{build_root}/{package}"))
.envs(env.clone()) .envs(env.clone())
.arg("debian/rules") .arg("debian/rules")
.arg("binary") .arg("binary")
@@ -283,7 +156,6 @@ fn dose3_explain_dependencies(
} }
// Transform the dsc file into a 'Source' stanza (replacing 'Source' with 'Package') // Transform the dsc file into a 'Source' stanza (replacing 'Source' with 'Package')
// TODO: Remove potential GPG headers/signature
let dsc_path = find_dsc_file(build_root, package, version)?; let dsc_path = find_dsc_file(build_root, package, version)?;
let mut dsc_content = ctx.read_file(&dsc_path)?; let mut dsc_content = ctx.read_file(&dsc_path)?;
dsc_content = dsc_content.replace("Source", "Package"); dsc_content = dsc_content.replace("Source", "Package");

View File

@@ -17,14 +17,12 @@ pub enum BuildMode {
} }
/// Build package in 'cwd' to a .deb /// Build package in 'cwd' to a .deb
pub async fn build_binary_package( pub fn build_binary_package(
arch: Option<&str>, arch: Option<&str>,
series: Option<&str>, series: Option<&str>,
cwd: Option<&Path>, cwd: Option<&Path>,
cross: bool, cross: bool,
mode: Option<BuildMode>, mode: Option<BuildMode>,
ppa: Option<&[&str]>,
inject_packages: Option<&[&str]>,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
let cwd = cwd.unwrap_or_else(|| Path::new(".")); let cwd = cwd.unwrap_or_else(|| Path::new("."));
@@ -50,15 +48,8 @@ pub async fn build_binary_package(
}; };
// Create an ephemeral unshare context for all Local builds // Create an ephemeral unshare context for all Local builds
// Use qemu_binfmt when target architecture differs from host and cross is not requested let _guard = if mode == BuildMode::Local {
let chroot_arch = if mode == BuildMode::Local && arch != current_arch && !cross { Some(ephemeral::EphemeralContextGuard::new(series)?)
Some(arch)
} else {
None
};
let mut guard = if mode == BuildMode::Local {
Some(ephemeral::EphemeralContextGuard::new(series, chroot_arch).await?)
} else { } else {
None None
}; };
@@ -77,19 +68,7 @@ pub async fn build_binary_package(
// Run the build using target build mode // Run the build using target build mode
match mode { match mode {
BuildMode::Local => { BuildMode::Local => local::build(&package, &version, arch, series, &build_root, cross)?,
local::build(
&package,
&version,
arch,
series,
&build_root,
cross,
ppa,
inject_packages,
)
.await?
}
BuildMode::Sbuild => sbuild::build(&package, &version, arch, series, &build_root, cross)?, BuildMode::Sbuild => sbuild::build(&package, &version, arch, series, &build_root, cross)?,
}; };
@@ -103,95 +82,9 @@ pub async fn build_binary_package(
} }
} }
// Mark build as successful to trigger chroot cleanup
if let Some(ref mut g) = guard {
g.mark_build_successful();
}
Ok(()) Ok(())
} }
/// Find the current package directory by trying both patterns:
/// - package/package
/// - package/package-origversion
pub(crate) fn find_package_directory(
parent_dir: &Path,
package: &str,
version: &str,
) -> Result<PathBuf, Box<dyn Error>> {
let ctx = context::current();
// Try package/package pattern first
let package_dir = parent_dir.join(package).join(package);
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Compute origversion from version: remove everything after first '-', after stripping epoch
let version_without_epoch = version.split_once(':').map(|(_, v)| v).unwrap_or(version);
let origversion = version_without_epoch
.split_once('-')
.map(|(v, _)| v)
.unwrap_or(version);
// Try package/package-origversion pattern
let package_dir = parent_dir
.join(package)
.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Try 'package' only
let package_dir = parent_dir.join(package);
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Try package-origversion only
let package_dir = parent_dir.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// List all directories under 'package/' and log them
let package_parent = parent_dir;
if ctx.exists(package_parent)? {
log::debug!(
"Listing all directories under '{}':",
package_parent.display()
);
let entries = ctx.list_files(package_parent)?;
let mut found_dirs = Vec::new();
for entry in entries {
if entry.is_dir() {
if let Some(file_name) = entry.file_name() {
found_dirs.push(file_name.to_string_lossy().into_owned());
}
log::debug!(" - {}", entry.display());
}
}
// If we found directories but none matched our patterns, provide helpful error
if !found_dirs.is_empty() {
return Err(format!(
"Could not find package directory for {} in {}. Found directories: {}",
package,
parent_dir.display(),
found_dirs.join(", ")
)
.into());
}
}
Err(format!(
"Could not find package directory for {} in {}",
package,
parent_dir.display()
)
.into())
}
fn find_dsc_file( fn find_dsc_file(
build_root: &str, build_root: &str,
package: &str, package: &str,
@@ -232,24 +125,18 @@ mod tests {
let cwd = temp_dir.path(); let cwd = temp_dir.path();
log::debug!("Created temporary directory: {}", cwd.display()); log::debug!("Created temporary directory: {}", cwd.display());
log::info!("Pulling package {} from {}...", package, series); log::info!("Pulling package {} from Ubuntu {}...", package, series);
let package_info = crate::pull::pull(package, "", Some(series), "", "", dist, Some(cwd), None)
crate::package_info::lookup(package, None, Some(series), "", dist, None, None)
.await
.expect("Cannot lookup package information");
crate::pull::pull(&package_info, Some(cwd), None, true)
.await .await
.expect("Cannot pull package"); .expect("Cannot pull package");
log::info!("Successfully pulled package {}", package); log::info!("Successfully pulled package {}", package);
// Change directory to the package directory // Change directory to the package directory
let cwd = crate::deb::find_package_directory(cwd, package, &package_info.stanza.version) let cwd = cwd.join(package).join(package);
.expect("Cannot find package directory");
log::debug!("Package directory: {}", cwd.display()); log::debug!("Package directory: {}", cwd.display());
log::info!("Starting binary package build..."); log::info!("Starting binary package build...");
crate::deb::build_binary_package(arch, Some(series), Some(&cwd), cross, None, None, None) crate::deb::build_binary_package(arch, Some(series), Some(&cwd), cross, None)
.await
.expect("Cannot build binary package (deb)"); .expect("Cannot build binary package (deb)");
log::info!("Successfully built binary package"); log::info!("Successfully built binary package");
@@ -287,7 +174,6 @@ mod tests {
test_build_end_to_end("hello", "noble", None, None, false).await; test_build_end_to_end("hello", "noble", None, None, false).await;
} }
/// This ensures that we can cross-build packages
#[tokio::test] #[tokio::test]
#[test_log::test] #[test_log::test]
#[cfg(target_arch = "x86_64")] #[cfg(target_arch = "x86_64")]
@@ -295,41 +181,4 @@ mod tests {
async fn test_deb_hello_ubuntu_cross_end_to_end() { async fn test_deb_hello_ubuntu_cross_end_to_end() {
test_build_end_to_end("hello", "noble", None, Some("riscv64"), true).await; test_build_end_to_end("hello", "noble", None, Some("riscv64"), true).await;
} }
/// This ensures that we can build packages from sid, even on older
/// releases. It can sometimes be difficult with mmdebstrap issues
/// for example.
#[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_hello_debian_sid_end_to_end() {
test_build_end_to_end("hello", "sid", None, None, false).await;
}
/// This is a specific test case for the linux-riscv package on Ubuntu
/// It is important to ensure that pkh can cross-compile linux-riscv, as
/// for risc-v hardware is still rare and cross-compilation is necessary
/// to debug and test
#[tokio::test]
#[test_log::test]
#[cfg(target_arch = "x86_64")]
async fn test_deb_linux_riscv_ubuntu_cross_end_to_end() {
test_build_end_to_end("linux-riscv", "questing", None, Some("riscv64"), true).await;
}
/// This is a specific test case for the latest gcc package on Debian
/// The GCC package is complex and hard to build, with specific stages
/// and system-bound scripts. Building it requires specific things that
/// we want to ensure are not broken.
/// NOTE: Ideally, we want to run this in CI, but it takes more than 20h
/// to fully build the gcc-15 package on an amd64 builder, which is too
/// much time.
#[ignore]
#[cfg(target_arch = "x86_64")]
#[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_gcc_debian_end_to_end() {
test_build_end_to_end("gcc-15", "sid", None, None, false).await;
}
} }

View File

@@ -2,26 +2,18 @@
/// Call 'sbuild' with the dsc file to build the package with unshare /// Call 'sbuild' with the dsc file to build the package with unshare
use crate::context; use crate::context;
use std::error::Error; use std::error::Error;
use std::path::Path;
pub fn build( pub fn build(
package: &str, package: &str,
version: &str, _version: &str,
arch: &str, arch: &str,
series: &str, series: &str,
build_root: &str, build_root: &str,
cross: bool, cross: bool,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
let ctx = context::current(); let ctx = context::current();
// Find the actual package directory
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?;
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
let mut cmd = ctx.command("sbuild"); let mut cmd = ctx.command("sbuild");
cmd.current_dir(package_dir_str); cmd.current_dir(format!("{}/{}", build_root, package));
cmd.arg("--chroot-mode=unshare"); cmd.arg("--chroot-mode=unshare");
cmd.arg("--no-clean-source"); cmd.arg("--no-clean-source");

View File

@@ -1,373 +0,0 @@
use chrono::NaiveDate;
use lazy_static::lazy_static;
use serde::Deserialize;
use std::error::Error;
use std::path::Path;
#[derive(Debug, Clone)]
/// Information about a specific distribution series,
/// as parsed from one row of a distro-info CSV file
pub struct SeriesInformation {
    /// Distribution series short name (e.g. "noble", "bookworm")
    pub series: String,
    /// Codename, i.e. full name of series
    pub codename: String,
    /// Series version as numbers (kept as a string, e.g. "12"); None when absent
    pub version: Option<String>,
    /// Series creation date (always present in the CSV)
    pub created: NaiveDate,
    /// Series release date; None when the series has no release date recorded
    pub release: Option<NaiveDate>,
    /// Series end-of-life date; None when no EOL date is recorded
    pub eol: Option<NaiveDate>,
}
/// Locations of a distribution's distro-info CSV file.
#[derive(Debug, Deserialize)]
struct SeriesInfo {
    // Path of the CSV on the local filesystem (preferred when it exists)
    local: String,
    // URL to download the CSV from when no local copy exists
    network: String,
}

/// Per-distribution configuration loaded from distro_info.yml.
#[derive(Debug, Deserialize)]
struct DistData {
    // Archive base URL, e.g. http://archive.ubuntu.com/ubuntu
    base_url: String,
    // URL (template) of the archive keyring; for Debian it may contain a
    // '{series_num}' placeholder substituted by get_keyring_url()
    archive_keyring: String,
    // Pocket names for this distribution; the 'main' pocket (empty string)
    // is appended separately by get_dist_pockets()
    pockets: Vec<String>,
    // Where to find the distro-info series CSV for this distribution
    series: SeriesInfo,
}

/// Root of the distro_info.yml document: one entry per distribution name.
#[derive(Debug, Deserialize)]
struct Data {
    dist: std::collections::HashMap<String, DistData>,
}

// Raw YAML configuration embedded at compile time.
const DATA_YAML: &str = include_str!("../distro_info.yml");

lazy_static! {
    /// Parsed configuration; panics on first access if the embedded YAML is invalid.
    static ref DATA: Data = serde_yaml::from_str(DATA_YAML).unwrap();
}
fn parse_series_csv(content: &str) -> Result<Vec<SeriesInformation>, Box<dyn Error>> {
let mut rdr = csv::ReaderBuilder::new()
.flexible(true)
.from_reader(content.as_bytes());
let headers = rdr.headers()?.clone();
let series_idx = headers
.iter()
.position(|h| h == "series")
.ok_or("Column 'series' not found")?;
let codename_idx = headers
.iter()
.position(|h| h == "codename")
.ok_or("Column 'codename' not found")?;
let version_idx = headers
.iter()
.position(|h| h == "version")
.ok_or("Column 'version' not found")?;
let created_idx = headers
.iter()
.position(|h| h == "created")
.ok_or("Column 'created' not found")?;
let release_idx = headers
.iter()
.position(|h| h == "release")
.ok_or("Column 'release' not found")?;
let eol_idx = headers
.iter()
.position(|h| h == "eol")
.ok_or("Column 'eol' not found")?;
let mut series_info_list = Vec::new();
for result in rdr.records() {
let record = result?;
let series = record.get(series_idx).unwrap().to_string();
let codename = record.get(codename_idx).unwrap().to_string();
let version = record.get(version_idx).map(|s| s.to_string());
let created = record
.get(created_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap())
.unwrap();
let release = record
.get(release_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap());
let eol = record
.get(eol_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap());
series_info_list.push(SeriesInformation {
series,
codename,
version,
created,
release,
eol,
});
}
// Revert to sort by most recent
series_info_list.reverse();
Ok(series_info_list)
}
/// Get time-ordered list of series information for a distribution, development series first
///
/// Prefers the local distro-info CSV configured for the distribution and
/// falls back to downloading the CSV from the configured network URL.
///
/// # Errors
/// Fails when the distribution is unknown (panics, as elsewhere in this
/// module), when reading/downloading the CSV fails, or when parsing fails.
pub async fn get_ordered_series(dist: &str) -> Result<Vec<SeriesInformation>, Box<dyn Error>> {
    let series_info = &DATA.dist.get(dist).unwrap().series;
    let local_path = Path::new(series_info.local.as_str());
    let content = if local_path.exists() {
        // BUG FIX: read the configured local path itself. The previous code
        // checked `series_info.local` for existence but then read a
        // hardcoded "/usr/share/distro-info/{dist}.csv", which diverges from
        // the configuration (and from get_debian_series_number()).
        std::fs::read_to_string(local_path)?
    } else {
        reqwest::get(series_info.network.as_str())
            .await?
            .text()
            .await?
    };
    let series_info_list = parse_series_csv(&content)?;
    Ok(series_info_list)
}
/// Get time-ordered list of series names for a distribution, development series first
pub async fn get_ordered_series_name(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
    // Consume the series list and keep only the short names.
    let names = get_ordered_series(dist)
        .await?
        .into_iter()
        .map(|info| info.series)
        .collect();
    Ok(names)
}
/// Get the latest released series for a dist (excluding future releases and special cases like sid)
pub async fn get_latest_released_series(dist: &str) -> Result<String, Box<dyn Error>> {
let series_info_list = get_ordered_series(dist).await?;
let today = chrono::Local::now().date_naive();
let mut released_series = Vec::new();
for series_info in series_info_list {
// Skip 'sid' and series without release dates or with future release dates
if series_info.series != "sid"
&& series_info.series != "experimental"
&& series_info.release.is_some()
&& series_info.release.unwrap() <= today
{
released_series.push(series_info);
}
}
// Sort by release date descending (newest first)
released_series.sort_by(|a, b| b.release.cmp(&a.release));
if let Some(latest) = released_series.first() {
Ok(latest.series.clone())
} else {
Err("No released series found".into())
}
}
/// Obtain the distribution (eg. debian, ubuntu) from a distribution series (eg. noble, bookworm)
pub async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
    // Probe every configured distribution until one lists this series.
    for dist in DATA.dist.keys() {
        let names = get_ordered_series_name(dist).await?;
        if names.iter().any(|name| name == series) {
            return Ok(dist.clone());
        }
    }
    Err(format!("Unknown series: {}", series).into())
}
/// Get the package pockets available for a given distribution
///
/// Example: get_dist_pockets(ubuntu) => ["proposed", "updates", ""]
pub fn get_dist_pockets(dist: &str) -> Vec<String> {
    let configured = &DATA.dist.get(dist).unwrap().pockets;
    // The 'main' pocket is represented by the empty string and is always
    // appended explicitly.
    let mut pockets = Vec::with_capacity(configured.len() + 1);
    pockets.extend(configured.iter().cloned());
    pockets.push(String::new());
    pockets
}
/// Get the sources URL for a distribution, series, pocket, and component
///
/// The empty pocket denotes the release (main) pocket and adds no suffix.
pub fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
    let suffix = match pocket {
        "" => String::new(),
        p => format!("-{p}"),
    };
    format!("{base_url}/dists/{series}{suffix}/{component}/source/Sources.gz")
}
/// Get the archive base URL for a distribution
///
/// Example: ubuntu => http://archive.ubuntu.com/ubuntu
pub fn get_base_url(dist: &str) -> String {
    let dist_data = DATA.dist.get(dist).unwrap();
    dist_data.base_url.to_owned()
}
/// Obtain the URL for the archive keyring of a distribution series
///
/// For Debian the configured keyring URL contains a '{series_num}'
/// placeholder; it is substituted with the series' version number.
/// 'sid' and 'experimental' have no number, so the latest released
/// series' number is used instead.
///
/// # Errors
/// Fails when the series' distribution cannot be determined, when the
/// distribution is not configured, or when a Debian series has no
/// version number (previously this case panicked via `unwrap`).
pub async fn get_keyring_url(series: &str) -> Result<String, Box<dyn Error>> {
    let dist = get_dist_from_series(series).await?;
    let dist_data = DATA
        .dist
        .get(&dist)
        .ok_or(format!("Unsupported distribution: {}", dist))?;
    // For non-Debian distributions (e.g. Ubuntu), use the keyring directly.
    if dist != "debian" {
        return Ok(dist_data.archive_keyring.clone());
    }
    // Special case for 'sid'/'experimental' - use the latest released series.
    let numbered_series = if series == "sid" || series == "experimental" {
        get_latest_released_series("debian").await?
    } else {
        series.to_string()
    };
    let series_num = get_debian_series_number(&numbered_series)
        .await?
        .ok_or(format!("No series number found for: {}", numbered_series))?;
    // Replace the {series_num} placeholder with the actual series number.
    Ok(dist_data
        .archive_keyring
        .replace("{series_num}", &series_num))
}
/// Obtain the URL for the 'Release' file of a distribution series
///
/// The empty pocket denotes the release (main) pocket and adds no suffix.
fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
    match pocket {
        "" => format!("{base_url}/dists/{series}/Release"),
        p => format!("{base_url}/dists/{series}-{p}/Release"),
    }
}
/// Obtain the components of a distribution series by parsing the 'Release' file
pub async fn get_components(
    base_url: &str,
    series: &str,
    pocket: &str,
) -> Result<Vec<String>, Box<dyn Error>> {
    let url = get_release_url(base_url, series, pocket);
    log::debug!("Fetching Release file from: {}", url);
    let content = reqwest::get(&url).await?.text().await?;
    // The first "Components:" line carries a whitespace-separated list.
    content
        .lines()
        .filter(|line| line.starts_with("Components:"))
        .find_map(|line| line.split_once(':'))
        .map(|(_, components)| {
            components
                .split_whitespace()
                .map(str::to_string)
                .collect()
        })
        .ok_or_else(|| "Components not found.".into())
}
/// Map a Debian series name to its version number
///
/// Reads the Debian distro-info CSV (local copy when available, network
/// fallback otherwise) and returns the 'version' value of the first row
/// whose 'series' matches case-insensitively, or Ok(None) when no row
/// matches.
///
/// # Errors
/// Fails when the CSV cannot be read/downloaded, or when the 'series' or
/// 'version' column is absent.
pub async fn get_debian_series_number(series: &str) -> Result<Option<String>, Box<dyn Error>> {
    let series_info = &DATA.dist.get("debian").unwrap().series;
    let content = if Path::new(series_info.local.as_str()).exists() {
        std::fs::read_to_string(series_info.local.as_str())?
    } else {
        reqwest::get(series_info.network.as_str())
            .await?
            .text()
            .await?
    };
    let mut rdr = csv::ReaderBuilder::new()
        .flexible(true)
        .from_reader(content.as_bytes());
    let headers = rdr.headers()?.clone();
    let series_idx = headers
        .iter()
        .position(|h| h == "series")
        .ok_or("Column 'series' not found")?;
    let version_idx = headers
        .iter()
        .position(|h| h == "version")
        .ok_or("Column 'version' not found")?;
    // Hoist the loop-invariant lowercase conversion out of the record scan
    // (previously recomputed for every row).
    let wanted = series.to_lowercase();
    for result in rdr.records() {
        let record = result?;
        if let (Some(s), Some(v)) = (record.get(series_idx), record.get(version_idx))
            && s.to_lowercase() == wanted
        {
            return Ok(Some(v.to_string()));
        }
    }
    Ok(None)
}
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): these tests read the distro-info CSVs (local file or
    // network fallback) and so depend on live data being reachable.

    #[tokio::test]
    async fn test_get_debian_series() {
        let names = get_ordered_series_name("debian").await.unwrap();
        for expected in ["sid", "bookworm"] {
            assert!(names.iter().any(|name| name == expected));
        }
    }

    #[tokio::test]
    async fn test_get_ubuntu_series() {
        let names = get_ordered_series_name("ubuntu").await.unwrap();
        for expected in ["noble", "jammy"] {
            assert!(names.iter().any(|name| name == expected));
        }
    }

    #[tokio::test]
    async fn test_get_dist_from_series() {
        assert_eq!(get_dist_from_series("sid").await.unwrap(), "debian");
        assert_eq!(get_dist_from_series("noble").await.unwrap(), "ubuntu");
    }

    #[tokio::test]
    async fn test_get_debian_series_number() {
        // Known stable releases map to their version numbers.
        let bookworm = get_debian_series_number("bookworm").await.unwrap();
        assert_eq!(bookworm.as_deref(), Some("12"));
        let trixie = get_debian_series_number("trixie").await.unwrap();
        assert_eq!(trixie.as_deref(), Some("13"));
        // An unknown series yields no number.
        let unknown = get_debian_series_number("unknown").await.unwrap();
        assert!(unknown.is_none());
    }

    #[tokio::test]
    async fn test_get_keyring_url_sid() {
        // 'sid' must resolve to the same keyring URL as the latest
        // released Debian series.
        let latest = get_latest_released_series("debian").await.unwrap();
        let latest_keyring = get_keyring_url(&latest).await.unwrap();
        let sid_keyring = get_keyring_url("sid").await.unwrap();
        assert_eq!(sid_keyring, latest_keyring);
    }

    #[tokio::test]
    async fn test_get_latest_released_debian_series() {
        let latest = get_latest_released_series("debian").await.unwrap();
        // Development/experimental suites are never "released".
        assert_ne!(latest, "sid");
        assert_ne!(latest, "experimental");
        // A released series always carries a version number.
        let version = get_debian_series_number(&latest).await.unwrap();
        assert!(version.is_some());
    }
}

View File

@@ -11,14 +11,10 @@ pub mod build;
pub mod changelog; pub mod changelog;
/// Build a Debian package into a binary (.deb) /// Build a Debian package into a binary (.deb)
pub mod deb; pub mod deb;
/// Obtain general information about distribution, series, etc
pub mod distro_info;
/// Obtain information about one or multiple packages /// Obtain information about one or multiple packages
pub mod package_info; pub mod package_info;
/// Download a source package locally /// Download a source package locally
pub mod pull; pub mod pull;
/// Handle package-specific quirks and workarounds
pub mod quirks;
/// Handle context for .deb building: locally, over ssh, in a chroot... /// Handle context for .deb building: locally, over ssh, in a chroot...
pub mod context; pub mod context;

View File

@@ -7,6 +7,8 @@ use pkh::context::ContextConfig;
extern crate flate2; extern crate flate2;
use pkh::pull::pull;
use pkh::changelog::generate_entry; use pkh::changelog::generate_entry;
use indicatif_log_bridge::LogWrapper; use indicatif_log_bridge::LogWrapper;
@@ -37,7 +39,6 @@ fn main() {
.required(false), .required(false),
) )
.arg(arg!(-v --version <version> "Target package version").required(false)) .arg(arg!(-v --version <version> "Target package version").required(false))
.arg(arg!(--archive "Only use the archive to download package source, not git").required(false))
.arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false)) .arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false))
.arg(arg!(<package> "Target package")), .arg(arg!(<package> "Target package")),
) )
@@ -48,16 +49,12 @@ fn main() {
.arg(arg!(--backport "This changelog is for a backport entry").required(false)) .arg(arg!(--backport "This changelog is for a backport entry").required(false))
.arg(arg!(-v --version <version> "Target version").required(false)), .arg(arg!(-v --version <version> "Target version").required(false)),
) )
.subcommand(Command::new("build").about("Build the source package (into a .dsc)")) .subcommand(Command::new("build").about("Build the source package"))
.subcommand( .subcommand(
Command::new("deb") Command::new("deb")
.about("Build the source package into binary package (.deb)") .about("Build the binary package")
.arg(arg!(-s --series <series> "Target distribution series").required(false)) .arg(arg!(-s --series <series> "Target distribution series").required(false))
.arg(arg!(-a --arch <arch> "Target architecture").required(false)) .arg(arg!(-a --arch <arch> "Target architecture").required(false))
.arg(arg!(--ppa <ppa> "Build the package adding a specific PPA for dependencies (can be specified multiple times)")
.long_help("Build the package adding a specific PPA for dependencies. Can be specified multiple times.").required(false).action(clap::ArgAction::Append))
.arg(arg!(--inject <package> "Inject a package into the build environment (can be specified multiple times)")
.long_help("Inject a package into the build environment before build-dep. Can be a .deb file path, a package name from the archive, or a package from a previously added PPA. Can be specified multiple times.").required(false).action(clap::ArgAction::Append))
.arg(arg!(--cross "Cross-compile for target architecture (instead of qemu-binfmt)") .arg(arg!(--cross "Cross-compile for target architecture (instead of qemu-binfmt)")
.long_help("Cross-compile for target architecture (instead of using qemu-binfmt)\nNote that most packages cannot be cross-compiled").required(false)) .long_help("Cross-compile for target architecture (instead of using qemu-binfmt)\nNote that most packages cannot be cross-compiled").required(false))
.arg(arg!(--mode <mode> "Change build mode [sbuild, local]").required(false) .arg(arg!(--mode <mode> "Change build mode [sbuild, local]").required(false)
@@ -97,37 +94,28 @@ fn main() {
let package = sub_matches.get_one::<String>("package").expect("required"); let package = sub_matches.get_one::<String>("package").expect("required");
let series = sub_matches.get_one::<String>("series").map(|s| s.as_str()); let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str()); let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str());
let version = sub_matches.get_one::<String>("version").map(|s| s.as_str()); let version = sub_matches
let ppa = sub_matches.get_one::<String>("ppa").map(|s| s.as_str()); .get_one::<String>("version")
let archive = sub_matches.get_one::<bool>("archive").unwrap_or(&false); .map(|s| s.as_str())
.unwrap_or("");
let ppa = sub_matches
.get_one::<String>("ppa")
.map(|s| s.as_str())
.unwrap_or("");
let (pb, progress_callback) = ui::create_progress_bar(&multi); let (pb, progress_callback) = ui::create_progress_bar(&multi);
// Convert PPA to base URL if provided
let base_url = ppa.and_then(|ppa_str| {
// PPA format: user/ppa_name
let parts: Vec<&str> = ppa_str.split('/').collect();
if parts.len() == 2 {
Some(pkh::package_info::ppa_to_base_url(parts[0], parts[1]))
} else {
None
}
});
// Since pull is async, we need to block on it // Since pull is async, we need to block on it
if let Err(e) = rt.block_on(async { if let Err(e) = rt.block_on(pull(
let package_info = pkh::package_info::lookup(
package, package,
version, version,
series, series,
"", "",
ppa,
dist, dist,
base_url.as_deref(), None,
Some(&progress_callback), Some(&progress_callback),
) )) {
.await?;
pkh::pull::pull(&package_info, None, Some(&progress_callback), *archive).await
}) {
pb.finish_and_clear(); pb.finish_and_clear();
error!("{}", e); error!("{}", e);
std::process::exit(1); std::process::exit(1);
@@ -163,24 +151,6 @@ fn main() {
let series = sub_matches.get_one::<String>("series").map(|s| s.as_str()); let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
let arch = sub_matches.get_one::<String>("arch").map(|s| s.as_str()); let arch = sub_matches.get_one::<String>("arch").map(|s| s.as_str());
let cross = sub_matches.get_one::<bool>("cross").unwrap_or(&false); let cross = sub_matches.get_one::<bool>("cross").unwrap_or(&false);
let ppa: Vec<&str> = sub_matches
.get_many::<String>("ppa")
.map(|v| v.map(|s| s.as_str()).collect())
.unwrap_or_default();
let ppa = if ppa.is_empty() {
None
} else {
Some(ppa.as_slice())
};
let inject_packages: Vec<&str> = sub_matches
.get_many::<String>("inject")
.map(|v| v.map(|s| s.as_str()).collect())
.unwrap_or_default();
let inject_packages = if inject_packages.is_empty() {
None
} else {
Some(inject_packages.as_slice())
};
let mode: Option<&str> = sub_matches.get_one::<String>("mode").map(|s| s.as_str()); let mode: Option<&str> = sub_matches.get_one::<String>("mode").map(|s| s.as_str());
let mode: Option<pkh::deb::BuildMode> = match mode { let mode: Option<pkh::deb::BuildMode> = match mode {
Some("sbuild") => Some(pkh::deb::BuildMode::Sbuild), Some("sbuild") => Some(pkh::deb::BuildMode::Sbuild),
@@ -188,18 +158,9 @@ fn main() {
_ => None, _ => None,
}; };
if let Err(e) = rt.block_on(async { if let Err(e) =
pkh::deb::build_binary_package( pkh::deb::build_binary_package(arch, series, Some(cwd.as_path()), *cross, mode)
arch, {
series,
Some(cwd.as_path()),
*cross,
mode,
ppa,
inject_packages,
)
.await
}) {
error!("{}", e); error!("{}", e);
std::process::exit(1); std::process::exit(1);
} }

View File

@@ -1,37 +1,124 @@
use chrono::NaiveDate;
use flate2::read::GzDecoder; use flate2::read::GzDecoder;
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::io::Read; use std::io::Read;
use std::path::Path;
use crate::ProgressCallback; use crate::ProgressCallback;
use log::{debug, warn}; use log::{debug, warn};
/// Convert a PPA specification to a base URL const BASE_URL_UBUNTU: &str = "http://archive.ubuntu.com/ubuntu";
/// const BASE_URL_DEBIAN: &str = "http://deb.debian.org/debian";
/// # Arguments
/// * user: user for the PPA
/// * name: name of the PPA
///
/// # Returns
/// * The base URL for the PPA (e.g., "https://ppa.launchpadcontent.net/user/ppa_name/ubuntu/")
pub fn ppa_to_base_url(user: &str, name: &str) -> String {
format!("http://ppa.launchpadcontent.net/{}/{}/ubuntu", user, name)
}
async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn Error>> { async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn Error>> {
let url = format!("https://git.launchpad.net/ubuntu/+source/{}", package); let url = format!("https://git.launchpad.net/ubuntu/+source/{}", package);
let client = reqwest::Client::builder()
.redirect(reqwest::redirect::Policy::none())
.build()?;
let response = client.head(&url).send().await?;
// Use libgit2 to check if the remote repository exists if response.status().is_success() {
// This is more reliable than HTTP HEAD requests when CGIt is disabled Ok(Some(url))
match git2::Remote::create_detached(url.clone()) { } else {
Ok(mut remote) => match remote.connect(git2::Direction::Fetch) { Ok(None)
Ok(_) => Ok(Some(url)),
Err(_) => Ok(None),
},
Err(_) => Ok(None),
} }
} }
fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
let mut rdr = csv::ReaderBuilder::new()
.flexible(true)
.from_reader(content.as_bytes());
let headers = rdr.headers()?.clone();
let series_idx = headers
.iter()
.position(|h| h == "series")
.ok_or("Column 'series' not found")?;
let created_idx = headers
.iter()
.position(|h| h == "created")
.ok_or("Column 'created' not found")?;
let mut entries = Vec::new();
for result in rdr.records() {
let record = result?;
if let (Some(s), Some(c)) = (record.get(series_idx), record.get(created_idx))
&& let Ok(date) = NaiveDate::parse_from_str(c, "%Y-%m-%d")
{
entries.push((s.to_string(), date));
}
}
// Sort by date descending (newest first)
entries.sort_by(|a, b| b.1.cmp(&a.1));
Ok(entries.into_iter().map(|(s, _)| s).collect())
}
async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
} else {
reqwest::get(
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
.as_str(),
)
.await?
.text()
.await?
};
let mut series = parse_series_csv(&content)?;
// For Debian, ensure 'sid' is first if it's not (it usually doesn't have a date or is very old/new depending on file)
// Actually in the file sid has 1993 date.
// But we want to try 'sid' (unstable) first for Debian.
if dist == "debian" {
series.retain(|s| s != "sid");
series.insert(0, "sid".to_string());
}
Ok(series)
}
// Keep existing functions for compatibility or refactor them to use get_ordered_series
async fn get_series_from_url(url: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = reqwest::get(url).await?.text().await?;
parse_series_csv(&content)
}
fn get_series_from_file(path: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = std::fs::read_to_string(path)?;
parse_series_csv(&content)
}
/// Obtain a list of series from a distribution
pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
get_series_from_file(format!("/usr/share/distro-info/{dist}.csv").as_str())
} else {
get_series_from_url(
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
.as_str(),
)
.await
}
}
/// Obtain the distribution (eg. debian, ubuntu) from a distribution series (eg. noble, bookworm)
pub async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
let debian_series = get_dist_series("debian").await?;
if debian_series.contains(&series.to_string()) {
return Ok("debian".to_string());
}
let ubuntu_series = get_dist_series("ubuntu").await?;
if ubuntu_series.contains(&series.to_string()) {
return Ok("ubuntu".to_string());
}
Err(format!("Unknown series: {}", series).into())
}
/// A File used in a source package /// A File used in a source package
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct FileEntry { pub struct FileEntry {
@@ -86,6 +173,66 @@ impl PackageInfo {
} }
} }
fn get_dist_pockets(dist: &str) -> Vec<&'static str> {
match dist {
"ubuntu" => vec!["proposed", "updates", ""],
"debian" => vec!["proposed-updates", "updates", ""],
_ => vec![""],
}
}
fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
let pocket_full = if pocket.is_empty() {
String::new()
} else {
format!("-{}", pocket)
};
format!("{base_url}/dists/{series}{pocket_full}/{component}/source/Sources.gz")
}
fn get_base_url(dist: &str) -> &str {
match dist {
"ubuntu" => BASE_URL_UBUNTU,
"debian" => BASE_URL_DEBIAN,
_ => panic!("Unknown distribution"),
}
}
/// Obtain the URL for the 'Release' file of a distribution series
fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
let pocket_full = if pocket.is_empty() {
String::new()
} else {
format!("-{}", pocket)
};
format!("{base_url}/dists/{series}{pocket_full}/Release")
}
/// Obtain the components of a distribution series by parsing the 'Release' file
///
/// Fetches `{base_url}/dists/{series}[-{pocket}]/Release` and returns the
/// whitespace-separated values of its `Components:` field.
///
/// # Errors
/// Returns an error on network failure or when the Release file has no
/// `Components:` line.
async fn get_components(
    base_url: &str,
    series: &str,
    pocket: &str,
) -> Result<Vec<String>, Box<dyn Error>> {
    let url = get_release_url(base_url, series, pocket);
    debug!("Fetching Release file from: {}", url);
    let content = reqwest::get(&url).await?.text().await?;
    for line in content.lines() {
        // `strip_prefix` combines the starts_with check and the split after
        // ':' in a single scan, and avoids let-chain syntax.
        if let Some(components) = line.strip_prefix("Components:") {
            return Ok(components
                .split_whitespace()
                .map(|s| s.to_string())
                .collect());
        }
    }
    Err("Components not found.".into())
}
struct DebianSources { struct DebianSources {
splitted_sources: std::str::Split<'static, &'static str>, splitted_sources: std::str::Split<'static, &'static str>,
} }
@@ -185,14 +332,13 @@ fn parse_sources(
} }
/// Get package information from a package, distribution series, and pocket /// Get package information from a package, distribution series, and pocket
async fn get( pub async fn get(
package_name: &str, package_name: &str,
series: &str, series: &str,
pocket: &str, pocket: &str,
version: Option<&str>, version: Option<&str>,
base_url: Option<&str>,
) -> Result<PackageInfo, Box<dyn Error>> { ) -> Result<PackageInfo, Box<dyn Error>> {
let dist = crate::distro_info::get_dist_from_series(series).await?; let dist = get_dist_from_series(series).await?;
// Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa // Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa
// We need to check manually if there is a launchpad repository for the package // We need to check manually if there is a launchpad repository for the package
@@ -204,22 +350,13 @@ async fn get(
preferred_vcs = Some(lp_url); preferred_vcs = Some(lp_url);
} }
// Determine the base URL to use (either provided PPA URL or default archive) let base_url = get_base_url(&dist);
let distro_base_url = crate::distro_info::get_base_url(&dist);
let base_url = if let Some(ppa_url) = base_url {
ppa_url.to_string()
} else {
distro_base_url.clone()
};
// If using a custom base URL (PPA), disable VCS lookup to force archive download let components = get_components(base_url, series, pocket).await?;
let from_ppa = base_url != distro_base_url;
let components = crate::distro_info::get_components(&base_url, series, pocket).await?;
debug!("Found components: {:?}", components); debug!("Found components: {:?}", components);
for component in components { for component in components {
let url = crate::distro_info::get_sources_url(&base_url, series, pocket, &component); let url = get_sources_url(base_url, series, pocket, &component);
debug!("Fetching sources from: {}", url); debug!("Fetching sources from: {}", url);
@@ -250,11 +387,6 @@ async fn get(
preferred_vcs = Some(vcs.clone()); preferred_vcs = Some(vcs.clone());
} }
// If downloading from PPA, make sure we don't use a VCS
if from_ppa {
preferred_vcs = None;
}
let archive_url = format!("{base_url}/{0}", stanza.directory); let archive_url = format!("{base_url}/{0}", stanza.directory);
return Ok(PackageInfo { return Ok(PackageInfo {
dist, dist,
@@ -274,15 +406,14 @@ async fn get(
} }
/// Try to find package information in a distribution, trying all series and pockets /// Try to find package information in a distribution, trying all series and pockets
async fn find_package( pub async fn find_package(
package_name: &str, package_name: &str,
dist: &str, dist: &str,
pocket: &str, pocket: &str,
version: Option<&str>, version: Option<&str>,
base_url: Option<&str>,
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> { ) -> Result<PackageInfo, Box<dyn Error>> {
let series_list = crate::distro_info::get_ordered_series_name(dist).await?; let series_list = get_ordered_series(dist).await?;
for (i, series) in series_list.iter().enumerate() { for (i, series) in series_list.iter().enumerate() {
if let Some(cb) = progress { if let Some(cb) = progress {
@@ -290,13 +421,13 @@ async fn find_package(
} }
let pockets = if pocket.is_empty() { let pockets = if pocket.is_empty() {
crate::distro_info::get_dist_pockets(dist) get_dist_pockets(dist)
} else { } else {
vec![pocket.to_string()] vec![pocket]
}; };
for p in pockets { for p in pockets {
match get(package_name, series, &p, version, base_url).await { match get(package_name, series, p, version).await {
Ok(info) => { Ok(info) => {
if i > 0 { if i > 0 {
warn!( warn!(
@@ -321,72 +452,6 @@ async fn find_package(
Err(format!("Package '{}' not found.", package_name).into()) Err(format!("Package '{}' not found.", package_name).into())
} }
/// Lookup package information for a source package
///
/// This function obtains package information either directly from a specific series
/// or by searching across all series in a distribution.
///
/// # Arguments
/// * `package` - The name of the package to look up
/// * `version` - Optional specific version to look for
/// * `series` - Optional distribution series (e.g., "noble", "bookworm")
/// * `pocket` - Pocket to search in (e.g., "updates", "security", or "" for main)
/// * `dist` - Optional distribution name (e.g., "ubuntu", "debian")
/// * `base_url` - Optional base URL for the package archive (e.g., "https://ppa.launchpadcontent.net/user/ppa/ubuntu/")
/// * `progress` - Optional progress callback
pub async fn lookup(
    package: &str,
    version: Option<&str>,
    series: Option<&str>,
    pocket: &str,
    dist: Option<&str>,
    base_url: Option<&str>,
    progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> {
    // Obtain the package information, either directly in a series or with a search in all series
    let package_info = if let Some(s) = series {
        if let Some(cb) = progress {
            cb(
                &format!("Resolving package info for {}...", package),
                "",
                0,
                0,
            );
        }
        // Get the package information from that series and pocket
        get(package, s, pocket, version, base_url).await?
    } else {
        // No series given: pick the distribution (explicit `dist`, or detected).
        let dist = dist.unwrap_or_else(||
            // Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
            // NOTE(review): shells out to `lsb_release -i -s`; a missing binary
            // silently falls back to "debian" via unwrap_or_default().
            if std::process::Command::new("lsb_release").arg("-i").arg("-s").output()
                .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_lowercase()).unwrap_or_default() == "ubuntu" {
                "ubuntu"
            } else {
                "debian"
            }
        );
        if let Some(cb) = progress {
            cb(
                &format!(
                    "Searching for package {} in {}...",
                    package,
                    // A custom base_url means a PPA is being searched, not the distro archive
                    if base_url.is_none() { dist } else { "ppa" }
                ),
                "",
                0,
                0,
            );
        }
        // Try to find the package in all series from that dist
        find_package(package, dist, pocket, version, base_url, progress).await?
    };
    Ok(package_info)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@@ -408,6 +473,26 @@ mod tests {
assert!(url.is_none()); assert!(url.is_none());
} }
    #[tokio::test]
    async fn test_get_debian_series() {
        // Needs distro-info data (local CSV or network); checks a couple of
        // series that are guaranteed to exist for Debian.
        let series = get_dist_series("debian").await.unwrap();
        assert!(series.contains(&"sid".to_string()));
        assert!(series.contains(&"bookworm".to_string()));
    }
    #[tokio::test]
    async fn test_get_ubuntu_series() {
        // Needs distro-info data (local CSV or network); checks a couple of
        // series that are guaranteed to exist for Ubuntu.
        let series = get_dist_series("ubuntu").await.unwrap();
        assert!(series.contains(&"noble".to_string()));
        assert!(series.contains(&"jammy".to_string()));
    }
    #[tokio::test]
    async fn test_get_dist_from_series() {
        // Reverse lookup: a series name must resolve back to its distribution.
        assert_eq!(get_dist_from_series("sid").await.unwrap(), "debian");
        assert_eq!(get_dist_from_series("noble").await.unwrap(), "ubuntu");
    }
#[test] #[test]
fn test_parse_sources() { fn test_parse_sources() {
use flate2::Compression; use flate2::Compression;
@@ -445,7 +530,7 @@ Version: 1.0
#[tokio::test] #[tokio::test]
async fn test_find_package_fallback() { async fn test_find_package_fallback() {
// python2.7 is in bullseye but not above // python2.7 is in bullseye but not above
let info = find_package("python2.7", "debian", "", None, None, None) let info = find_package("python2.7", "debian", "", None, None)
.await .await
.unwrap(); .unwrap();
assert_eq!(info.stanza.package, "python2.7"); assert_eq!(info.stanza.package, "python2.7");
@@ -455,7 +540,7 @@ Version: 1.0
#[tokio::test] #[tokio::test]
async fn test_find_package_devel() { async fn test_find_package_devel() {
// hello is in sid // hello is in sid
let info = find_package("hello", "debian", "", None, None, None) let info = find_package("hello", "debian", "", None, None)
.await .await
.unwrap(); .unwrap();
assert_eq!(info.stanza.package, "hello"); assert_eq!(info.stanza.package, "hello");

View File

@@ -1,8 +1,8 @@
use std::cmp::min; use std::cmp::min;
use std::error::Error; use std::error::Error;
use std::path::Path; use std::path::Path;
use std::path::PathBuf;
use crate::package_info;
use crate::package_info::PackageInfo; use crate::package_info::PackageInfo;
use std::process::Command; use std::process::Command;
@@ -91,91 +91,23 @@ use futures_util::StreamExt;
use tar::Archive; use tar::Archive;
use xz2::read::XzDecoder; use xz2::read::XzDecoder;
fn copy_dir_all(src: &Path, dst: &Path) -> Result<(), Box<dyn Error>> { fn extract_archive(path: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
if !dst.exists() { let file = File::open(path)?;
std::fs::create_dir_all(dst)?;
}
for entry in std::fs::read_dir(src)? {
let entry = entry?;
let src_path = entry.path();
let dst_path = dst.join(entry.file_name());
if src_path.is_dir() {
copy_dir_all(&src_path, &dst_path)?;
} else {
std::fs::copy(&src_path, &dst_path)?;
}
}
Ok(())
}
/// Helper function to extract tar archive with progress tracking
///
/// Generic over the decompressor: `decoder_factory` wraps the opened file
/// in a `Read` implementation (e.g. gzip or xz). The archive is opened
/// twice on purpose: a first pass only counts the entries so the progress
/// callback can report a meaningful total, a second pass performs the
/// actual extraction under `dest`.
///
/// Returns the list of extracted paths as strings.
fn extract_tar_archive<D, F>(
    file_path: &Path,
    dest: &Path,
    progress: ProgressCallback<'_>,
    decoder_factory: F,
) -> Result<Vec<String>, Box<dyn Error>>
where
    D: std::io::Read,
    F: Fn(File) -> D,
{
    let file = File::open(file_path)?;
    let decoder = decoder_factory(file);
    let mut archive = Archive::new(decoder);
    // Get total number of entries for progress tracking
    // (this first pass consumes the decoder, hence the re-open below)
    let total_entries = archive.entries()?.count();
    let mut current_entry = 0;
    // Reset the archive to read entries again
    let file = File::open(file_path)?;
    let decoder = decoder_factory(file);
    let mut archive = Archive::new(decoder);
    let mut extracted_files = Vec::new();
    for entry in archive.entries()? {
        let mut entry = entry?;
        let path = entry.path()?.to_path_buf();
        let dest_path = dest.join(&path);
        // Create parent directories if needed
        if let Some(parent) = dest_path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        // Extract the file
        entry.unpack(&dest_path)?;
        extracted_files.push(dest_path.to_string_lossy().to_string());
        current_entry += 1;
        // Report progress
        if let Some(cb) = progress {
            cb("", "Extracting...", current_entry, total_entries);
        }
    }
    Ok(extracted_files)
}
fn extract_archive(
path: &Path,
dest: &Path,
progress: ProgressCallback<'_>,
) -> Result<Vec<String>, Box<dyn Error>> {
let filename = path.file_name().unwrap().to_string_lossy(); let filename = path.file_name().unwrap().to_string_lossy();
if filename.ends_with(".tar.gz") || filename.ends_with(".tgz") { if filename.ends_with(".tar.gz") || filename.ends_with(".tgz") {
extract_tar_archive(path, dest, progress, GzDecoder::new) let tar = GzDecoder::new(file);
let mut archive = Archive::new(tar);
archive.unpack(dest)?;
} else if filename.ends_with(".tar.xz") || filename.ends_with(".txz") { } else if filename.ends_with(".tar.xz") || filename.ends_with(".txz") {
extract_tar_archive(path, dest, progress, XzDecoder::new) let tar = XzDecoder::new(file);
let mut archive = Archive::new(tar);
archive.unpack(dest)?;
} else { } else {
Err(format!("Unsupported archive format: {}", filename).into()) return Err(format!("Unsupported archive format: {}", filename).into());
} }
Ok(())
} }
fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<dyn Error>> { fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<dyn Error>> {
@@ -375,120 +307,90 @@ async fn fetch_archive_sources(
for file in &info.stanza.files { for file in &info.stanza.files {
let url = format!("{}/{}", info.archive_url, file.name); let url = format!("{}/{}", info.archive_url, file.name);
download_file_checksum(&url, &file.sha256, package_dir, progress).await?; download_file_checksum(&url, &file.sha256, package_dir, progress).await?;
}
// Extract all tar archives, merging extracted directories // Extract the debian tarball or diff
if file.name.ends_with(".tar.gz") || file.name.ends_with(".tar.xz") { let debian_file = info
.stanza
.files
.iter()
.find(|f| f.name.contains(".debian.tar.") || f.name.contains(".diff.gz"));
if let Some(file) = debian_file {
let path = package_dir.join(&file.name); let path = package_dir.join(&file.name);
let extract_dir = package_dir.join(&info.stanza.package); let extract_dir = package_dir.join(&info.stanza.package);
let extracted = extract_archive(&path, &extract_dir, progress)?; if (file.name.ends_with(".tar.xz") || file.name.ends_with(".tar.gz"))
&& let Err(e) = extract_archive(&path, &extract_dir)
// Special case: the debian tar does only contain 'debian'
if file.name.contains("debian.tar.") {
continue;
}
// List root directories extracted and use the first one as the source directory
debug!("Root directories extracted:");
let mut source_dir: Option<PathBuf> = None;
for file in &extracted {
let path = Path::new(file);
// Check if this is a directory and is at the archive root level
// (i.e., the path relative to extract_dir has no parent components)
if let Ok(relative_path) = path.strip_prefix(&extract_dir)
&& relative_path.components().count() == 1
&& path.is_dir()
{ {
debug!("- {}", relative_path.file_name().unwrap().to_string_lossy()); return Err(format!("Failed to extract {}: {}", file.name, e).into());
// Use the first directory found as the source
if source_dir.is_none() {
source_dir = Some(path.to_path_buf());
}
}
} }
// Use the extracted directory as the source, assuming there is only one // Remove archive after extraction
if let Some(src_dir) = source_dir { std::fs::remove_file(&path)?;
let target_dir = package_dir.join(&info.stanza.package);
if target_dir.exists() {
// Target exists, we need to merge contents
for sub_entry in std::fs::read_dir(&src_dir)? {
let sub_entry = sub_entry?;
let sub_path = sub_entry.path();
let target_path = target_dir.join(sub_entry.file_name());
if sub_path.is_dir() {
std::fs::create_dir_all(&target_path)?;
// Recursively copy directory contents
copy_dir_all(&sub_path, &target_path)?;
} else {
std::fs::copy(&sub_path, &target_path)?;
}
}
std::fs::remove_dir_all(&src_dir)?;
} else {
std::fs::rename(&src_dir, &target_dir)?;
}
}
}
// Extract and apply .diff.gz if present (old packages)
if file.name.ends_with(".diff.gz") {
let diff_gz_path = package_dir.join(&file.name);
let source_dir = package_dir.join(&info.stanza.package);
// Create the .diff file path by replacing .gz with empty string
let diff_path = diff_gz_path.with_extension("");
// Decompress the .diff.gz file directly to .diff
let input_file = File::open(&diff_gz_path)?;
let mut decoder = GzDecoder::new(input_file);
let mut output_file = File::create(&diff_path)?;
std::io::copy(&mut decoder, &mut output_file)?;
// Use relative path for the diff file (it's in the parent directory)
let relative_diff_path =
format!("../{}", diff_path.file_name().unwrap().to_string_lossy());
// Apply the patch using the patch command with relative path
let output = Command::new("patch")
.current_dir(&source_dir)
.arg("-p1")
.arg("--input")
.arg(&relative_diff_path)
.output()?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
return Err(
format!("Failed to apply patch: {}\n{}", diff_path.display(), stderr).into(),
);
}
debug!("Successfully applied patch: {}", diff_path.display());
// Clean up the extracted .diff file
std::fs::remove_file(&diff_path)?;
}
} }
Ok(()) Ok(())
} }
/// Pull a source package locally using pre-retrieved package information /// Pull a source package locally
/// ///
/// This function takes a PackageInfo struct and downloads the package using the preferred method /// Will try to find the package information, and use it to download it over prefered way
/// (either git or direct archive download), as well as orig tarball, inside 'package' directory. /// (either git or direct archive download), as well as orig tarball, inside 'package' directory
/// The source will be extracted under 'package/package'. /// The source will be extracted under 'package/package'
pub async fn pull( pub async fn pull(
package_info: &PackageInfo, package: &str,
_version: &str,
series: Option<&str>,
pocket: &str,
_ppa: &str,
dist: Option<&str>,
cwd: Option<&Path>, cwd: Option<&Path>,
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
force_archive: bool, ) -> Result<PackageInfo, Box<dyn Error>> {
) -> Result<(), Box<dyn Error>> { let version_opt = if _version.is_empty() {
let package = &package_info.stanza.package; None
let series = &package_info.series; } else {
Some(_version)
};
/* Obtain the package information, either directly in a series or with a search in all series */
let package_info = if let Some(s) = series {
if let Some(cb) = progress {
cb(
&format!("Resolving package info for {}...", package),
"",
0,
0,
);
}
// Get the package information from that series and pocket
package_info::get(package, s, pocket, version_opt).await?
} else {
let dist = dist.unwrap_or_else(||
// Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
if std::process::Command::new("lsb_release").arg("-i").arg("-s").output()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_lowercase()).unwrap_or_default() == "ubuntu" {
"ubuntu"
} else {
"debian"
}
);
if let Some(cb) = progress {
cb(
&format!("Searching for package {} in {}...", package, dist),
"",
0,
0,
);
}
// Try to find the package in all series from that dist
package_info::find_package(package, dist, pocket, version_opt, progress).await?
};
let package_dir = if let Some(path) = cwd { let package_dir = if let Some(path) = cwd {
path.join(package) path.join(package)
} else { } else {
@@ -496,20 +398,15 @@ pub async fn pull(
}; };
/* Fetch the package: either via git (preferred VCS) or the archive */ /* Fetch the package: either via git (preferred VCS) or the archive */
if let Some(ref url) = package_info.preferred_vcs if let Some(ref url) = package_info.preferred_vcs {
&& !force_archive
{
// We have found a preferred VCS (git repository) for the package, so // We have found a preferred VCS (git repository) for the package, so
// we fetch the package from that repo. // we fetch the package from that repo.
// Depending on target series, we pick target branch; if latest series is specified, // Depending on target series, we pick target branch; if no series is specified,
// we target the development branch, i.e. the default branch // we target the development branch, i.e. the default branch
let branch_name = if crate::distro_info::get_ordered_series_name(package_info.dist.as_str()) let branch_name = if let Some(s) = series {
.await?[0]
!= *series
{
if package_info.dist == "ubuntu" { if package_info.dist == "ubuntu" {
Some(format!("{}/{}", package_info.dist, series)) Some(format!("{}/{}", package_info.dist, s))
} else { } else {
// Debian does not have reliable branch naming... // Debian does not have reliable branch naming...
// For now, we skip that part and clone default // For now, we skip that part and clone default
@@ -549,7 +446,7 @@ pub async fn pull(
if let Some(cb) = progress { if let Some(cb) = progress {
cb("Fetching orig tarball...", "", 0, 0); cb("Fetching orig tarball...", "", 0, 0);
} }
fetch_orig_tarball(package_info, Some(&package_dir), progress).await?; fetch_orig_tarball(&package_info, Some(&package_dir), progress).await?;
} else { } else {
debug!("Native package, skipping orig tarball fetch."); debug!("Native package, skipping orig tarball fetch.");
} }
@@ -557,43 +454,35 @@ pub async fn pull(
if let Some(cb) = progress { if let Some(cb) = progress {
cb("Fetching dsc file...", "", 0, 0); cb("Fetching dsc file...", "", 0, 0);
} }
fetch_dsc_file(package_info, Some(&package_dir), progress).await?; fetch_dsc_file(&package_info, Some(&package_dir), progress).await?;
} else { } else {
// Fallback to archive fetching // Fallback to archive fetching
if let Some(cb) = progress { if let Some(cb) = progress {
cb("Downloading from archive...", "", 0, 0); cb("Downloading from archive...", "", 0, 0);
} }
fetch_archive_sources(package_info, Some(&package_dir), progress).await?; fetch_archive_sources(&package_info, Some(&package_dir), progress).await?;
} }
Ok(()) Ok(package_info)
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
async fn test_pull_package_end_to_end( async fn test_pull_package_end_to_end(package: &str, series: Option<&str>, dist: Option<&str>) {
package: &str,
series: Option<&str>,
dist: Option<&str>,
archive: Option<bool>,
) {
// This test verifies that 'pkh pull' clones the repo and fetches the tarball. // This test verifies that 'pkh pull' clones the repo and fetches the tarball.
// For determinism, we require for tests that either a distro or series is specified, // For determinism, we require for tests that either a distro or series is specified,
// as no distribution would mean fallback to system distro // as no distribution would mean fallback to system distro
assert!(dist.is_some() || series.is_some()); assert!(dist != None || series != None);
// Use a temp directory as working directory // Use a temp directory as working directory
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let cwd = temp_dir.path(); let cwd = temp_dir.path();
// Main 'pull' command: the one we want to test // Main 'pull' command: the one we want to test
let info = crate::package_info::lookup(package, None, series, "", dist, None, None) let info = pull(package, "", series, "", "", dist, Some(cwd), None)
.await
.unwrap();
pull(&info, Some(cwd), None, archive.unwrap_or(false))
.await .await
.unwrap(); .unwrap();
@@ -655,39 +544,33 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_pull_hello_ubuntu_end_to_end() { async fn test_pull_hello_ubuntu_end_to_end() {
test_pull_package_end_to_end("hello", Some("noble"), None, None).await; test_pull_package_end_to_end("hello", Some("noble"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_debian_end_to_end() { async fn test_pull_hello_debian_end_to_end() {
test_pull_package_end_to_end("hello", Some("bookworm"), None, None).await; test_pull_package_end_to_end("hello", Some("bookworm"), None).await;
}
/// Specific test for a package using a .diff.gz, instead of .debian and .orig
#[tokio::test]
async fn test_pull_linux_riscv_ubuntu_end_to_end() {
test_pull_package_end_to_end("linux-riscv", Some("noble"), None, Some(true)).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_2048_universe_ubuntu_end_to_end() { async fn test_pull_2048_universe_ubuntu_end_to_end() {
test_pull_package_end_to_end("2048", Some("noble"), None, None).await; test_pull_package_end_to_end("2048", Some("noble"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_1oom_contrib_debian_end_to_end() { async fn test_pull_1oom_contrib_debian_end_to_end() {
test_pull_package_end_to_end("1oom", Some("trixie"), None, None).await; test_pull_package_end_to_end("1oom", Some("trixie"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_agg_svn_fallback_ok() { async fn test_pull_agg_svn_fallback_ok() {
test_pull_package_end_to_end("agg", Some("trixie"), None, None).await; test_pull_package_end_to_end("agg", Some("trixie"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_debian_latest_end_to_end() { async fn test_pull_hello_debian_latest_end_to_end() {
test_pull_package_end_to_end("hello", None, Some("debian"), None).await; test_pull_package_end_to_end("hello", None, Some("debian")).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_ubuntu_latest_end_to_end() { async fn test_pull_hello_ubuntu_latest_end_to_end() {
test_pull_package_end_to_end("hello", None, Some("ubuntu"), None).await; test_pull_package_end_to_end("hello", None, Some("ubuntu")).await;
} }
} }

View File

@@ -1,77 +0,0 @@
//! Quirks module for handling package-specific workarounds
//!
//! This module provides functionality to read quirks from a YAML file
//! and apply them during pull and deb operations.
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Quirks configuration for a specific operation (pull or deb)
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct OperationQuirks {
    /// Extra dependencies to install before the operation
    #[serde(default)]
    pub extra_dependencies: Vec<String>,
    /// Additional parameters for the operation
    /// (free-form key/value pairs; interpretation is left to the operation)
    #[serde(default)]
    pub parameters: HashMap<String, serde_yaml::Value>,
}
/// Quirks for a specific package
///
/// Each operation entry is optional: a missing entry means no quirks
/// apply for that operation.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct PackageQuirks {
    /// Quirks to apply during pull operation
    #[serde(default)]
    pub pull: Option<OperationQuirks>,
    /// Quirks to apply during deb operation
    #[serde(default)]
    pub deb: Option<OperationQuirks>,
}
/// Top-level quirks configuration
///
/// Mirrors the root of the quirks YAML document.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct QuirksConfig {
    /// Map of package names to their quirks
    pub quirks: HashMap<String, PackageQuirks>,
}
// The quirks database is embedded at compile time from quirks.yml at the crate root.
const QUIRKS_YAML: &str = include_str!("../quirks.yml");
// Parsed once on first access; panics on first use if quirks.yml is not valid YAML.
lazy_static! {
    static ref QUIRKS_DATA: QuirksConfig = serde_yaml::from_str(QUIRKS_YAML).unwrap();
}
/// Get quirks for a specific package
///
/// # Arguments
/// * `config` - The quirks configuration
/// * `package` - The package name
///
/// # Returns
/// * `Option<PackageQuirks>` - The quirks for the package, or None if not found
pub fn get_package_quirks<'a>(
    config: &'a QuirksConfig,
    package: &str,
) -> Option<&'a PackageQuirks> {
    let known_quirks = &config.quirks;
    known_quirks.get(package)
}
/// Get deb-time extra dependencies for a package
///
/// This function returns the list of extra dependencies that should be installed
/// before building a package, as defined in the quirks configuration.
///
/// # Arguments
/// * `package` - The package name
///
/// # Returns
/// * `Vec<String>` - List of extra dependencies, or empty vector if none
pub fn get_deb_extra_dependencies(package: &str) -> Vec<String> {
    // Bug fix: the previous `.unwrap()` on the quirks lookup panicked for
    // every package without a quirks entry (the common case). Packages
    // without quirks — or without a `deb` section — must yield an empty list.
    get_package_quirks(&QUIRKS_DATA, package)
        .and_then(|quirks| quirks.deb.as_ref())
        .map(|deb| deb.extra_dependencies.clone())
        .unwrap_or_default()
}

View File

@@ -1,2 +1 @@
pub mod gpg; pub mod gpg;
pub mod root;

View File

@@ -1,15 +0,0 @@
//! Root privilege checking utilities
use std::error::Error;
/// Check if the current process has root privileges
///
/// # Returns
/// * `Ok(true)` - Running as root
/// * `Ok(false)` - Not running as root
/// * `Err` - Failed to check privileges
pub fn is_root() -> Result<bool, Box<dyn Error>> {
    // Check if we're running as root by checking the effective user ID
    // SAFETY: geteuid() takes no arguments, cannot fail, and has no
    // memory-safety preconditions; it only reads the process credentials.
    let uid = unsafe { libc::geteuid() };
    Ok(uid == 0)
}