Compare commits

...

31 Commits

Author SHA1 Message Date
225157be63 pull: allow pulling from ppa
All checks were successful
CI / build (push) Successful in 15m54s
CI / snap (push) Successful in 3m19s
2026-01-29 17:11:01 +01:00
af73c14674 deb: fix package directory detection by looking for 'debian' subfolder
All checks were successful
CI / build (push) Successful in 16m35s
CI / snap (push) Successful in 3m35s
2026-01-28 15:59:31 +01:00
b2d89ead7e snap: ensure action runs on host
All checks were successful
CI / build (push) Successful in 17m7s
CI / snap (push) Successful in 3m57s
2026-01-26 17:48:05 +01:00
88ec82c0a0 snap: try running ci on host
Some checks failed
CI / build (push) Successful in 15m46s
CI / snap (push) Has been cancelled
2026-01-26 17:21:26 +01:00
9ffedb4d1a snap: switch ci action used to build snap
Some checks failed
CI / build (push) Successful in 17m3s
CI / snap (push) Failing after 10s
2026-01-26 16:49:55 +01:00
1db1048a2e apt/keyring: use original keyring file name
Some checks failed
CI / build (push) Successful in 14m20s
CI / snap (push) Failing after 49s
2026-01-26 16:27:18 +01:00
d832666858 distro_info: fully parse distro info
Some checks failed
CI / build (push) Failing after 10m16s
CI / snap (push) Has been skipped
2026-01-26 16:07:39 +01:00
45960e5f17 distro_info: fix sid keyring url
Some checks failed
CI / build (push) Failing after 10m27s
CI / snap (push) Has been skipped
2026-01-26 14:20:19 +01:00
97a78336fa snap: build a devmode test snap (in ci)
Some checks failed
CI / build (push) Failing after 9m26s
CI / snap (push) Has been skipped
2026-01-26 14:02:38 +01:00
c64c3146d7 deb: test building packages from sid
Some checks failed
CI / build (push) Failing after 9m17s
2026-01-26 12:06:00 +01:00
13c44daf9a deb: allow use parallel building
All checks were successful
CI / build (push) Successful in 10m29s
2026-01-26 10:31:58 +01:00
dfd197415f unshare: fix device creation code 2026-01-26 10:26:16 +01:00
73a61042e8 deb: disable gcc-15 test for CI 2026-01-26 10:25:53 +01:00
2f5fb3b1e7 deb: fix runtime usage for chroot
Some checks failed
CI / build (push) Failing after 11m13s
2026-01-26 10:14:04 +01:00
c3a116203a pull: allow applying diff.gz from archive download
Some checks failed
CI / build (push) Failing after 7m56s
2026-01-22 00:05:39 +01:00
dd9cc07285 deb: make sure to have the right apt keyrings
Some checks failed
CI / build (push) Failing after 14s
2026-01-20 19:31:07 +01:00
ab35af5fb5 package_info: refactor into distro_info and package_info split, yaml data
Some checks failed
CI / build (push) Failing after 9m3s
2026-01-14 21:18:25 +01:00
a975c24e54 deb: ci test building gcc-15 from sid
Some checks failed
CI / build (push) Failing after 9m17s
2026-01-14 11:12:12 +01:00
7e69131886 pull: only extract real archives
All checks were successful
CI / build (push) Successful in 9m0s
2026-01-14 09:32:13 +01:00
d7a99f77f5 pull: always extract tars when archive pulling, merging dirs
Some checks failed
CI / build (push) Failing after 3m18s
2026-01-13 23:22:55 +01:00
b4a60e2ae2 deb: cleanup ephemeral context only on success
All checks were successful
CI / build (push) Successful in 11m39s
2026-01-13 14:54:56 +01:00
87ce0f648b deb: create special device nodes inside chroot
All checks were successful
CI / build (push) Successful in 9m5s
2026-01-13 09:58:16 +01:00
29297d6f34 pkh: added pull --archive flag
All checks were successful
CI / build (push) Successful in 9m20s
2026-01-12 22:42:09 +01:00
843f28e8af pull: move orig tarball content to the right place on archive download
All checks were successful
CI / build (push) Successful in 9m13s
2026-01-12 21:58:57 +01:00
35f9517732 pull: extract orig tarball as well on archive download
Some checks failed
CI / build (push) Failing after 7m50s
2026-01-12 18:54:07 +01:00
593793373a deb: consider all kinds of package dirs
Some checks failed
CI / build (push) Failing after 12m32s
2026-01-12 11:29:18 +01:00
21bb76153e deb: consider package directories with version
Some checks failed
CI / build (push) Failing after 1m6s
2026-01-12 10:37:09 +01:00
bd10a37c2a pull: fmt
Some checks failed
CI / build (push) Failing after 8m1s
2026-01-11 22:19:46 +01:00
91c812a530 pull: allow to force pull from archive
Some checks failed
CI / build (push) Failing after 1m2s
2026-01-11 20:43:01 +01:00
70e6d8c051 pull: refactor to remove series argument
All checks were successful
CI / build (push) Successful in 9m31s
2026-01-11 12:36:19 +01:00
2f43ed1597 ci: fix clippy
All checks were successful
CI / build (push) Successful in 9m7s
2026-01-11 12:22:00 +01:00
17 changed files with 1076 additions and 285 deletions

View File

@@ -33,11 +33,11 @@ jobs:
- name: Build - name: Build
run: cargo build run: cargo build
env: env:
RUST_FLAGS: -Dwarnings RUSTFLAGS: -Dwarnings
- name: Lint - name: Lint
run: cargo clippy --all-targets --all-features run: cargo clippy --all-targets --all-features
env: env:
RUST_FLAGS: -Dwarnings RUSTFLAGS: -Dwarnings
- name: Install runtime system dependencies - name: Install runtime system dependencies
run: | run: |
sudo apt-get update sudo apt-get update
@@ -49,3 +49,17 @@ jobs:
env: env:
RUST_LOG: debug RUST_LOG: debug
run: timeout 30m cargo test -- --nocapture run: timeout 30m cargo test -- --nocapture
snap:
needs: build
runs-on: ubuntu-latest
outputs:
snap-file: ${{ steps.build-snap.outputs.snap }}
steps:
- uses: actions/checkout@v4
- uses: snapcore/action-build@v1
id: build-snap
- uses: actions/upload-artifact@v3
with:
name: snap
path: ${{ steps.build-snap.outputs.snap }}

View File

@@ -28,6 +28,8 @@ serde_json = "1.0.145"
directories = "6.0.0" directories = "6.0.0"
ssh2 = "0.9.5" ssh2 = "0.9.5"
gpgme = "0.11" gpgme = "0.11"
serde_yaml = "0.9"
lazy_static = "1.4.0"
[dev-dependencies] [dev-dependencies]
test-log = "0.2.19" test-log = "0.2.19"

View File

@@ -82,7 +82,7 @@ Missing features:
- [ ] `pkh pull` - [ ] `pkh pull`
- [x] Obtain package sources from git - [x] Obtain package sources from git
- [x] Obtain package sources from the archive (fallback) - [x] Obtain package sources from the archive (fallback)
- [ ] Obtain package source from PPA (--ppa) - [x] Obtain package source from PPA (--ppa)
- [ ] Obtain a specific version of the package - [ ] Obtain a specific version of the package
- [x] Fetch the correct git branch for series on Ubuntu - [x] Fetch the correct git branch for series on Ubuntu
- [ ] Try to fetch the correct git branch for series on Debian, or fallback to the archive - [ ] Try to fetch the correct git branch for series on Debian, or fallback to the archive

28
distro_info.yml Normal file
View File

@@ -0,0 +1,28 @@
## Static data needed for pkh operations
## Instead of hardcoding the data in code, data files allow us to quickly
## update and maintain such data in one unique place
## The goal is to have the minimal possible set of data necessary
## to grab the actual data. For example we don't want to store every Ubuntu
## or Debian series, but rather a URL where we can properly access that data.
dist_info:
local: /usr/share/distro-info/{dist}
network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/
dist:
debian:
base_url: http://deb.debian.org/debian
archive_keyring: https://ftp-master.debian.org/keys/archive-key-{series_num}.asc
pockets:
- proposed-updates
- updates
series:
local: /usr/share/distro-info/debian.csv
network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/debian.csv
ubuntu:
base_url: http://archive.ubuntu.com/ubuntu
archive_keyring: http://archive.ubuntu.com/ubuntu/project/ubuntu-archive-keyring.gpg
pockets:
- proposed
- updates
series:
local: /usr/share/distro-info/ubuntu.csv
network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/ubuntu.csv

42
snap/snapcraft.yaml Normal file
View File

@@ -0,0 +1,42 @@
name: pkh
base: core24
summary: pkh is a packaging helper for Debian/Ubuntu packages
description: |
pkh aims at wrapping the different debian tools and workflows
into one tool, that would have the same interface for everything,
while being smarter at integrating all workflows.
adopt-info: pkh-part
confinement: devmode
apps:
pkh:
command: bin/pkh
parts:
pkh-part:
plugin: rust
source: .
override-pull: |
craftctl default
craftctl set version=$(git rev-parse --short=11 HEAD)
craftctl set grade="devel"
build-packages:
- pkg-config
- libssl-dev
- libgpg-error-dev
- libgpgme-dev
stage-packages:
- libgpgme11t64
- git
- curl
- pristine-tar
- sbuild
- mmdebstrap
- util-linux
- dpkg-dev
stage:
- -usr/lib/x86_64-linux-gnu/libicuio.so.74.2
- -usr/lib/x86_64-linux-gnu/libicutest.so.74.2
- -usr/lib/x86_64-linux-gnu/libicutu.so.74.2
- -usr/lib/x86_64-linux-gnu/libicui18n.so.74.2

View File

@@ -0,0 +1,58 @@
//! APT keyring management for mmdebstrap
//!
//! Provides a simple function to ensure that archive keyrings are available
//! for mmdebstrap operations by downloading them from specified URLs.
use crate::context;
use crate::distro_info;
use std::error::Error;
use std::path::Path;
use std::sync::Arc;
/// Download a keyring into apt trusted.gpg.d directory, trusting that keyring
///
/// Resolves the archive keyring URL for `series` via `distro_info`, then runs
/// `curl` through the execution context to install the file under
/// /etc/apt/trusted.gpg.d, so apt (and mmdebstrap) will trust that archive.
///
/// * `ctx` - execution context to run commands in; `None` uses the current one
/// * `series` - distribution series (e.g. "noble", "sid") to fetch the keyring for
///
/// Returns `Err` if the keyring URL cannot be resolved or the download fails.
pub async fn download_trust_keyring(
    ctx: Option<Arc<context::Context>>,
    series: &str,
) -> Result<(), Box<dyn Error>> {
    let ctx = ctx.unwrap_or_else(context::current);

    // Obtain keyring URL from distro_info
    let keyring_url = distro_info::get_keyring_url(series).await?;
    log::debug!("Downloading keyring from: {}", keyring_url);

    // Create trusted.gpg.d directory if it doesn't exist
    // (mkdir runs inside the context, which may be a chroot)
    let trusted_gpg_d = "/etc/apt/trusted.gpg.d";
    if !ctx.exists(Path::new(trusted_gpg_d))? {
        ctx.command("mkdir").arg("-p").arg(trusted_gpg_d).status()?;
    }

    // Extract the original filename from the keyring URL. The "pkh-{}.gpg"
    // fallback only applies if the URL yields no path segment; the `replace`
    // substitutes the series into that fallback (it is a no-op for real names).
    let filename = keyring_url
        .split('/')
        .next_back()
        .unwrap_or("pkh-{}.gpg")
        .replace("{}", series);
    let keyring_path = format!("{}/{}", trusted_gpg_d, filename);

    // Download the keyring directly to the final location using curl
    // -s: silent, -f: fail on HTTP errors, -L: follow redirects
    let mut curl_cmd = ctx.command("curl");
    curl_cmd
        .arg("-s")
        .arg("-f")
        .arg("-L")
        .arg(&keyring_url)
        .arg("--output")
        .arg(&keyring_path);
    let status = curl_cmd.status()?;
    if !status.success() {
        return Err(format!("Failed to download keyring from {}", keyring_url).into());
    }

    log::info!(
        "Successfully downloaded and installed keyring for {} to {}",
        series,
        keyring_path
    );
    Ok(())
}

View File

@@ -1 +1,2 @@
pub mod keyring;
pub mod sources; pub mod sources;

View File

@@ -208,7 +208,7 @@ impl UnshareDriver {
} }
cmd.arg("--").arg("bash").arg("-c").arg(format!( cmd.arg("--").arg("bash").arg("-c").arg(format!(
"mount -t proc proc /proc; mount -t devpts devpts /dev/pts; mount --bind /dev/pts/ptmx /dev/ptmx; {} {}", "mount -t proc proc /proc; mkdir /dev/pts; mount -t devpts devpts /dev/pts; touch /dev/ptmx; mount --bind /dev/pts/ptmx /dev/ptmx; {} {}",
program, program,
args.join(" ") args.join(" ")
)); ));

View File

@@ -12,11 +12,12 @@ use xz2::read::XzDecoder;
pub struct EphemeralContextGuard { pub struct EphemeralContextGuard {
previous_context: String, previous_context: String,
chroot_path: PathBuf, chroot_path: PathBuf,
build_succeeded: bool,
} }
impl EphemeralContextGuard { impl EphemeralContextGuard {
/// Create a new ephemeral unshare context for the specified series /// Create a new ephemeral unshare context for the specified series
pub fn new(series: &str) -> Result<Self, Box<dyn Error>> { pub async fn new(series: &str) -> Result<Self, Box<dyn Error>> {
let current_context_name = context::manager().current_name(); let current_context_name = context::manager().current_name();
// Create a temporary directory for the chroot // Create a temporary directory for the chroot
@@ -30,7 +31,7 @@ impl EphemeralContextGuard {
); );
// Download and extract the chroot tarball // Download and extract the chroot tarball
Self::download_and_extract_chroot(series, &chroot_path)?; Self::download_and_extract_chroot(series, &chroot_path).await?;
// Switch to an ephemeral context to build the package in the chroot // Switch to an ephemeral context to build the package in the chroot
context::manager().set_current_ephemeral(Context::new(ContextConfig::Unshare { context::manager().set_current_ephemeral(Context::new(ContextConfig::Unshare {
@@ -41,10 +42,11 @@ impl EphemeralContextGuard {
Ok(Self { Ok(Self {
previous_context: current_context_name, previous_context: current_context_name,
chroot_path, chroot_path,
build_succeeded: false,
}) })
} }
fn download_and_extract_chroot( async fn download_and_extract_chroot(
series: &str, series: &str,
chroot_path: &PathBuf, chroot_path: &PathBuf,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
@@ -93,7 +95,7 @@ impl EphemeralContextGuard {
// Download tarball if it doesn't exist // Download tarball if it doesn't exist
if !tarball_path.exists() { if !tarball_path.exists() {
log::debug!("Downloading chroot tarball for {}...", series); log::debug!("Downloading chroot tarball for {}...", series);
Self::download_chroot_tarball(series, &tarball_path)?; Self::download_chroot_tarball(series, &tarball_path).await?;
} else { } else {
log::debug!("Using cached chroot tarball for {}", series); log::debug!("Using cached chroot tarball for {}", series);
} }
@@ -102,10 +104,17 @@ impl EphemeralContextGuard {
log::debug!("Extracting chroot tarball to {}...", chroot_path.display()); log::debug!("Extracting chroot tarball to {}...", chroot_path.display());
Self::extract_tarball(&tarball_path, chroot_path)?; Self::extract_tarball(&tarball_path, chroot_path)?;
// Create device nodes in the chroot
log::debug!("Creating device nodes in chroot...");
Self::create_device_nodes(chroot_path)?;
Ok(()) Ok(())
} }
fn download_chroot_tarball(series: &str, tarball_path: &Path) -> Result<(), Box<dyn Error>> { async fn download_chroot_tarball(
series: &str,
tarball_path: &Path,
) -> Result<(), Box<dyn Error>> {
let ctx = context::current(); let ctx = context::current();
// Create a lock file to make sure that no one tries to use the file while it's not fully downloaded
@@ -114,6 +123,9 @@ impl EphemeralContextGuard {
.arg(lockfile_path.to_string_lossy().to_string()) .arg(lockfile_path.to_string_lossy().to_string())
.status()?; .status()?;
// Make sure we have the right apt keyrings to mmdebstrap the chroot
crate::apt::keyring::download_trust_keyring(Some(ctx.clone()), series).await?;
// Use mmdebstrap to download the tarball to the cache directory // Use mmdebstrap to download the tarball to the cache directory
let status = ctx let status = ctx
.command("mmdebstrap") .command("mmdebstrap")
@@ -167,6 +179,62 @@ impl EphemeralContextGuard {
Ok(()) Ok(())
} }
fn create_device_nodes(chroot_path: &Path) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
let dev_null_path = chroot_path.join("dev/null");
let dev_zero_path = chroot_path.join("dev/zero");
// Ensure /dev directory exists
fs::create_dir_all(chroot_path.join("dev"))?;
// Remove existing device nodes if they exist
let _ = ctx
.command("rm")
.arg("-f")
.arg(dev_null_path.to_string_lossy().to_string())
.status();
let _ = ctx
.command("rm")
.arg("-f")
.arg(dev_zero_path.to_string_lossy().to_string())
.status();
// Create new device nodes using fakeroot and mknod
let status_null = ctx
.command("sudo")
.arg("mknod")
.arg("-m")
.arg("666")
.arg(dev_null_path.to_string_lossy().to_string())
.arg("c")
.arg("1")
.arg("3")
.status()?;
let status_zero = ctx
.command("sudo")
.arg("mknod")
.arg("-m")
.arg("666")
.arg(dev_zero_path.to_string_lossy().to_string())
.arg("c")
.arg("1")
.arg("5")
.status()?;
if !status_null.success() || !status_zero.success() {
return Err("Failed to create device nodes".into());
}
Ok(())
}
/// Mark the build as successful, which will trigger chroot cleanup on drop
pub fn mark_build_successful(&mut self) {
self.build_succeeded = true;
}
} }
impl Drop for EphemeralContextGuard { impl Drop for EphemeralContextGuard {
@@ -177,31 +245,46 @@ impl Drop for EphemeralContextGuard {
log::error!("Failed to restore context {}: {}", self.previous_context, e); log::error!("Failed to restore context {}: {}", self.previous_context, e);
} }
// Remove chroot directory // Remove chroot directory only if build succeeded
// We use the restored context to execute the cleanup command if self.build_succeeded {
let result = context::current() log::debug!(
.command("sudo") "Build succeeded, removing chroot directory: {}",
.arg("rm") self.chroot_path.display()
.arg("-rf") );
.arg(&self.chroot_path) let result = context::current()
.status(); .command("sudo")
.arg("rm")
.arg("-rf")
.arg(&self.chroot_path)
.status();
match result { match result {
Ok(status) => { Ok(status) => {
if !status.success() { if !status.success() {
log::error!(
"Failed to remove chroot directory {}",
self.chroot_path.display()
);
} else {
log::debug!(
"Successfully removed chroot directory: {}",
self.chroot_path.display()
);
}
}
Err(e) => {
log::error!( log::error!(
"Failed to remove chroot directory {}", "Failed to execute cleanup command for {}: {}",
self.chroot_path.display() self.chroot_path.display(),
e
); );
} }
} }
Err(e) => { } else {
log::error!( log::debug!(
"Failed to execute cleanup command for {}: {}", "Build did not succeed or was not marked as successful, keeping chroot directory: {}",
self.chroot_path.display(), self.chroot_path.display()
e );
);
}
} }
} }
} }

View File

@@ -24,6 +24,26 @@ pub fn build(
let ctx = context::current(); let ctx = context::current();
// Parallel building: find local number of cores, and use that
let num_cores = ctx
.command("nproc")
.output()
.map(|output| {
if output.status.success() {
String::from_utf8_lossy(&output.stdout)
.trim()
.parse::<usize>()
.unwrap_or(1)
} else {
1 // Default to 1 if nproc fails
}
})
.unwrap_or(1); // Default to 1 if we can't execute the command
env.insert(
"DEB_BUILD_OPTIONS".to_string(),
format!("parallel={}", num_cores),
);
if cross { if cross {
log::debug!("Setting up environment for local cross build..."); log::debug!("Setting up environment for local cross build...");
cross::setup_environment(&mut env, arch)?; cross::setup_environment(&mut env, arch)?;
@@ -80,10 +100,16 @@ pub fn build(
return Err("Could not install essential packages for the build".into()); return Err("Could not install essential packages for the build".into());
} }
// Find the actual package directory
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?;
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
// Install build dependencies // Install build dependencies
log::debug!("Installing build dependencies..."); log::debug!("Installing build dependencies...");
let mut cmd = ctx.command("apt-get"); let mut cmd = ctx.command("apt-get");
cmd.current_dir(format!("{build_root}/{package}")) cmd.current_dir(package_dir_str)
.envs(env.clone()) .envs(env.clone())
.arg("-y") .arg("-y")
.arg("build-dep"); .arg("build-dep");
@@ -102,7 +128,7 @@ pub fn build(
log::debug!("Building (debian/rules build) package..."); log::debug!("Building (debian/rules build) package...");
let status = ctx let status = ctx
.command("debian/rules") .command("debian/rules")
.current_dir(format!("{build_root}/{package}")) .current_dir(package_dir_str)
.envs(env.clone()) .envs(env.clone())
.arg("build") .arg("build")
.status()?; .status()?;
@@ -113,7 +139,7 @@ pub fn build(
// Run the 'binary' step to produce deb // Run the 'binary' step to produce deb
let status = ctx let status = ctx
.command("fakeroot") .command("fakeroot")
.current_dir(format!("{build_root}/{package}")) .current_dir(package_dir_str)
.envs(env.clone()) .envs(env.clone())
.arg("debian/rules") .arg("debian/rules")
.arg("binary") .arg("binary")

View File

@@ -17,7 +17,7 @@ pub enum BuildMode {
} }
/// Build package in 'cwd' to a .deb /// Build package in 'cwd' to a .deb
pub fn build_binary_package( pub async fn build_binary_package(
arch: Option<&str>, arch: Option<&str>,
series: Option<&str>, series: Option<&str>,
cwd: Option<&Path>, cwd: Option<&Path>,
@@ -48,8 +48,8 @@ pub fn build_binary_package(
}; };
// Create an ephemeral unshare context for all Local builds // Create an ephemeral unshare context for all Local builds
let _guard = if mode == BuildMode::Local { let mut guard = if mode == BuildMode::Local {
Some(ephemeral::EphemeralContextGuard::new(series)?) Some(ephemeral::EphemeralContextGuard::new(series).await?)
} else { } else {
None None
}; };
@@ -82,9 +82,95 @@ pub fn build_binary_package(
} }
} }
// Mark build as successful to trigger chroot cleanup
if let Some(ref mut g) = guard {
g.mark_build_successful();
}
Ok(()) Ok(())
} }
/// Find the current package directory by trying both patterns:
/// - package/package
/// - package/package-origversion
pub(crate) fn find_package_directory(
parent_dir: &Path,
package: &str,
version: &str,
) -> Result<PathBuf, Box<dyn Error>> {
let ctx = context::current();
// Try package/package pattern first
let package_dir = parent_dir.join(package).join(package);
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Compute origversion from version: remove everything after first '-', after stripping epoch
let version_without_epoch = version.split_once(':').map(|(_, v)| v).unwrap_or(version);
let origversion = version_without_epoch
.split_once('-')
.map(|(v, _)| v)
.unwrap_or(version);
// Try package/package-origversion pattern
let package_dir = parent_dir
.join(package)
.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Try 'package' only
let package_dir = parent_dir.join(package);
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Try package-origversion only
let package_dir = parent_dir.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// List all directories under 'package/' and log them
let package_parent = parent_dir;
if ctx.exists(package_parent)? {
log::debug!(
"Listing all directories under '{}':",
package_parent.display()
);
let entries = ctx.list_files(package_parent)?;
let mut found_dirs = Vec::new();
for entry in entries {
if entry.is_dir() {
if let Some(file_name) = entry.file_name() {
found_dirs.push(file_name.to_string_lossy().into_owned());
}
log::debug!(" - {}", entry.display());
}
}
// If we found directories but none matched our patterns, provide helpful error
if !found_dirs.is_empty() {
return Err(format!(
"Could not find package directory for {} in {}. Found directories: {}",
package,
parent_dir.display(),
found_dirs.join(", ")
)
.into());
}
}
Err(format!(
"Could not find package directory for {} in {}",
package,
parent_dir.display()
)
.into())
}
fn find_dsc_file( fn find_dsc_file(
build_root: &str, build_root: &str,
package: &str, package: &str,
@@ -126,20 +212,23 @@ mod tests {
log::debug!("Created temporary directory: {}", cwd.display()); log::debug!("Created temporary directory: {}", cwd.display());
log::info!("Pulling package {} from {}...", package, series); log::info!("Pulling package {} from {}...", package, series);
let package_info = crate::package_info::lookup(package, None, Some(series), "", dist, None) let package_info =
.await crate::package_info::lookup(package, None, Some(series), "", dist, None, None)
.expect("Cannot lookup package information"); .await
crate::pull::pull(&package_info, Some(series), Some(cwd), None) .expect("Cannot lookup package information");
crate::pull::pull(&package_info, Some(cwd), None, true)
.await .await
.expect("Cannot pull package"); .expect("Cannot pull package");
log::info!("Successfully pulled package {}", package); log::info!("Successfully pulled package {}", package);
// Change directory to the package directory // Change directory to the package directory
let cwd = cwd.join(package).join(package); let cwd = crate::deb::find_package_directory(cwd, package, &package_info.stanza.version)
.expect("Cannot find package directory");
log::debug!("Package directory: {}", cwd.display()); log::debug!("Package directory: {}", cwd.display());
log::info!("Starting binary package build..."); log::info!("Starting binary package build...");
crate::deb::build_binary_package(arch, Some(series), Some(&cwd), cross, None) crate::deb::build_binary_package(arch, Some(series), Some(&cwd), cross, None)
.await
.expect("Cannot build binary package (deb)"); .expect("Cannot build binary package (deb)");
log::info!("Successfully built binary package"); log::info!("Successfully built binary package");
@@ -177,6 +266,7 @@ mod tests {
test_build_end_to_end("hello", "noble", None, None, false).await; test_build_end_to_end("hello", "noble", None, None, false).await;
} }
/// This ensures that we can cross-build packages
#[tokio::test] #[tokio::test]
#[test_log::test] #[test_log::test]
#[cfg(target_arch = "x86_64")] #[cfg(target_arch = "x86_64")]
@@ -184,4 +274,30 @@ mod tests {
async fn test_deb_hello_ubuntu_cross_end_to_end() { async fn test_deb_hello_ubuntu_cross_end_to_end() {
test_build_end_to_end("hello", "noble", None, Some("riscv64"), true).await; test_build_end_to_end("hello", "noble", None, Some("riscv64"), true).await;
} }
/// This ensures that we can build packages from sid, even on older
/// releases. It can sometimes be difficult with mmdebstrap issues
/// for example.
#[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_hello_debian_sid_end_to_end() {
test_build_end_to_end("hello", "sid", None, None, false).await;
}
/// This is a specific test case for the latest gcc package on Debian
/// The GCC package is complex and hard to build, with specific stages
/// and system-bound scripts. Building it requires specific things that
/// we want to ensure are not broken.
/// NOTE: Ideally, we want to run this in CI, but it takes more than 20h
/// to fully build the gcc-15 package on an amd64 builder, which is too
/// much time.
#[ignore]
#[cfg(target_arch = "x86_64")]
#[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_gcc_debian_end_to_end() {
test_build_end_to_end("gcc-15", "sid", None, None, false).await;
}
} }

View File

@@ -2,18 +2,26 @@
/// Call 'sbuild' with the dsc file to build the package with unshare /// Call 'sbuild' with the dsc file to build the package with unshare
use crate::context; use crate::context;
use std::error::Error; use std::error::Error;
use std::path::Path;
pub fn build( pub fn build(
package: &str, package: &str,
_version: &str, version: &str,
arch: &str, arch: &str,
series: &str, series: &str,
build_root: &str, build_root: &str,
cross: bool, cross: bool,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
let ctx = context::current(); let ctx = context::current();
// Find the actual package directory
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?;
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
let mut cmd = ctx.command("sbuild"); let mut cmd = ctx.command("sbuild");
cmd.current_dir(format!("{}/{}", build_root, package)); cmd.current_dir(package_dir_str);
cmd.arg("--chroot-mode=unshare"); cmd.arg("--chroot-mode=unshare");
cmd.arg("--no-clean-source"); cmd.arg("--no-clean-source");

373
src/distro_info.rs Normal file
View File

@@ -0,0 +1,373 @@
use chrono::NaiveDate;
use lazy_static::lazy_static;
use serde::Deserialize;
use std::error::Error;
use std::path::Path;
#[derive(Debug, Clone)]
/// Information about a specific distribution series
///
/// Parsed from the distro-info CSV data (one row per series).
pub struct SeriesInformation {
    /// Distribution series (short name, e.g. "noble" or "bookworm")
    pub series: String,
    /// Codename, i.e. full name of series
    pub codename: String,
    /// Series version as numbers (may be absent, e.g. for Debian sid)
    pub version: Option<String>,
    /// Series creation date
    pub created: NaiveDate,
    /// Series release date (None for series not yet released)
    pub release: Option<NaiveDate>,
    /// Series end-of-life date (None when no EOL has been set)
    pub eol: Option<NaiveDate>,
}
#[derive(Debug, Deserialize)]
/// Locations of the distro-info CSV data for one distribution
struct SeriesInfo {
    // Path of the locally installed distro-info data file
    local: String,
    // URL of the upstream distro-info data file, used as a fallback
    network: String,
}

#[derive(Debug, Deserialize)]
/// Per-distribution static data, deserialized from distro_info.yml
struct DistData {
    // Archive base URL, e.g. http://archive.ubuntu.com/ubuntu
    base_url: String,
    // Archive keyring URL (may contain a {series_num} placeholder)
    archive_keyring: String,
    // Pocket suffixes available for the distribution (besides the release pocket)
    pockets: Vec<String>,
    // Where to find the series CSV data
    series: SeriesInfo,
}

#[derive(Debug, Deserialize)]
/// Top-level structure of distro_info.yml
struct Data {
    dist: std::collections::HashMap<String, DistData>,
}

// Embed the YAML data file at compile time so the binary is self-contained
const DATA_YAML: &str = include_str!("../distro_info.yml");

lazy_static! {
    // Parsed once on first access; the data file is compiled in, so a parse
    // failure here is a build-time data bug (hence the unwrap)
    static ref DATA: Data = serde_yaml::from_str(DATA_YAML).unwrap();
}
fn parse_series_csv(content: &str) -> Result<Vec<SeriesInformation>, Box<dyn Error>> {
let mut rdr = csv::ReaderBuilder::new()
.flexible(true)
.from_reader(content.as_bytes());
let headers = rdr.headers()?.clone();
let series_idx = headers
.iter()
.position(|h| h == "series")
.ok_or("Column 'series' not found")?;
let codename_idx = headers
.iter()
.position(|h| h == "codename")
.ok_or("Column 'codename' not found")?;
let version_idx = headers
.iter()
.position(|h| h == "version")
.ok_or("Column 'version' not found")?;
let created_idx = headers
.iter()
.position(|h| h == "created")
.ok_or("Column 'created' not found")?;
let release_idx = headers
.iter()
.position(|h| h == "release")
.ok_or("Column 'release' not found")?;
let eol_idx = headers
.iter()
.position(|h| h == "eol")
.ok_or("Column 'eol' not found")?;
let mut series_info_list = Vec::new();
for result in rdr.records() {
let record = result?;
let series = record.get(series_idx).unwrap().to_string();
let codename = record.get(codename_idx).unwrap().to_string();
let version = record.get(version_idx).map(|s| s.to_string());
let created = record
.get(created_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap())
.unwrap();
let release = record
.get(release_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap());
let eol = record
.get(eol_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap());
series_info_list.push(SeriesInformation {
series,
codename,
version,
created,
release,
eol,
});
}
// Revert to sort by most recent
series_info_list.reverse();
Ok(series_info_list)
}
/// Get time-ordered list of series information for a distribution, development series first
///
/// Prefers the locally installed distro-info data file and falls back to
/// downloading the upstream copy when it is not present.
///
/// Returns `Err` for an unknown distribution, or when the data cannot be
/// read, downloaded, or parsed.
pub async fn get_ordered_series(dist: &str) -> Result<Vec<SeriesInformation>, Box<dyn Error>> {
    // Return a proper error for unknown distributions instead of panicking
    let series_info = &DATA
        .dist
        .get(dist)
        .ok_or_else(|| format!("Unsupported distribution: {}", dist))?
        .series;

    let content = if Path::new(series_info.local.as_str()).exists() {
        // Read the exact path we just checked (previously this re-built the
        // path with a hardcoded format string, duplicating the data file)
        std::fs::read_to_string(series_info.local.as_str())?
    } else {
        // Fall back to the network copy of the distro-info data
        reqwest::get(series_info.network.as_str())
            .await?
            .text()
            .await?
    };

    parse_series_csv(&content)
}
/// Get time-ordered list of series names for a distribution, development series first
pub async fn get_ordered_series_name(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
    // Project the full series information down to just the short names
    let all_series = get_ordered_series(dist).await?;
    let names = all_series.into_iter().map(|info| info.series).collect();
    Ok(names)
}
/// Get the latest released series for a dist (excluding future releases and special cases like sid)
pub async fn get_latest_released_series(dist: &str) -> Result<String, Box<dyn Error>> {
let series_info_list = get_ordered_series(dist).await?;
let today = chrono::Local::now().date_naive();
let mut released_series = Vec::new();
for series_info in series_info_list {
// Skip 'sid' and series without release dates or with future release dates
if series_info.series != "sid"
&& series_info.series != "experimental"
&& series_info.release.is_some()
&& series_info.release.unwrap() <= today
{
released_series.push(series_info);
}
}
// Sort by release date descending (newest first)
released_series.sort_by(|a, b| b.release.cmp(&a.release));
if let Some(latest) = released_series.first() {
Ok(latest.series.clone())
} else {
Err("No released series found".into())
}
}
/// Obtain the distribution (eg. debian, ubuntu) from a distribution series (eg. noble, bookworm)
pub async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
    let wanted = series.to_string();
    // Check every known distribution until one lists this series
    for dist in DATA.dist.keys() {
        let names = get_ordered_series_name(dist).await?;
        if names.contains(&wanted) {
            return Ok(dist.to_string());
        }
    }
    Err(format!("Unknown series: {}", series).into())
}
/// Get the package pockets available for a given distribution
///
/// Example: get_dist_pockets(ubuntu) => ["proposed", "updates", ""]
///
/// # Panics
/// Panics with a descriptive message if `dist` is not a known distribution
/// in `DATA` (previously a bare `unwrap`).
pub fn get_dist_pockets(dist: &str) -> Vec<String> {
    let mut pockets = DATA
        .dist
        .get(dist)
        .unwrap_or_else(|| panic!("Unknown distribution: {dist}"))
        .pockets
        .clone();
    // Explicitly add the 'main' pocket, which is just the empty string
    pockets.push(String::new());
    pockets
}
/// Get the sources URL for a distribution, series, pocket, and component
pub fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
    // Build the URL incrementally; the pocket suffix ("-<pocket>") is only
    // appended for non-empty pockets ("" means the main pocket).
    let mut url = format!("{base_url}/dists/{series}");
    if !pocket.is_empty() {
        url.push('-');
        url.push_str(pocket);
    }
    url.push_str(&format!("/{component}/source/Sources.gz"));
    url
}
/// Get the archive base URL for a distribution
///
/// Example: ubuntu => http://archive.ubuntu.com/ubuntu
///
/// # Panics
/// Panics with a descriptive message if `dist` is not a known distribution
/// in `DATA` (previously a bare `unwrap`).
pub fn get_base_url(dist: &str) -> String {
    DATA.dist
        .get(dist)
        .unwrap_or_else(|| panic!("Unknown distribution: {dist}"))
        .base_url
        .clone()
}
/// Obtain the URL for the archive keyring of a distribution series
///
/// For Debian the configured keyring URL contains a `{series_num}`
/// placeholder that is replaced with the series version number. 'sid' and
/// 'experimental' have no version number, so the latest released series is
/// used instead.
///
/// # Errors
/// Returns an error if the series is unknown, the distribution is
/// unsupported, or the series version number cannot be determined
/// (previously a panic via `unwrap`).
pub async fn get_keyring_url(series: &str) -> Result<String, Box<dyn Error>> {
    let dist = get_dist_from_series(series).await?;
    let dist_data = DATA
        .dist
        .get(&dist)
        .ok_or(format!("Unsupported distribution: {}", dist))?;
    // For Debian, we need the series number to form the keyring URL
    if dist == "debian" {
        // Special case for 'sid'/'experimental' - use the latest released
        // version, since the development series has no version number.
        let numbered_series = if series == "sid" || series == "experimental" {
            get_latest_released_series("debian").await?
        } else {
            series.to_string()
        };
        let series_num = get_debian_series_number(&numbered_series)
            .await?
            .ok_or_else(|| format!("No version number found for series: {}", numbered_series))?;
        // Replace {series_num} placeholder with the actual series number
        // (previously this substitution was duplicated in both branches).
        Ok(dist_data
            .archive_keyring
            .replace("{series_num}", &series_num))
    } else {
        // For other distributions like Ubuntu, use the keyring directly
        Ok(dist_data.archive_keyring.clone())
    }
}
/// Obtain the URL for the 'Release' file of a distribution series
fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
    // An empty pocket denotes the main pocket, which carries no suffix.
    match pocket {
        "" => format!("{base_url}/dists/{series}/Release"),
        p => format!("{base_url}/dists/{series}-{p}/Release"),
    }
}
/// Obtain the components of a distribution series by parsing the 'Release' file
pub async fn get_components(
    base_url: &str,
    series: &str,
    pocket: &str,
) -> Result<Vec<String>, Box<dyn Error>> {
    let url = get_release_url(base_url, series, pocket);
    log::debug!("Fetching Release file from: {}", url);
    let content = reqwest::get(&url).await?.text().await?;
    // The Release file contains a line like "Components: main contrib non-free";
    // take the first such line and split its value on whitespace.
    content
        .lines()
        .find_map(|line| line.strip_prefix("Components:"))
        .map(|components| {
            components
                .split_whitespace()
                .map(str::to_string)
                .collect()
        })
        .ok_or_else(|| "Components not found.".into())
}
/// Map a Debian series name to its version number
///
/// Looks the series up (case-insensitively) in the Debian distro-info CSV,
/// read from the local file when present or fetched over the network.
///
/// # Returns
/// * `Ok(Some(version))` when the series is found
/// * `Ok(None)` when the series is not listed
///
/// # Errors
/// Returns an error if the CSV cannot be read/fetched or lacks the
/// expected 'series'/'version' columns.
pub async fn get_debian_series_number(series: &str) -> Result<Option<String>, Box<dyn Error>> {
    let series_info = &DATA.dist.get("debian").unwrap().series;
    // Prefer the locally installed distro-info data; fall back to the network
    let content = if Path::new(series_info.local.as_str()).exists() {
        std::fs::read_to_string(series_info.local.as_str())?
    } else {
        reqwest::get(series_info.network.as_str())
            .await?
            .text()
            .await?
    };
    let mut rdr = csv::ReaderBuilder::new()
        .flexible(true)
        .from_reader(content.as_bytes());
    let headers = rdr.headers()?.clone();
    let series_idx = headers
        .iter()
        .position(|h| h == "series")
        .ok_or("Column 'series' not found")?;
    let version_idx = headers
        .iter()
        .position(|h| h == "version")
        .ok_or("Column 'version' not found")?;
    for result in rdr.records() {
        let record = result?;
        // Case-insensitive comparison without allocating two lowercased
        // strings per row (series names are ASCII).
        if let (Some(s), Some(v)) = (record.get(series_idx), record.get(version_idx))
            && s.eq_ignore_ascii_case(series)
        {
            return Ok(Some(v.to_string()));
        }
    }
    Ok(None)
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): these are integration tests — they read live distro-info
    // data (local file or network) and depend on external state such as the
    // current latest Debian/Ubuntu releases.
    #[tokio::test]
    async fn test_get_debian_series() {
        // Known Debian series must appear in the ordered series list.
        let series = get_ordered_series_name("debian").await.unwrap();
        assert!(series.contains(&"sid".to_string()));
        assert!(series.contains(&"bookworm".to_string()));
    }
    #[tokio::test]
    async fn test_get_ubuntu_series() {
        // Known Ubuntu series must appear in the ordered series list.
        let series = get_ordered_series_name("ubuntu").await.unwrap();
        assert!(series.contains(&"noble".to_string()));
        assert!(series.contains(&"jammy".to_string()));
    }
    #[tokio::test]
    async fn test_get_dist_from_series() {
        // Series-to-distribution resolution for one series of each dist.
        assert_eq!(get_dist_from_series("sid").await.unwrap(), "debian");
        assert_eq!(get_dist_from_series("noble").await.unwrap(), "ubuntu");
    }
    #[tokio::test]
    async fn test_get_debian_series_number() {
        // Test with known Debian series
        let bookworm_number = get_debian_series_number("bookworm").await.unwrap();
        assert!(bookworm_number.is_some());
        assert_eq!(bookworm_number.unwrap(), "12");
        let trixie_number = get_debian_series_number("trixie").await.unwrap();
        assert!(trixie_number.is_some());
        assert_eq!(trixie_number.unwrap(), "13");
        // Test with unknown series
        let unknown_number = get_debian_series_number("unknown").await.unwrap();
        assert!(unknown_number.is_none());
    }
    #[tokio::test]
    async fn test_get_keyring_url_sid() {
        // Test that 'sid' uses the latest released version for keyring URL
        let sid_keyring = get_keyring_url("sid").await.unwrap();
        let latest_released = get_latest_released_series("debian").await.unwrap();
        let latest_keyring = get_keyring_url(&latest_released).await.unwrap();
        // The keyring URL for 'sid' should be the same as the latest released version
        assert_eq!(sid_keyring, latest_keyring);
    }
    #[tokio::test]
    async fn test_get_latest_released_debian_series() {
        // Test that we get a valid released series
        let latest_released = get_latest_released_series("debian").await.unwrap();
        // Should not be 'sid' or 'experimental'
        assert_ne!(latest_released, "sid");
        assert_ne!(latest_released, "experimental");
        // Should have a version number
        let version = get_debian_series_number(&latest_released).await.unwrap();
        assert!(version.is_some());
    }
}

View File

@@ -11,6 +11,8 @@ pub mod build;
pub mod changelog; pub mod changelog;
/// Build a Debian package into a binary (.deb) /// Build a Debian package into a binary (.deb)
pub mod deb; pub mod deb;
/// Obtain general information about distribution, series, etc
pub mod distro_info;
/// Obtain information about one or multiple packages /// Obtain information about one or multiple packages
pub mod package_info; pub mod package_info;
/// Download a source package locally /// Download a source package locally

View File

@@ -37,6 +37,7 @@ fn main() {
.required(false), .required(false),
) )
.arg(arg!(-v --version <version> "Target package version").required(false)) .arg(arg!(-v --version <version> "Target package version").required(false))
.arg(arg!(--archive "Only use the archive to download package source, not git").required(false))
.arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false)) .arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false))
.arg(arg!(<package> "Target package")), .arg(arg!(<package> "Target package")),
) )
@@ -93,13 +94,22 @@ fn main() {
let series = sub_matches.get_one::<String>("series").map(|s| s.as_str()); let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str()); let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str());
let version = sub_matches.get_one::<String>("version").map(|s| s.as_str()); let version = sub_matches.get_one::<String>("version").map(|s| s.as_str());
let _ppa = sub_matches let ppa = sub_matches.get_one::<String>("ppa").map(|s| s.as_str());
.get_one::<String>("ppa") let archive = sub_matches.get_one::<bool>("archive").unwrap_or(&false);
.map(|s| s.as_str())
.unwrap_or("");
let (pb, progress_callback) = ui::create_progress_bar(&multi); let (pb, progress_callback) = ui::create_progress_bar(&multi);
// Convert PPA to base URL if provided
let base_url = ppa.and_then(|ppa_str| {
// PPA format: user/ppa_name
let parts: Vec<&str> = ppa_str.split('/').collect();
if parts.len() == 2 {
Some(pkh::package_info::ppa_to_base_url(parts[0], parts[1]))
} else {
None
}
});
// Since pull is async, we need to block on it // Since pull is async, we need to block on it
if let Err(e) = rt.block_on(async { if let Err(e) = rt.block_on(async {
let package_info = pkh::package_info::lookup( let package_info = pkh::package_info::lookup(
@@ -108,10 +118,11 @@ fn main() {
series, series,
"", "",
dist, dist,
base_url.as_deref(),
Some(&progress_callback), Some(&progress_callback),
) )
.await?; .await?;
pkh::pull::pull(&package_info, series, None, Some(&progress_callback)).await pkh::pull::pull(&package_info, None, Some(&progress_callback), *archive).await
}) { }) {
pb.finish_and_clear(); pb.finish_and_clear();
error!("{}", e); error!("{}", e);
@@ -155,9 +166,10 @@ fn main() {
_ => None, _ => None,
}; };
if let Err(e) = if let Err(e) = rt.block_on(async {
pkh::deb::build_binary_package(arch, series, Some(cwd.as_path()), *cross, mode) pkh::deb::build_binary_package(arch, series, Some(cwd.as_path()), *cross, mode)
{ .await
}) {
error!("{}", e); error!("{}", e);
std::process::exit(1); std::process::exit(1);
} }

View File

@@ -1,15 +1,22 @@
use chrono::NaiveDate;
use flate2::read::GzDecoder; use flate2::read::GzDecoder;
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::io::Read; use std::io::Read;
use std::path::Path;
use crate::ProgressCallback; use crate::ProgressCallback;
use log::{debug, warn}; use log::{debug, warn};
const BASE_URL_UBUNTU: &str = "http://archive.ubuntu.com/ubuntu"; /// Convert a PPA specification to a base URL
const BASE_URL_DEBIAN: &str = "http://deb.debian.org/debian"; ///
/// # Arguments
/// * user: user for the PPA
/// * name: name of the PPA
///
/// # Returns
/// * The base URL for the PPA (e.g., "https://ppa.launchpadcontent.net/user/ppa_name/ubuntu/")
pub fn ppa_to_base_url(user: &str, name: &str) -> String {
format!("https://ppa.launchpadcontent.net/{}/{}/ubuntu", user, name)
}
async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn Error>> { async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn Error>> {
let url = format!("https://git.launchpad.net/ubuntu/+source/{}", package); let url = format!("https://git.launchpad.net/ubuntu/+source/{}", package);
@@ -25,100 +32,6 @@ async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn E
} }
} }
fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
let mut rdr = csv::ReaderBuilder::new()
.flexible(true)
.from_reader(content.as_bytes());
let headers = rdr.headers()?.clone();
let series_idx = headers
.iter()
.position(|h| h == "series")
.ok_or("Column 'series' not found")?;
let created_idx = headers
.iter()
.position(|h| h == "created")
.ok_or("Column 'created' not found")?;
let mut entries = Vec::new();
for result in rdr.records() {
let record = result?;
if let (Some(s), Some(c)) = (record.get(series_idx), record.get(created_idx))
&& let Ok(date) = NaiveDate::parse_from_str(c, "%Y-%m-%d")
{
entries.push((s.to_string(), date));
}
}
// Sort by date descending (newest first)
entries.sort_by(|a, b| b.1.cmp(&a.1));
Ok(entries.into_iter().map(|(s, _)| s).collect())
}
async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
} else {
reqwest::get(
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
.as_str(),
)
.await?
.text()
.await?
};
let mut series = parse_series_csv(&content)?;
// For Debian, ensure 'sid' is first if it's not (it usually doesn't have a date or is very old/new depending on file)
// Actually in the file sid has 1993 date.
// But we want to try 'sid' (unstable) first for Debian.
if dist == "debian" {
series.retain(|s| s != "sid");
series.insert(0, "sid".to_string());
}
Ok(series)
}
// Keep existing functions for compatibility or refactor them to use get_ordered_series
async fn get_series_from_url(url: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = reqwest::get(url).await?.text().await?;
parse_series_csv(&content)
}
fn get_series_from_file(path: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = std::fs::read_to_string(path)?;
parse_series_csv(&content)
}
/// Obtain a list of series from a distribution
pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
get_series_from_file(format!("/usr/share/distro-info/{dist}.csv").as_str())
} else {
get_series_from_url(
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
.as_str(),
)
.await
}
}
/// Obtain the distribution (eg. debian, ubuntu) from a distribution series (eg. noble, bookworm)
pub async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
let debian_series = get_dist_series("debian").await?;
if debian_series.contains(&series.to_string()) {
return Ok("debian".to_string());
}
let ubuntu_series = get_dist_series("ubuntu").await?;
if ubuntu_series.contains(&series.to_string()) {
return Ok("ubuntu".to_string());
}
Err(format!("Unknown series: {}", series).into())
}
/// A File used in a source package /// A File used in a source package
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct FileEntry { pub struct FileEntry {
@@ -173,66 +86,6 @@ impl PackageInfo {
} }
} }
fn get_dist_pockets(dist: &str) -> Vec<&'static str> {
match dist {
"ubuntu" => vec!["proposed", "updates", ""],
"debian" => vec!["proposed-updates", "updates", ""],
_ => vec![""],
}
}
fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
let pocket_full = if pocket.is_empty() {
String::new()
} else {
format!("-{}", pocket)
};
format!("{base_url}/dists/{series}{pocket_full}/{component}/source/Sources.gz")
}
fn get_base_url(dist: &str) -> &str {
match dist {
"ubuntu" => BASE_URL_UBUNTU,
"debian" => BASE_URL_DEBIAN,
_ => panic!("Unknown distribution"),
}
}
/// Obtain the URL for the 'Release' file of a distribution series
fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
let pocket_full = if pocket.is_empty() {
String::new()
} else {
format!("-{}", pocket)
};
format!("{base_url}/dists/{series}{pocket_full}/Release")
}
/// Obtain the components of a distribution series by parsing the 'Release' file
async fn get_components(
base_url: &str,
series: &str,
pocket: &str,
) -> Result<Vec<String>, Box<dyn Error>> {
let url = get_release_url(base_url, series, pocket);
debug!("Fetching Release file from: {}", url);
let content = reqwest::get(&url).await?.text().await?;
for line in content.lines() {
if line.starts_with("Components:")
&& let Some((_, components)) = line.split_once(':')
{
return Ok(components
.split_whitespace()
.map(|s| s.to_string())
.collect());
}
}
Err("Components not found.".into())
}
struct DebianSources { struct DebianSources {
splitted_sources: std::str::Split<'static, &'static str>, splitted_sources: std::str::Split<'static, &'static str>,
} }
@@ -337,8 +190,9 @@ async fn get(
series: &str, series: &str,
pocket: &str, pocket: &str,
version: Option<&str>, version: Option<&str>,
base_url: Option<&str>,
) -> Result<PackageInfo, Box<dyn Error>> { ) -> Result<PackageInfo, Box<dyn Error>> {
let dist = get_dist_from_series(series).await?; let dist = crate::distro_info::get_dist_from_series(series).await?;
// Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa // Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa
// We need to check manually if there is a launchpad repository for the package // We need to check manually if there is a launchpad repository for the package
@@ -350,13 +204,22 @@ async fn get(
preferred_vcs = Some(lp_url); preferred_vcs = Some(lp_url);
} }
let base_url = get_base_url(&dist); // Determine the base URL to use (either provided PPA URL or default archive)
let distro_base_url = crate::distro_info::get_base_url(&dist);
let base_url = if let Some(ppa_url) = base_url {
ppa_url.to_string()
} else {
distro_base_url.clone()
};
let components = get_components(base_url, series, pocket).await?; // If using a custom base URL (PPA), disable VCS lookup to force archive download
let from_ppa = base_url != distro_base_url;
let components = crate::distro_info::get_components(&base_url, series, pocket).await?;
debug!("Found components: {:?}", components); debug!("Found components: {:?}", components);
for component in components { for component in components {
let url = get_sources_url(base_url, series, pocket, &component); let url = crate::distro_info::get_sources_url(&base_url, series, pocket, &component);
debug!("Fetching sources from: {}", url); debug!("Fetching sources from: {}", url);
@@ -387,6 +250,11 @@ async fn get(
preferred_vcs = Some(vcs.clone()); preferred_vcs = Some(vcs.clone());
} }
// If downloading from PPA, make sure we don't use a VCS
if from_ppa {
preferred_vcs = None;
}
let archive_url = format!("{base_url}/{0}", stanza.directory); let archive_url = format!("{base_url}/{0}", stanza.directory);
return Ok(PackageInfo { return Ok(PackageInfo {
dist, dist,
@@ -411,9 +279,10 @@ async fn find_package(
dist: &str, dist: &str,
pocket: &str, pocket: &str,
version: Option<&str>, version: Option<&str>,
base_url: Option<&str>,
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> { ) -> Result<PackageInfo, Box<dyn Error>> {
let series_list = get_ordered_series(dist).await?; let series_list = crate::distro_info::get_ordered_series_name(dist).await?;
for (i, series) in series_list.iter().enumerate() { for (i, series) in series_list.iter().enumerate() {
if let Some(cb) = progress { if let Some(cb) = progress {
@@ -421,13 +290,13 @@ async fn find_package(
} }
let pockets = if pocket.is_empty() { let pockets = if pocket.is_empty() {
get_dist_pockets(dist) crate::distro_info::get_dist_pockets(dist)
} else { } else {
vec![pocket] vec![pocket.to_string()]
}; };
for p in pockets { for p in pockets {
match get(package_name, series, p, version).await { match get(package_name, series, &p, version, base_url).await {
Ok(info) => { Ok(info) => {
if i > 0 { if i > 0 {
warn!( warn!(
@@ -456,12 +325,22 @@ async fn find_package(
/// ///
/// This function obtains package information either directly from a specific series /// This function obtains package information either directly from a specific series
/// or by searching across all series in a distribution. /// or by searching across all series in a distribution.
///
/// # Arguments
/// * `package` - The name of the package to look up
/// * `version` - Optional specific version to look for
/// * `series` - Optional distribution series (e.g., "noble", "bookworm")
/// * `pocket` - Pocket to search in (e.g., "updates", "security", or "" for main)
/// * `dist` - Optional distribution name (e.g., "ubuntu", "debian")
/// * `base_url` - Optional base URL for the package archive (e.g., "https://ppa.launchpadcontent.net/user/ppa/ubuntu/")
/// * `progress` - Optional progress callback
pub async fn lookup( pub async fn lookup(
package: &str, package: &str,
version: Option<&str>, version: Option<&str>,
series: Option<&str>, series: Option<&str>,
pocket: &str, pocket: &str,
dist: Option<&str>, dist: Option<&str>,
base_url: Option<&str>,
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> { ) -> Result<PackageInfo, Box<dyn Error>> {
// Obtain the package information, either directly in a series or with a search in all series // Obtain the package information, either directly in a series or with a search in all series
@@ -476,7 +355,7 @@ pub async fn lookup(
} }
// Get the package information from that series and pocket // Get the package information from that series and pocket
get(package, s, pocket, version).await? get(package, s, pocket, version, base_url).await?
} else { } else {
let dist = dist.unwrap_or_else(|| let dist = dist.unwrap_or_else(||
// Use auto-detection to see if current distro is ubuntu, or fallback to debian by default // Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
@@ -490,7 +369,11 @@ pub async fn lookup(
if let Some(cb) = progress { if let Some(cb) = progress {
cb( cb(
&format!("Searching for package {} in {}...", package, dist), &format!(
"Searching for package {} in {}...",
package,
if base_url.is_none() { dist } else { "ppa" }
),
"", "",
0, 0,
0, 0,
@@ -498,7 +381,7 @@ pub async fn lookup(
} }
// Try to find the package in all series from that dist // Try to find the package in all series from that dist
find_package(package, dist, pocket, version, progress).await? find_package(package, dist, pocket, version, base_url, progress).await?
}; };
Ok(package_info) Ok(package_info)
@@ -525,26 +408,6 @@ mod tests {
assert!(url.is_none()); assert!(url.is_none());
} }
#[tokio::test]
async fn test_get_debian_series() {
let series = get_dist_series("debian").await.unwrap();
assert!(series.contains(&"sid".to_string()));
assert!(series.contains(&"bookworm".to_string()));
}
#[tokio::test]
async fn test_get_ubuntu_series() {
let series = get_dist_series("ubuntu").await.unwrap();
assert!(series.contains(&"noble".to_string()));
assert!(series.contains(&"jammy".to_string()));
}
#[tokio::test]
async fn test_get_dist_from_series() {
assert_eq!(get_dist_from_series("sid").await.unwrap(), "debian");
assert_eq!(get_dist_from_series("noble").await.unwrap(), "ubuntu");
}
#[test] #[test]
fn test_parse_sources() { fn test_parse_sources() {
use flate2::Compression; use flate2::Compression;
@@ -582,7 +445,7 @@ Version: 1.0
#[tokio::test] #[tokio::test]
async fn test_find_package_fallback() { async fn test_find_package_fallback() {
// python2.7 is in bullseye but not above // python2.7 is in bullseye but not above
let info = find_package("python2.7", "debian", "", None, None) let info = find_package("python2.7", "debian", "", None, None, None)
.await .await
.unwrap(); .unwrap();
assert_eq!(info.stanza.package, "python2.7"); assert_eq!(info.stanza.package, "python2.7");
@@ -592,7 +455,7 @@ Version: 1.0
#[tokio::test] #[tokio::test]
async fn test_find_package_devel() { async fn test_find_package_devel() {
// hello is in sid // hello is in sid
let info = find_package("hello", "debian", "", None, None) let info = find_package("hello", "debian", "", None, None, None)
.await .await
.unwrap(); .unwrap();
assert_eq!(info.stanza.package, "hello"); assert_eq!(info.stanza.package, "hello");

View File

@@ -1,6 +1,7 @@
use std::cmp::min; use std::cmp::min;
use std::error::Error; use std::error::Error;
use std::path::Path; use std::path::Path;
use std::path::PathBuf;
use crate::package_info::PackageInfo; use crate::package_info::PackageInfo;
@@ -90,25 +91,93 @@ use futures_util::StreamExt;
use tar::Archive; use tar::Archive;
use xz2::read::XzDecoder; use xz2::read::XzDecoder;
fn extract_archive(path: &Path, dest: &Path) -> Result<(), Box<dyn Error>> { fn copy_dir_all(src: &Path, dst: &Path) -> Result<(), Box<dyn Error>> {
let file = File::open(path)?; if !dst.exists() {
let filename = path.file_name().unwrap().to_string_lossy(); std::fs::create_dir_all(dst)?;
}
if filename.ends_with(".tar.gz") || filename.ends_with(".tgz") { for entry in std::fs::read_dir(src)? {
let tar = GzDecoder::new(file); let entry = entry?;
let mut archive = Archive::new(tar); let src_path = entry.path();
archive.unpack(dest)?; let dst_path = dst.join(entry.file_name());
} else if filename.ends_with(".tar.xz") || filename.ends_with(".txz") {
let tar = XzDecoder::new(file); if src_path.is_dir() {
let mut archive = Archive::new(tar); copy_dir_all(&src_path, &dst_path)?;
archive.unpack(dest)?; } else {
} else { std::fs::copy(&src_path, &dst_path)?;
return Err(format!("Unsupported archive format: {}", filename).into()); }
} }
Ok(()) Ok(())
} }
/// Helper function to extract tar archive with progress tracking
fn extract_tar_archive<D, F>(
file_path: &Path,
dest: &Path,
progress: ProgressCallback<'_>,
decoder_factory: F,
) -> Result<Vec<String>, Box<dyn Error>>
where
D: std::io::Read,
F: Fn(File) -> D,
{
let file = File::open(file_path)?;
let decoder = decoder_factory(file);
let mut archive = Archive::new(decoder);
// Get total number of entries for progress tracking
let total_entries = archive.entries()?.count();
let mut current_entry = 0;
// Reset the archive to read entries again
let file = File::open(file_path)?;
let decoder = decoder_factory(file);
let mut archive = Archive::new(decoder);
let mut extracted_files = Vec::new();
for entry in archive.entries()? {
let mut entry = entry?;
let path = entry.path()?.to_path_buf();
let dest_path = dest.join(&path);
// Create parent directories if needed
if let Some(parent) = dest_path.parent() {
std::fs::create_dir_all(parent)?;
}
// Extract the file
entry.unpack(&dest_path)?;
extracted_files.push(dest_path.to_string_lossy().to_string());
current_entry += 1;
// Report progress
if let Some(cb) = progress {
cb("", "Extracting...", current_entry, total_entries);
}
}
Ok(extracted_files)
}
fn extract_archive(
path: &Path,
dest: &Path,
progress: ProgressCallback<'_>,
) -> Result<Vec<String>, Box<dyn Error>> {
let filename = path.file_name().unwrap().to_string_lossy();
if filename.ends_with(".tar.gz") || filename.ends_with(".tgz") {
extract_tar_archive(path, dest, progress, GzDecoder::new)
} else if filename.ends_with(".tar.xz") || filename.ends_with(".txz") {
extract_tar_archive(path, dest, progress, XzDecoder::new)
} else {
Err(format!("Unsupported archive format: {}", filename).into())
}
}
fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<dyn Error>> { fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<dyn Error>> {
let output = Command::new("pristine-tar") let output = Command::new("pristine-tar")
.current_dir(package_dir) .current_dir(package_dir)
@@ -306,27 +375,102 @@ async fn fetch_archive_sources(
for file in &info.stanza.files { for file in &info.stanza.files {
let url = format!("{}/{}", info.archive_url, file.name); let url = format!("{}/{}", info.archive_url, file.name);
download_file_checksum(&url, &file.sha256, package_dir, progress).await?; download_file_checksum(&url, &file.sha256, package_dir, progress).await?;
}
// Extract the debian tarball or diff // Extract all tar archives, merging extracted directories
let debian_file = info if file.name.ends_with(".tar.gz") || file.name.ends_with(".tar.xz") {
.stanza let path = package_dir.join(&file.name);
.files let extract_dir = package_dir.join(&info.stanza.package);
.iter()
.find(|f| f.name.contains(".debian.tar.") || f.name.contains(".diff.gz"));
if let Some(file) = debian_file { let extracted = extract_archive(&path, &extract_dir, progress)?;
let path = package_dir.join(&file.name);
let extract_dir = package_dir.join(&info.stanza.package);
if (file.name.ends_with(".tar.xz") || file.name.ends_with(".tar.gz")) // Special case: the debian tar does only contain 'debian'
&& let Err(e) = extract_archive(&path, &extract_dir) if file.name.contains("debian.tar.") {
{ continue;
return Err(format!("Failed to extract {}: {}", file.name, e).into()); }
// List root directories extracted and use the first one as the source directory
debug!("Root directories extracted:");
let mut source_dir: Option<PathBuf> = None;
for file in &extracted {
let path = Path::new(file);
// Check if this is a directory and is at the archive root level
// (i.e., the path relative to extract_dir has no parent components)
if let Ok(relative_path) = path.strip_prefix(&extract_dir)
&& relative_path.components().count() == 1
&& path.is_dir()
{
debug!("- {}", relative_path.file_name().unwrap().to_string_lossy());
// Use the first directory found as the source
if source_dir.is_none() {
source_dir = Some(path.to_path_buf());
}
}
}
// Use the extracted directory as the source, assuming there is only one
if let Some(src_dir) = source_dir {
let target_dir = package_dir.join(&info.stanza.package);
if target_dir.exists() {
// Target exists, we need to merge contents
for sub_entry in std::fs::read_dir(&src_dir)? {
let sub_entry = sub_entry?;
let sub_path = sub_entry.path();
let target_path = target_dir.join(sub_entry.file_name());
if sub_path.is_dir() {
std::fs::create_dir_all(&target_path)?;
// Recursively copy directory contents
copy_dir_all(&sub_path, &target_path)?;
} else {
std::fs::copy(&sub_path, &target_path)?;
}
}
std::fs::remove_dir_all(&src_dir)?;
} else {
std::fs::rename(&src_dir, &target_dir)?;
}
}
} }
// Remove archive after extraction // Extract and apply .diff.gz if present (old packages)
std::fs::remove_file(&path)?; if file.name.ends_with(".diff.gz") {
let diff_gz_path = package_dir.join(&file.name);
let source_dir = package_dir.join(&info.stanza.package);
// Create the .diff file path by replacing .gz with empty string
let diff_path = diff_gz_path.with_extension("");
// Decompress the .diff.gz file directly to .diff
let input_file = File::open(&diff_gz_path)?;
let mut decoder = GzDecoder::new(input_file);
let mut output_file = File::create(&diff_path)?;
std::io::copy(&mut decoder, &mut output_file)?;
// Use relative path for the diff file (it's in the parent directory)
let relative_diff_path =
format!("../{}", diff_path.file_name().unwrap().to_string_lossy());
// Apply the patch using the patch command with relative path
let output = Command::new("patch")
.current_dir(&source_dir)
.arg("-p1")
.arg("--input")
.arg(&relative_diff_path)
.output()?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
return Err(
format!("Failed to apply patch: {}\n{}", diff_path.display(), stderr).into(),
);
}
debug!("Successfully applied patch: {}", diff_path.display());
// Clean up the extracted .diff file
std::fs::remove_file(&diff_path)?;
}
} }
Ok(()) Ok(())
@@ -339,11 +483,12 @@ async fn fetch_archive_sources(
/// The source will be extracted under 'package/package'. /// The source will be extracted under 'package/package'.
pub async fn pull( pub async fn pull(
package_info: &PackageInfo, package_info: &PackageInfo,
series: Option<&str>,
cwd: Option<&Path>, cwd: Option<&Path>,
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
force_archive: bool,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
let package = &package_info.stanza.package; let package = &package_info.stanza.package;
let series = &package_info.series;
let package_dir = if let Some(path) = cwd { let package_dir = if let Some(path) = cwd {
path.join(package) path.join(package)
} else { } else {
@@ -351,15 +496,20 @@ pub async fn pull(
}; };
/* Fetch the package: either via git (preferred VCS) or the archive */ /* Fetch the package: either via git (preferred VCS) or the archive */
if let Some(ref url) = package_info.preferred_vcs { if let Some(ref url) = package_info.preferred_vcs
&& !force_archive
{
// We have found a preferred VCS (git repository) for the package, so // We have found a preferred VCS (git repository) for the package, so
// we fetch the package from that repo. // we fetch the package from that repo.
// Depending on target series, we pick target branch; if no series is specified, // Depending on target series, we pick target branch; if latest series is specified,
// we target the development branch, i.e. the default branch // we target the development branch, i.e. the default branch
let branch_name = if let Some(s) = series { let branch_name = if crate::distro_info::get_ordered_series_name(package_info.dist.as_str())
.await?[0]
!= *series
{
if package_info.dist == "ubuntu" { if package_info.dist == "ubuntu" {
Some(format!("{}/{}", package_info.dist, s)) Some(format!("{}/{}", package_info.dist, series))
} else { } else {
// Debian does not have reliable branch naming... // Debian does not have reliable branch naming...
// For now, we skip that part and clone default // For now, we skip that part and clone default
@@ -423,22 +573,29 @@ pub async fn pull(
mod tests { mod tests {
use super::*; use super::*;
async fn test_pull_package_end_to_end(package: &str, series: Option<&str>, dist: Option<&str>) { async fn test_pull_package_end_to_end(
package: &str,
series: Option<&str>,
dist: Option<&str>,
archive: Option<bool>,
) {
// This test verifies that 'pkh pull' clones the repo and fetches the tarball. // This test verifies that 'pkh pull' clones the repo and fetches the tarball.
// For determinism, we require for tests that either a distro or series is specified, // For determinism, we require for tests that either a distro or series is specified,
// as no distribution would mean fallback to system distro // as no distribution would mean fallback to system distro
assert!(dist != None || series != None); assert!(dist.is_some() || series.is_some());
// Use a temp directory as working directory // Use a temp directory as working directory
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let cwd = temp_dir.path(); let cwd = temp_dir.path();
// Main 'pull' command: the one we want to test // Main 'pull' command: the one we want to test
let info = crate::package_info::lookup(package, None, series, "", dist, None) let info = crate::package_info::lookup(package, None, series, "", dist, None, None)
.await
.unwrap();
pull(&info, Some(cwd), None, archive.unwrap_or(false))
.await .await
.unwrap(); .unwrap();
pull(&info, series, Some(cwd), None).await.unwrap();
let package_dir = cwd.join(package); let package_dir = cwd.join(package);
assert!(package_dir.exists()); assert!(package_dir.exists());
@@ -498,33 +655,39 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn test_pull_hello_ubuntu_end_to_end() { async fn test_pull_hello_ubuntu_end_to_end() {
test_pull_package_end_to_end("hello", Some("noble"), None).await; test_pull_package_end_to_end("hello", Some("noble"), None, None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_debian_end_to_end() { async fn test_pull_hello_debian_end_to_end() {
test_pull_package_end_to_end("hello", Some("bookworm"), None).await; test_pull_package_end_to_end("hello", Some("bookworm"), None, None).await;
}
/// Specific test for a package using a .diff.gz, instead of .debian and .orig
#[tokio::test]
async fn test_pull_linux_riscv_ubuntu_end_to_end() {
test_pull_package_end_to_end("linux-riscv", Some("noble"), None, Some(true)).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_2048_universe_ubuntu_end_to_end() { async fn test_pull_2048_universe_ubuntu_end_to_end() {
test_pull_package_end_to_end("2048", Some("noble"), None).await; test_pull_package_end_to_end("2048", Some("noble"), None, None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_1oom_contrib_debian_end_to_end() { async fn test_pull_1oom_contrib_debian_end_to_end() {
test_pull_package_end_to_end("1oom", Some("trixie"), None).await; test_pull_package_end_to_end("1oom", Some("trixie"), None, None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_agg_svn_fallback_ok() { async fn test_pull_agg_svn_fallback_ok() {
test_pull_package_end_to_end("agg", Some("trixie"), None).await; test_pull_package_end_to_end("agg", Some("trixie"), None, None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_debian_latest_end_to_end() { async fn test_pull_hello_debian_latest_end_to_end() {
test_pull_package_end_to_end("hello", None, Some("debian")).await; test_pull_package_end_to_end("hello", None, Some("debian"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_ubuntu_latest_end_to_end() { async fn test_pull_hello_ubuntu_latest_end_to_end() {
test_pull_package_end_to_end("hello", None, Some("ubuntu")).await; test_pull_package_end_to_end("hello", None, Some("ubuntu"), None).await;
} }
} }