Compare commits

..

7 Commits

Author SHA1 Message Date
5b1bcdb453 exp: cross #7
Some checks failed
CI / build (pull_request) Failing after 2m16s
2025-12-23 17:19:41 +01:00
3ecfe6dda2 exp: cross #6 2025-12-22 23:08:44 +01:00
63389f0bad exp: cross #5 2025-12-22 00:13:37 +01:00
75751ad301 exp: cross #4 2025-12-21 22:07:34 +01:00
0d4ae565dd exp: cross #3 2025-12-21 21:37:56 +01:00
31bcd28c72 exp: cross #2 2025-12-20 00:06:07 +01:00
8e9e19a6ca exp: cross 2025-12-17 17:27:27 +01:00
29 changed files with 705 additions and 2270 deletions

View File

@@ -2,7 +2,7 @@ name: CI
on: on:
push: push:
branches: [ "main", "ci-test" ] branches: [ "main" ]
pull_request: pull_request:
branches: [ "main" ] branches: [ "main" ]
@@ -12,54 +12,23 @@ env:
jobs: jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
container:
image: ubuntu:24.04
options: --privileged --cap-add SYS_ADMIN --security-opt apparmor:unconfined
steps: steps:
- name: Set up container image
run: |
apt-get update
apt-get install -y nodejs sudo curl wget ca-certificates build-essential
- uses: actions/checkout@v6 - uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@stable - uses: dtolnay/rust-toolchain@stable
with: with:
components: rustfmt, clippy components: rustfmt
- name: Check format - name: Check format
run: cargo fmt --check run: cargo fmt --check
- name: Install build dependencies - name: Install build dependencies
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y pkg-config libssl-dev libgpg-error-dev libgpgme-dev sudo apt-get install -y pkg-config libssl-dev
- name: Build - name: Build
run: cargo build run: cargo build
env:
RUSTFLAGS: -Dwarnings
- name: Lint
run: cargo clippy --all-targets --all-features
env:
RUSTFLAGS: -Dwarnings
- name: Install runtime system dependencies - name: Install runtime system dependencies
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y git pristine-tar sbuild mmdebstrap util-linux dpkg-dev sudo apt-get install -y pristine-tar sbuild mmdebstrap dpkg-dev
- name: Setup subuid/subgid - name: Run tests
run: | run: cargo test
usermod --add-subuids 100000-200000 --add-subgids 100000-200000 ${USER:-root}
- name: Run tests with verbose logging (timeout 30min)
env:
RUST_LOG: debug
run: timeout 30m cargo test -- --nocapture
snap:
needs: build
runs-on: ubuntu-latest
outputs:
snap-file: ${{ steps.build-snap.outputs.snap }}
steps:
- uses: actions/checkout@v4
- uses: snapcore/action-build@v1
id: build-snap
- uses: actions/upload-artifact@v3
with:
name: snap
path: ${{ steps.build-snap.outputs.snap }}

View File

@@ -27,11 +27,4 @@ xz2 = "0.1"
serde_json = "1.0.145" serde_json = "1.0.145"
directories = "6.0.0" directories = "6.0.0"
ssh2 = "0.9.5" ssh2 = "0.9.5"
gpgme = "0.11"
serde_yaml = "0.9"
lazy_static = "1.4.0"
[dev-dependencies]
test-log = "0.2.19"
serial_test = "3.3.1"
tempfile = "3.10.1" tempfile = "3.10.1"

View File

@@ -24,10 +24,8 @@ Options:
Commands and workflows include: Commands and workflows include:
``` ```
Commands: Commands:
pull Pull a source package from the archive or git pull Get a source package from the archive or git
chlog Auto-generate changelog entry, editing it, committing it afterwards chlog Auto-generate changelog entry, editing it, committing it afterwards
build Build the source package (into a .dsc)
deb Build the source package into binary package (.deb)
help Print this message or the help of the given subcommand(s) help Print this message or the help of the given subcommand(s)
``` ```
@@ -82,7 +80,7 @@ Missing features:
- [ ] `pkh pull` - [ ] `pkh pull`
- [x] Obtain package sources from git - [x] Obtain package sources from git
- [x] Obtain package sources from the archive (fallback) - [x] Obtain package sources from the archive (fallback)
- [x] Obtain package source from PPA (--ppa) - [ ] Obtain package source from PPA (--ppa)
- [ ] Obtain a specific version of the package - [ ] Obtain a specific version of the package
- [x] Fetch the correct git branch for series on Ubuntu - [x] Fetch the correct git branch for series on Ubuntu
- [ ] Try to fetch the correct git branch for series on Debian, or fallback to the archive - [ ] Try to fetch the correct git branch for series on Debian, or fallback to the archive
@@ -98,7 +96,7 @@ Missing features:
- [ ] Three build modes: - [ ] Three build modes:
- [ ] Build locally (discouraged) - [ ] Build locally (discouraged)
- [x] Build using sbuild+unshare, with binary emulation (default) - [x] Build using sbuild+unshare, with binary emulation (default)
- [x] Cross-compilation - [ ] Cross-compilation
- [ ] Async build - [ ] Async build
- [ ] `pkh status` - [ ] `pkh status`
- [ ] Show build status - [ ] Show build status

View File

@@ -1,28 +0,0 @@
## Static data needed for pkh operations
## Instead of hardcoding the data in code, data files allow quick updates
## and maintenance of such data in one unique place.
## The goal is to have the minimal possible set of data necessary
## to grab the actual data. For example we don't want to store every Ubuntu
## or Debian series, but rather a URL where we can properly access that data.
dist_info:
  # Sources for distro-info CSV data: a locally installed copy and a
  # network fallback; {dist} is replaced with the distribution CSV filename
  local: /usr/share/distro-info/{dist}
  network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/
dist:
  debian:
    # Primary package archive mirror
    base_url: http://deb.debian.org/debian
    # {series_num} is substituted with the Debian release number
    archive_keyring: https://ftp-master.debian.org/keys/archive-key-{series_num}.asc
    # Pocket suffixes appended to a series name (series-pocket)
    pockets:
      - proposed-updates
      - updates
    # Where to find the list of Debian series (local copy, network fallback)
    series:
      local: /usr/share/distro-info/debian.csv
      network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/debian.csv
  ubuntu:
    base_url: http://archive.ubuntu.com/ubuntu
    archive_keyring: http://archive.ubuntu.com/ubuntu/project/ubuntu-archive-keyring.gpg
    pockets:
      - proposed
      - updates
    series:
      local: /usr/share/distro-info/ubuntu.csv
      network: https://salsa.debian.org/debian/distro-info-data/-/raw/main/ubuntu.csv

View File

@@ -1,42 +0,0 @@
name: pkh
base: core24
summary: pkh is a packaging helper for Debian/Ubuntu packages
description: |
  pkh aims at wrapping the different debian tools and workflows
  into one tool, that would have the same interface for everything,
  while being smarter at integrating all workflows.
# Version and grade are set at build time by override-pull below
adopt-info: pkh-part
confinement: devmode
apps:
  pkh:
    command: bin/pkh
parts:
  pkh-part:
    plugin: rust
    source: .
    # Derive the snap version from the current git revision
    override-pull: |
      craftctl default
      craftctl set version=$(git rev-parse --short=11 HEAD)
      craftctl set grade="devel"
    build-packages:
      - pkg-config
      - libssl-dev
      - libgpg-error-dev
      - libgpgme-dev
    # Runtime tools pkh shells out to, bundled into the snap
    stage-packages:
      - libgpgme11t64
      - git
      - curl
      - pristine-tar
      - sbuild
      - mmdebstrap
      - util-linux
      - dpkg-dev
    # Exclude transitively pulled ICU libraries to shrink the snap
    stage:
      - -usr/lib/x86_64-linux-gnu/libicuio.so.74.2
      - -usr/lib/x86_64-linux-gnu/libicutest.so.74.2
      - -usr/lib/x86_64-linux-gnu/libicutu.so.74.2
      - -usr/lib/x86_64-linux-gnu/libicui18n.so.74.2

View File

@@ -1,58 +0,0 @@
//! APT keyring management for mmdebstrap
//!
//! Provides a simple function to ensure that archive keyrings are available
//! for mmdebstrap operations by downloading them from specified URLs.
use crate::context;
use crate::distro_info;
use std::error::Error;
use std::path::Path;
use std::sync::Arc;
/// Download a keyring into apt trusted.gpg.d directory, trusting that keyring
pub async fn download_trust_keyring(
    ctx: Option<Arc<context::Context>>,
    series: &str,
) -> Result<(), Box<dyn Error>> {
    let ctx = ctx.unwrap_or_else(context::current);

    // Resolve the keyring URL for this series via distro_info
    let keyring_url = distro_info::get_keyring_url(series).await?;
    log::debug!("Downloading keyring from: {}", keyring_url);

    // Make sure apt's trusted.gpg.d directory exists inside the context
    let trusted_gpg_d = "/etc/apt/trusted.gpg.d";
    if !ctx.exists(Path::new(trusted_gpg_d))? {
        ctx.command("mkdir").arg("-p").arg(trusted_gpg_d).status()?;
    }

    // Derive the destination filename from the last URL segment, falling
    // back to a series-derived name when the URL has no usable segment
    let filename = keyring_url
        .rsplit('/')
        .next()
        .unwrap_or("pkh-{}.gpg")
        .replace("{}", series);
    let keyring_path = format!("{}/{}", trusted_gpg_d, filename);

    // Fetch the keyring straight to its final location with curl
    let status = ctx
        .command("curl")
        .args(["-s", "-f", "-L"])
        .arg(&keyring_url)
        .args(["--output", keyring_path.as_str()])
        .status()?;
    if !status.success() {
        return Err(format!("Failed to download keyring from {}", keyring_url).into());
    }

    log::info!(
        "Successfully downloaded and installed keyring for {} to {}",
        series,
        keyring_path
    );
    Ok(())
}

View File

@@ -1,2 +0,0 @@
/// Keyring download/trust helpers used for mmdebstrap operations
pub mod keyring;
/// APT sources.list parsing and management (deb822 and legacy formats)
pub mod sources;

View File

@@ -1,336 +0,0 @@
//! APT sources.list management
//! Provides a simple structure for managing APT repository sources
use crate::context;
use std::error::Error;
use std::path::Path;
use std::sync::Arc;
/// Represents a single source entry in sources.list
#[derive(Debug, Clone)]
pub struct SourceEntry {
    /// Is the source enabled?
    pub enabled: bool,
    /// Source components (universe, main, contrib)
    pub components: Vec<String>,
    /// Source architectures (amd64, riscv64, arm64)
    pub architectures: Vec<String>,
    /// Source URI
    pub uri: String,
    /// Source suites (series-pocket)
    pub suite: Vec<String>,
}

impl SourceEntry {
    /// Parse a string describing a source entry in deb822 format
    ///
    /// Returns `None` when the stanza has no `URIs:` field, or when its
    /// `Types:` field is present but does not include `deb` (e.g. a
    /// `deb-src`-only stanza, which we do not manage).
    /// An `Enabled: no` field is honored by setting `enabled` to false.
    pub fn from_deb822(data: &str) -> Option<Self> {
        let mut current_entry = SourceEntry {
            enabled: true,
            components: Vec::new(),
            architectures: Vec::new(),
            uri: String::new(),
            suite: Vec::new(),
        };
        // Assume a binary (deb) stanza until a Types: field says otherwise
        let mut is_deb = true;
        for line in data.lines() {
            let line = line.trim();
            if line.starts_with('#') {
                continue;
            }
            // Empty line: end of an entry, or beginning
            if line.is_empty() {
                if !current_entry.uri.is_empty() {
                    return if is_deb { Some(current_entry) } else { None };
                } else {
                    continue;
                }
            }
            if let Some((key, value)) = line.split_once(':') {
                let key = key.trim();
                let value = value.trim();
                match key {
                    "Types" => {
                        // We only care about deb types: skip stanzas that
                        // only provide deb-src
                        is_deb = value.split_whitespace().any(|t| t == "deb");
                    }
                    "Enabled" => {
                        // deb822 allows disabling a stanza in place
                        current_entry.enabled = !matches!(
                            value.to_ascii_lowercase().as_str(),
                            "no" | "false" | "0"
                        );
                    }
                    "URIs" => current_entry.uri = value.to_string(),
                    "Suites" => {
                        current_entry.suite =
                            value.split_whitespace().map(|s| s.to_string()).collect();
                    }
                    "Components" => {
                        current_entry.components =
                            value.split_whitespace().map(|s| s.to_string()).collect();
                    }
                    "Architectures" => {
                        current_entry.architectures =
                            value.split_whitespace().map(|s| s.to_string()).collect();
                    }
                    _ => {}
                }
            }
        }
        // End of entry, or empty file?
        if is_deb && !current_entry.uri.is_empty() {
            Some(current_entry)
        } else {
            None
        }
    }

    /// Parse a line describing a legacy source entry
    ///
    /// Expected format: `deb [arch=... signed-by=...] uri suite [components...]`.
    /// Returns `None` for blank lines, comments, and lines that are not
    /// well-formed `deb` entries.
    pub fn from_legacy(data: &str) -> Option<Self> {
        let line = data.lines().next()?.trim();
        if line.is_empty() || line.starts_with("#") {
            return None;
        }
        // Parse legacy deb line format: deb [arch=... / signed_by=] uri suite [components...]
        // Extract bracket parameters first
        let mut architectures = Vec::new();
        let mut line_without_brackets = line.to_string();
        // Find and process bracket parameters
        if let Some(start_bracket) = line.find('[')
            && let Some(end_bracket) = line.find(']')
        {
            let bracket_content = &line[start_bracket + 1..end_bracket];
            // Parse parameters inside brackets
            for param in bracket_content.split_whitespace() {
                if param.starts_with("arch=") {
                    let arch_values = param.split('=').nth(1).unwrap_or("");
                    architectures = arch_values
                        .split(',')
                        .map(|s| s.trim().to_string())
                        .collect();
                }
                // signed-by parameter is parsed but not stored
            }
            // Remove the bracket section from the line
            line_without_brackets = line[..start_bracket].to_string() + &line[end_bracket + 1..];
        }
        // Trim and split the remaining line
        let line_without_brackets = line_without_brackets.trim();
        let parts: Vec<&str> = line_without_brackets.split_whitespace().collect();
        // We need at least: deb, uri, suite
        if parts.len() < 3 || parts[0] != "deb" {
            return None;
        }
        let uri = parts[1].to_string();
        let suite = vec![parts[2].to_string()];
        let components: Vec<String> = parts[3..].iter().map(|&s| s.to_string()).collect();
        Some(SourceEntry {
            enabled: true,
            components,
            architectures,
            uri,
            suite,
        })
    }

    /// Convert this source entry to legacy format
    ///
    /// Emits one `deb` line per suite. Note: `enabled` is not reflected in
    /// the output; disabled entries are written like enabled ones.
    pub fn to_legacy(&self) -> String {
        let mut result = String::new();
        // Legacy entries contain one suite per line
        for suite in &self.suite {
            // Start with "deb" type
            result.push_str("deb");
            // Add architectures if present
            if !self.architectures.is_empty() {
                result.push_str(" [arch=");
                result.push_str(&self.architectures.join(","));
                result.push(']');
            }
            // Add URI and suite
            result.push(' ');
            result.push_str(&self.uri);
            result.push(' ');
            result.push_str(suite);
            // Add components
            if !self.components.is_empty() {
                result.push(' ');
                result.push_str(&self.components.join(" "));
            }
            result.push('\n');
        }
        result
    }
}
/// Parse a 'source list' string in deb822 format into a SourceEntry vector
pub fn parse_deb822(data: &str) -> Vec<SourceEntry> {
    // Stanzas are separated by blank lines; unparsable stanzas are skipped
    let mut entries = Vec::new();
    for stanza in data.split("\n\n") {
        if let Some(entry) = SourceEntry::from_deb822(stanza) {
            entries.push(entry);
        }
    }
    entries
}
/// Parse a 'source list' string in legacy format into a SourceEntry vector
///
/// One entry per line; comments, blank lines and malformed lines are
/// skipped by `from_legacy` returning `None`.
pub fn parse_legacy(data: &str) -> Vec<SourceEntry> {
    // `lines()` is the idiomatic line iterator (also consistent with
    // `load_legacy`); `from_legacy` trims each line itself.
    data.lines().filter_map(SourceEntry::from_legacy).collect()
}
/// Load sources from context (or current context by default)
pub fn load(ctx: Option<Arc<crate::context::Context>>) -> Result<Vec<SourceEntry>, Box<dyn Error>> {
    let ctx = ctx.unwrap_or_else(context::current);
    let mut sources = Vec::new();
    // Prefer DEB822 files (Ubuntu 24.04+ and Debian Trixie+); the Debian
    // file is only consulted when reading the Ubuntu one fails
    match load_deb822(&ctx, "/etc/apt/sources.list.d/ubuntu.sources") {
        Ok(entries) => sources.extend(entries),
        Err(_) => {
            if let Ok(entries) = load_deb822(&ctx, "/etc/apt/sources.list.d/debian.sources") {
                sources.extend(entries);
            }
        }
    }
    // Legacy sources.list entries are appended on top of any DEB822 ones
    if let Ok(entries) = load_legacy(&ctx, "/etc/apt/sources.list") {
        sources.extend(entries);
    }
    Ok(sources)
}
/// Save sources back to context
///
/// Serializes each entry in legacy one-line format and writes the result
/// to `path` inside the given context (current context by default).
pub fn save_legacy(
    ctx: Option<Arc<crate::context::Context>>,
    sources: Vec<SourceEntry>,
    path: &str,
) -> Result<(), Box<dyn Error>> {
    // Same default-context idiom as `load`
    let ctx = ctx.unwrap_or_else(context::current);
    // Note: `to_legacy` already terminates each entry with '\n', so joining
    // with "\n" leaves one blank line between entries (harmless to apt).
    let content = sources
        .into_iter()
        .map(|s| s.to_legacy())
        .collect::<Vec<_>>()
        .join("\n");
    ctx.write_file(Path::new(path), &content)?;
    Ok(())
}
/// Load sources from DEB822 format
///
/// Returns an empty vector when `path` does not exist.
/// NOTE(review): the existence check uses the *local* filesystem
/// (`Path::exists`) while the read goes through `ctx` — for a remote
/// context these may disagree; confirm intended behavior.
fn load_deb822(ctx: &context::Context, path: &str) -> Result<Vec<SourceEntry>, Box<dyn Error>> {
    let path = Path::new(path);
    if path.exists() {
        let content = ctx.read_file(path)?;
        return Ok(parse_deb822(&content));
    }
    Ok(Vec::new())
}
/// Load sources from legacy format
///
/// Returns an empty vector when `path` does not exist.
/// NOTE(review): like `load_deb822`, the existence check is against the
/// local filesystem while the read goes through `ctx`; verify this is
/// intended for non-local contexts.
fn load_legacy(ctx: &context::Context, path: &str) -> Result<Vec<SourceEntry>, Box<dyn Error>> {
    let path = Path::new(path);
    if path.exists() {
        let content = ctx.read_file(path)?;
        return Ok(content.lines().flat_map(SourceEntry::from_legacy).collect());
    }
    Ok(Vec::new())
}
#[cfg(test)]
mod tests {
    use super::*;

    // These parser tests are fully synchronous: plain #[test] avoids
    // requiring a tokio runtime (formerly #[tokio::test] async fn).
    #[test]
    fn test_parse_deb822() {
        let deb822 = "\
            Types: deb\n\
            URIs: http://fr.archive.ubuntu.com/ubuntu/\n\
            Suites: questing questing-updates questing-backports\n\
            Components: main restricted universe multiverse\n\
            Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n\
            Architectures: amd64\n\
            \n\
            Types: deb\n\
            URIs: http://security.ubuntu.com/ubuntu/\n\
            Suites: questing-security\n\
            Components: main restricted universe multiverse\n\
            Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n\
            Architectures: amd64\n\
            \n\
            Types: deb\n\
            URIs: http://ports.ubuntu.com/ubuntu-ports/\n\
            Suites: questing questing-updates questing-backports\n\
            Components: main restricted universe multiverse\n\
            Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n\
            Architectures: riscv64\n\
            ";
        let sources = parse_deb822(deb822);
        assert_eq!(sources.len(), 3);
        assert_eq!(sources[0].uri, "http://fr.archive.ubuntu.com/ubuntu/");
        assert_eq!(sources[0].architectures, vec!["amd64"]);
        assert_eq!(
            sources[0].suite,
            vec!["questing", "questing-updates", "questing-backports"]
        );
        assert_eq!(
            sources[0].components,
            vec!["main", "restricted", "universe", "multiverse"]
        );
        assert_eq!(sources[1].uri, "http://security.ubuntu.com/ubuntu/");
        assert_eq!(sources[1].architectures, vec!["amd64"]);
        assert_eq!(sources[1].suite, vec!["questing-security"]);
        assert_eq!(
            sources[1].components,
            vec!["main", "restricted", "universe", "multiverse"]
        );
        assert_eq!(sources[2].uri, "http://ports.ubuntu.com/ubuntu-ports/");
        assert_eq!(sources[2].architectures.len(), 1);
        assert_eq!(sources[2].architectures, vec!["riscv64"]);
        assert_eq!(
            sources[2].suite,
            vec!["questing", "questing-updates", "questing-backports"]
        );
        assert_eq!(
            sources[2].components,
            vec!["main", "restricted", "universe", "multiverse"]
        );
    }

    #[test]
    fn test_parse_legacy() {
        let legacy = "\
            deb [signed-by=\"/usr/share/keyrings/ubuntu-archive-keyring.gpg\" arch=amd64] http://archive.ubuntu.com/ubuntu resolute main universe\n\
            deb [arch=amd64,i386 signed-by=\"/usr/share/keyrings/ubuntu-archive-keyring.gpg\"] http://archive.ubuntu.com/ubuntu resolute-updates main\n\
            deb [signed-by=\"/usr/share/keyrings/ubuntu-archive-keyring.gpg\"] http://security.ubuntu.com/ubuntu resolute-security main\n\
            ";
        let sources = parse_legacy(legacy);
        assert_eq!(sources.len(), 3);
        assert_eq!(sources[0].uri, "http://archive.ubuntu.com/ubuntu");
        assert_eq!(sources[0].suite, vec!["resolute"]);
        assert_eq!(sources[0].components, vec!["main", "universe"]);
        assert_eq!(sources[0].architectures, vec!["amd64"]);
        assert_eq!(sources[1].uri, "http://archive.ubuntu.com/ubuntu");
        assert_eq!(sources[1].suite, vec!["resolute-updates"]);
        assert_eq!(sources[1].components, vec!["main"]);
        assert_eq!(sources[1].architectures, vec!["amd64", "i386"]);
        assert_eq!(sources[2].uri, "http://security.ubuntu.com/ubuntu");
        assert_eq!(sources[2].suite, vec!["resolute-security"]);
        assert_eq!(sources[2].components, vec!["main"]);
    }
}

View File

@@ -2,62 +2,18 @@ use std::error::Error;
use std::path::Path; use std::path::Path;
use std::process::Command; use std::process::Command;
use crate::changelog::parse_changelog_footer;
use crate::utils::gpg;
/// Build a Debian source package (to a .dsc)
pub fn build_source_package(cwd: Option<&Path>) -> Result<(), Box<dyn Error>> { pub fn build_source_package(cwd: Option<&Path>) -> Result<(), Box<dyn Error>> {
let cwd = cwd.unwrap_or_else(|| Path::new(".")); let cwd = cwd.unwrap_or_else(|| Path::new("."));
// Parse changelog to get maintainer information from the last modification entry let status = Command::new("dpkg-buildpackage")
let changelog_path = cwd.join("debian/changelog");
let (maintainer_name, maintainer_email) = parse_changelog_footer(&changelog_path)?;
// Check if a GPG key matching the maintainer's email exists
let signing_key = match gpg::find_signing_key_for_email(&maintainer_email) {
Ok(key) => key,
Err(e) => {
// If GPG is not available or there's an error, continue without signing
log::warn!("Failed to check for GPG key: {}", e);
None
}
};
// Build command arguments
let mut command = Command::new("dpkg-buildpackage");
command
.current_dir(cwd) .current_dir(cwd)
.arg("-S") .args(["-S", "-I", "-i", "-nc", "-d"])
.arg("-I") .status()?;
.arg("-i")
.arg("-nc")
.arg("-d");
// If a signing key is found, use it for signing
if let Some(key_id) = &signing_key {
command.arg(format!("--sign-keyid={}", key_id));
log::info!("Using GPG key {} for signing", key_id);
} else {
command.arg("--no-sign");
log::info!(
"No GPG key found for {} ({}), building without signing",
maintainer_name,
maintainer_email
);
}
let status = command.status()?;
if !status.success() { if !status.success() {
return Err(format!("dpkg-buildpackage failed with status: {}", status).into()); return Err(format!("dpkg-buildpackage failed with status: {}", status).into());
} }
if signing_key.is_some() {
println!("Package built and signed successfully!");
} else {
println!("Package built successfully (unsigned).");
}
Ok(()) Ok(())
} }

View File

@@ -5,7 +5,9 @@ use std::fs::File;
use std::io::{self, BufRead, Read, Write}; use std::io::{self, BufRead, Read, Write};
use std::path::Path; use std::path::Path;
/// Automatically generate a changelog entry from a commit history and previous changelog /*
* Automatically generate a changelog entry from a commit history and previous changelog
*/
pub fn generate_entry( pub fn generate_entry(
changelog_file: &str, changelog_file: &str,
cwd: Option<&Path>, cwd: Option<&Path>,
@@ -59,8 +61,10 @@ pub fn generate_entry(
Ok(()) Ok(())
} }
/// Compute the next (most probable) version number of a package, from old version and /*
/// conditions on changes (is ubuntu upload, is a no change rebuild, is a non-maintainer upload) * Compute the next (most probable) version number of a package, from old version and
* conditions on changes (is ubuntu upload, is a no change rebuild, is a non-maintainer upload)
*/
fn compute_new_version( fn compute_new_version(
old_version: &str, old_version: &str,
is_ubuntu: bool, is_ubuntu: bool,
@@ -83,7 +87,9 @@ fn compute_new_version(
increment_suffix(old_version, "") increment_suffix(old_version, "")
} }
/// Increment a version number by 1, for a given suffix /*
* Increment a version number by 1, for a given suffix
*/
fn increment_suffix(version: &str, suffix: &str) -> String { fn increment_suffix(version: &str, suffix: &str) -> String {
// If suffix is empty, we just look for trailing digits // If suffix is empty, we just look for trailing digits
// If suffix is not empty, we look for suffix followed by digits // If suffix is not empty, we look for suffix followed by digits
@@ -114,8 +120,9 @@ fn increment_suffix(version: &str, suffix: &str) -> String {
} }
} }
/// Parse a changelog file first entry header /*
/// Returns (package, version, series) tuple from the last modification entry * Parse a changelog file first entry header, to obtain (package, version, series)
*/
pub fn parse_changelog_header( pub fn parse_changelog_header(
path: &Path, path: &Path,
) -> Result<(String, String, String), Box<dyn std::error::Error>> { ) -> Result<(String, String, String), Box<dyn std::error::Error>> {
@@ -136,33 +143,6 @@ pub fn parse_changelog_header(
} }
} }
/// Parse a changelog file footer to extract maintainer information
/// Returns (name, email) tuple from the last modification entry
pub fn parse_changelog_footer(path: &Path) -> Result<(String, String), Box<dyn std::error::Error>> {
let mut file = File::open(path)?;
let mut content = String::new();
file.read_to_string(&mut content)?;
// Find the last maintainer line (format: -- Name <email> Date)
let re = Regex::new(r"--\s*([^<]+?)\s*<([^>]+)>\s*")?;
if let Some(first_match) = re.captures_iter(&content).next() {
let name = first_match
.get(1)
.map_or("", |m| m.as_str())
.trim()
.to_string();
let email = first_match
.get(2)
.map_or("", |m| m.as_str())
.trim()
.to_string();
Ok((name, email))
} else {
Err(format!("No maintainer information found in {}", path.display()).into())
}
}
/* /*
* Obtain all commit messages as a list since a tagged version in a git repository * Obtain all commit messages as a list since a tagged version in a git repository
*/ */

View File

@@ -10,7 +10,6 @@ use super::schroot::SchrootDriver;
use super::ssh::SshDriver; use super::ssh::SshDriver;
use super::unshare::UnshareDriver; use super::unshare::UnshareDriver;
/// A ContextDriver is the interface for the logic happening inside a context
pub trait ContextDriver { pub trait ContextDriver {
fn ensure_available(&self, src: &Path, dest_root: &str) -> io::Result<PathBuf>; fn ensure_available(&self, src: &Path, dest_root: &str) -> io::Result<PathBuf>;
fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()>; fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()>;
@@ -33,7 +32,6 @@ pub trait ContextDriver {
fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()>; fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()>;
fn read_file(&self, path: &Path) -> io::Result<String>; fn read_file(&self, path: &Path) -> io::Result<String>;
fn write_file(&self, path: &Path, content: &str) -> io::Result<()>; fn write_file(&self, path: &Path, content: &str) -> io::Result<()>;
fn exists(&self, path: &Path) -> io::Result<bool>;
} }
/// Represents an execution environment (Local or via SSH). /// Represents an execution environment (Local or via SSH).
@@ -43,52 +41,34 @@ pub trait ContextDriver {
#[serde(tag = "type")] #[serde(tag = "type")]
#[derive(Default)] #[derive(Default)]
pub enum ContextConfig { pub enum ContextConfig {
/// Local context: actions executed locally
#[serde(rename = "local")] #[serde(rename = "local")]
#[default] #[default]
Local, Local,
/// SSH context: actions over an SSH connection
#[serde(rename = "ssh")] #[serde(rename = "ssh")]
Ssh { Ssh {
/// Host for the SSH connection
host: String, host: String,
/// User for the SSH connection
user: Option<String>, user: Option<String>,
/// TCP port for the SSH connection
port: Option<u16>, port: Option<u16>,
}, },
/// Schroot context: using `schroot`
#[serde(rename = "schroot")] #[serde(rename = "schroot")]
Schroot { Schroot {
/// Name of the schroot
name: String, name: String,
/// Optional parent context for the Schroot context
parent: Option<String>, parent: Option<String>,
}, },
/// Unshare context: chroot with dropped permissions (using `unshare`)
#[serde(rename = "unshare")] #[serde(rename = "unshare")]
Unshare { Unshare {
/// Path to use for chrooting
path: String, path: String,
/// Optional parent context for the Unshare context
parent: Option<String>, parent: Option<String>,
}, },
} }
/// A context, allowing to run commands, read and write files, etc
pub struct Context { pub struct Context {
/// Configuration for the context
pub config: ContextConfig, pub config: ContextConfig,
/// Parent context for the context
///
/// For example, you could have a chroot context over an ssh connection
pub parent: Option<Arc<Context>>, pub parent: Option<Arc<Context>>,
/// ContextDriver for the context, implementing the logic for actions
driver: Mutex<Option<Box<dyn ContextDriver + Send + Sync>>>, driver: Mutex<Option<Box<dyn ContextDriver + Send + Sync>>>,
} }
impl Context { impl Context {
/// Create a context from configuration
pub fn new(config: ContextConfig) -> Self { pub fn new(config: ContextConfig) -> Self {
let parent = match &config { let parent = match &config {
ContextConfig::Schroot { ContextConfig::Schroot {
@@ -117,7 +97,6 @@ impl Context {
} }
} }
/// Create a context with an explicit parent context
pub fn with_parent(config: ContextConfig, parent: Arc<Context>) -> Self { pub fn with_parent(config: ContextConfig, parent: Arc<Context>) -> Self {
Self { Self {
config, config,
@@ -126,7 +105,6 @@ impl Context {
} }
} }
/// Make a command inside context
pub fn command<S: AsRef<OsStr>>(&self, program: S) -> ContextCommand<'_> { pub fn command<S: AsRef<OsStr>>(&self, program: S) -> ContextCommand<'_> {
ContextCommand { ContextCommand {
context: self, context: self,
@@ -148,7 +126,6 @@ impl Context {
.ensure_available(src, dest_root) .ensure_available(src, dest_root)
} }
/// Create a temp directory inside context
pub fn create_temp_dir(&self) -> io::Result<String> { pub fn create_temp_dir(&self) -> io::Result<String> {
self.driver().as_ref().unwrap().create_temp_dir() self.driver().as_ref().unwrap().create_temp_dir()
} }
@@ -166,27 +143,18 @@ impl Context {
self.driver().as_ref().unwrap().list_files(path) self.driver().as_ref().unwrap().list_files(path)
} }
/// Copy a path inside context
pub fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> { pub fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
self.driver().as_ref().unwrap().copy_path(src, dest) self.driver().as_ref().unwrap().copy_path(src, dest)
} }
/// Read a file inside context
pub fn read_file(&self, path: &Path) -> io::Result<String> { pub fn read_file(&self, path: &Path) -> io::Result<String> {
self.driver().as_ref().unwrap().read_file(path) self.driver().as_ref().unwrap().read_file(path)
} }
/// Write a file inside context
pub fn write_file(&self, path: &Path, content: &str) -> io::Result<()> { pub fn write_file(&self, path: &Path, content: &str) -> io::Result<()> {
self.driver().as_ref().unwrap().write_file(path, content) self.driver().as_ref().unwrap().write_file(path, content)
} }
/// Check if a file or directory exists inside context
pub fn exists(&self, path: &Path) -> io::Result<bool> {
self.driver().as_ref().unwrap().exists(path)
}
/// Create and obtain a specific driver for the context
pub fn driver( pub fn driver(
&self, &self,
) -> std::sync::MutexGuard<'_, Option<Box<dyn ContextDriver + Send + Sync>>> { ) -> std::sync::MutexGuard<'_, Option<Box<dyn ContextDriver + Send + Sync>>> {
@@ -214,7 +182,6 @@ impl Context {
driver_lock driver_lock
} }
/// Clone a context
pub fn clone_raw(&self) -> Self { pub fn clone_raw(&self) -> Self {
Self { Self {
config: self.config.clone(), config: self.config.clone(),
@@ -240,13 +207,12 @@ pub struct ContextCommand<'a> {
} }
impl<'a> ContextCommand<'a> { impl<'a> ContextCommand<'a> {
/// Add an argument to current command
pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self { pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self {
self.args.push(arg.as_ref().to_string_lossy().to_string()); self.args.push(arg.as_ref().to_string_lossy().to_string());
self self
} }
/// Add multiple command arguments // Support chaining args
pub fn args<I, S>(&mut self, args: I) -> &mut Self pub fn args<I, S>(&mut self, args: I) -> &mut Self
where where
I: IntoIterator<Item = S>, I: IntoIterator<Item = S>,
@@ -258,7 +224,6 @@ impl<'a> ContextCommand<'a> {
self self
} }
/// Set environment variable for command
pub fn env<K, V>(&mut self, key: K, val: V) -> &mut Self pub fn env<K, V>(&mut self, key: K, val: V) -> &mut Self
where where
K: AsRef<OsStr>, K: AsRef<OsStr>,
@@ -271,7 +236,6 @@ impl<'a> ContextCommand<'a> {
self self
} }
/// Set multiple environment variables for command
pub fn envs<I, K, V>(&mut self, vars: I) -> &mut Self pub fn envs<I, K, V>(&mut self, vars: I) -> &mut Self
where where
I: IntoIterator<Item = (K, V)>, I: IntoIterator<Item = (K, V)>,
@@ -284,13 +248,11 @@ impl<'a> ContextCommand<'a> {
self self
} }
/// Set current working directory for command
pub fn current_dir<P: AsRef<OsStr>>(&mut self, dir: P) -> &mut Self { pub fn current_dir<P: AsRef<OsStr>>(&mut self, dir: P) -> &mut Self {
self.cwd = Some(dir.as_ref().to_string_lossy().to_string()); self.cwd = Some(dir.as_ref().to_string_lossy().to_string());
self self
} }
/// Run command and obtain exit status
pub fn status(&mut self) -> io::Result<std::process::ExitStatus> { pub fn status(&mut self) -> io::Result<std::process::ExitStatus> {
self.context.driver().as_ref().unwrap().run( self.context.driver().as_ref().unwrap().run(
&self.program, &self.program,
@@ -300,7 +262,7 @@ impl<'a> ContextCommand<'a> {
) )
} }
/// Run command, capturing output // Capture output
pub fn output(&mut self) -> io::Result<std::process::Output> { pub fn output(&mut self) -> io::Result<std::process::Output> {
self.context.driver().as_ref().unwrap().run_output( self.context.driver().as_ref().unwrap().run_output(
&self.program, &self.program,

View File

@@ -4,7 +4,6 @@ use super::api::ContextDriver;
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::process::Command; use std::process::Command;
use std::time::SystemTime;
pub struct LocalDriver; pub struct LocalDriver;
@@ -21,34 +20,8 @@ impl ContextDriver for LocalDriver {
} }
fn create_temp_dir(&self) -> io::Result<String> { fn create_temp_dir(&self) -> io::Result<String> {
// Generate a unique temporary directory name with random string let temp_dir = tempfile::Builder::new().prefix("pkh-").tempdir()?;
let base_timestamp = SystemTime::now() Ok(temp_dir.keep().to_string_lossy().to_string())
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let mut attempt = 0;
loop {
let work_dir_name = if attempt == 0 {
format!("pkh-{}", base_timestamp)
} else {
format!("pkh-{}-{}", base_timestamp, attempt)
};
let temp_dir_path = std::env::temp_dir().join(&work_dir_name);
// Check if directory already exists
if temp_dir_path.exists() {
attempt += 1;
continue;
}
// Create the directory
std::fs::create_dir_all(&temp_dir_path)?;
// Return the path as a string
return Ok(temp_dir_path.to_string_lossy().to_string());
}
} }
fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()> { fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
@@ -105,10 +78,6 @@ impl ContextDriver for LocalDriver {
fn write_file(&self, path: &Path, content: &str) -> io::Result<()> { fn write_file(&self, path: &Path, content: &str) -> io::Result<()> {
std::fs::write(path, content) std::fs::write(path, content)
} }
fn exists(&self, path: &Path) -> io::Result<bool> {
Ok(path.exists())
}
} }
fn copy_dir_recursive(src: &Path, dest: &Path) -> io::Result<()> { fn copy_dir_recursive(src: &Path, dest: &Path) -> io::Result<()> {

View File

@@ -26,7 +26,6 @@ impl Default for Config {
} }
} }
/// Helper managing contexts
pub struct ContextManager { pub struct ContextManager {
context: RwLock<Arc<Context>>, context: RwLock<Arc<Context>>,
config_path: PathBuf, config_path: PathBuf,
@@ -68,12 +67,10 @@ impl ContextManager {
}) })
} }
/// Obtain current ContextManager configuration
pub fn get_config(&self) -> std::sync::RwLockReadGuard<'_, Config> { pub fn get_config(&self) -> std::sync::RwLockReadGuard<'_, Config> {
self.config.read().unwrap() self.config.read().unwrap()
} }
/// Make a ContextManager using a specific configuration path
pub fn with_path(path: PathBuf) -> Self { pub fn with_path(path: PathBuf) -> Self {
let config = Config::default(); let config = Config::default();
Self { Self {
@@ -83,7 +80,6 @@ impl ContextManager {
} }
} }
/// Save current context configuration to disk
pub fn save(&self) -> io::Result<()> { pub fn save(&self) -> io::Result<()> {
let config = self.config.read().unwrap(); let config = self.config.read().unwrap();
let content = serde_json::to_string_pretty(&*config) let content = serde_json::to_string_pretty(&*config)
@@ -101,7 +97,6 @@ impl ContextManager {
Context::new(context_config) Context::new(context_config)
} }
/// List contexts from configuration
pub fn list_contexts(&self) -> Vec<String> { pub fn list_contexts(&self) -> Vec<String> {
self.config self.config
.read() .read()
@@ -112,7 +107,6 @@ impl ContextManager {
.collect() .collect()
} }
/// Add a context to configuration
pub fn add_context(&self, name: &str, config: ContextConfig) -> io::Result<()> { pub fn add_context(&self, name: &str, config: ContextConfig) -> io::Result<()> {
self.config self.config
.write() .write()
@@ -122,7 +116,6 @@ impl ContextManager {
self.save() self.save()
} }
/// Remove context from configuration
pub fn remove_context(&self, name: &str) -> io::Result<()> { pub fn remove_context(&self, name: &str) -> io::Result<()> {
let mut config = self.config.write().unwrap(); let mut config = self.config.write().unwrap();
if name == "local" { if name == "local" {
@@ -144,7 +137,6 @@ impl ContextManager {
Ok(()) Ok(())
} }
/// Set current context from name (modifying configuration)
pub fn set_current(&self, name: &str) -> io::Result<()> { pub fn set_current(&self, name: &str) -> io::Result<()> {
let mut config = self.config.write().unwrap(); let mut config = self.config.write().unwrap();
if config.contexts.contains_key(name) { if config.contexts.contains_key(name) {
@@ -161,18 +153,14 @@ impl ContextManager {
} }
} }
/// Set current context, without modifying configuration
pub fn set_current_ephemeral(&self, context: Context) { pub fn set_current_ephemeral(&self, context: Context) {
*self.context.write().unwrap() = context.into(); *self.context.write().unwrap() = context.into();
} }
/// Obtain current context handle
pub fn current(&self) -> Arc<Context> { pub fn current(&self) -> Arc<Context> {
self.context.read().unwrap().clone() self.context.read().unwrap().clone()
} }
/// Obtain current context name
/// Will not work for ephemeral context (obtained from config)
pub fn current_name(&self) -> String { pub fn current_name(&self) -> String {
self.config.read().unwrap().context.clone() self.config.read().unwrap().context.clone()
} }

View File

@@ -9,12 +9,10 @@ pub use api::{Context, ContextCommand, ContextConfig};
pub use manager::ContextManager; pub use manager::ContextManager;
use std::sync::Arc; use std::sync::Arc;
/// Obtain global context manager
pub fn manager() -> &'static ContextManager { pub fn manager() -> &'static ContextManager {
&manager::MANAGER &manager::MANAGER
} }
/// Obtain current context
pub fn current() -> Arc<Context> { pub fn current() -> Arc<Context> {
manager::MANAGER.current() manager::MANAGER.current()
} }
@@ -34,13 +32,8 @@ mod tests {
let ctx = Context::new(ContextConfig::Local); let ctx = Context::new(ContextConfig::Local);
let dest = ctx.ensure_available(&src_file, "/tmp").unwrap(); let dest = ctx.ensure_available(&src_file, "/tmp").unwrap();
// Should return a path that exists and has the same content // Should return canonical path
assert!(dest.exists()); assert_eq!(dest, src_file.canonicalize().unwrap());
let content = fs::read_to_string(&dest).unwrap();
assert_eq!(content, "local");
// The dest should be in the /tmp directory
assert!(dest.starts_with("/tmp"));
} }
#[test] #[test]
@@ -113,7 +106,7 @@ mod tests {
#[test] #[test]
fn test_context_file_ops() { fn test_context_file_ops() {
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let ctx = Context::new(ContextConfig::Local); let ctx = super::manager().current();
let file_path = temp_dir.path().join("test.txt"); let file_path = temp_dir.path().join("test.txt");
let content = "hello world"; let content = "hello world";

View File

@@ -262,14 +262,4 @@ impl ContextDriver for SchrootDriver {
} }
Ok(()) Ok(())
} }
fn exists(&self, path: &Path) -> io::Result<bool> {
let status = self.run(
"test",
&["-e".to_string(), path.to_string_lossy().to_string()],
&[],
None,
)?;
Ok(status.success())
}
} }

View File

@@ -244,15 +244,6 @@ impl ContextDriver for SshDriver {
remote_file.write_all(content.as_bytes())?; remote_file.write_all(content.as_bytes())?;
Ok(()) Ok(())
} }
fn exists(&self, path: &Path) -> io::Result<bool> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let sftp = sess.sftp().map_err(io::Error::other)?;
match sftp.stat(path) {
Ok(_) => Ok(true),
Err(_) => Ok(false),
}
}
} }
impl SshDriver { impl SshDriver {

View File

@@ -112,30 +112,17 @@ impl ContextDriver for UnshareDriver {
} }
fn create_temp_dir(&self) -> io::Result<String> { fn create_temp_dir(&self) -> io::Result<String> {
// Create a temporary directory inside the chroot with unique naming // Create a temporary directory inside the chroot
let base_timestamp = std::time::SystemTime::now() let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH) .duration_since(std::time::UNIX_EPOCH)
.unwrap() .unwrap()
.as_secs(); .as_secs();
let mut attempt = 0; let work_dir_name = format!("pkh-build-{}", timestamp);
loop {
let work_dir_name = if attempt == 0 {
format!("pkh-build-{}", base_timestamp)
} else {
format!("pkh-build-{}-{}", base_timestamp, attempt)
};
let work_dir_inside_chroot = format!("/tmp/{}", work_dir_name); let work_dir_inside_chroot = format!("/tmp/{}", work_dir_name);
let host_path = Path::new(&self.path).join("tmp").join(&work_dir_name);
// Check if directory already exists
if host_path.exists() {
attempt += 1;
continue;
}
// Create the directory on the host filesystem // Create the directory on the host filesystem
let host_path = Path::new(&self.path).join("tmp").join(&work_dir_name);
std::fs::create_dir_all(&host_path)?; std::fs::create_dir_all(&host_path)?;
debug!( debug!(
@@ -145,8 +132,7 @@ impl ContextDriver for UnshareDriver {
); );
// Return the path as it appears inside the chroot // Return the path as it appears inside the chroot
return Ok(work_dir_inside_chroot); Ok(work_dir_inside_chroot)
}
} }
fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> { fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
@@ -164,11 +150,6 @@ impl ContextDriver for UnshareDriver {
let host_path = Path::new(&self.path).join(path.to_string_lossy().trim_start_matches('/')); let host_path = Path::new(&self.path).join(path.to_string_lossy().trim_start_matches('/'));
self.parent().write_file(&host_path, content) self.parent().write_file(&host_path, content)
} }
fn exists(&self, path: &Path) -> io::Result<bool> {
let host_path = Path::new(&self.path).join(path.to_string_lossy().trim_start_matches('/'));
self.parent().exists(&host_path)
}
} }
impl UnshareDriver { impl UnshareDriver {
@@ -185,21 +166,11 @@ impl UnshareDriver {
env: &[(String, String)], env: &[(String, String)],
cwd: Option<&str>, cwd: Option<&str>,
) -> ContextCommand<'_> { ) -> ContextCommand<'_> {
let mut cmd = self.parent().command("unshare"); let mut cmd = self.parent().command("sudo");
cmd.args(env.iter().map(|(k, v)| format!("{k}={v}")));
cmd.envs(env.iter().cloned()); cmd.arg("unshare")
.arg("--mount-proc")
cmd.arg("--map-user=65536")
.arg("--map-group=65536")
.arg("--pid")
.arg("--ipc")
.arg("--uts")
.arg("--user")
.arg("--cgroup")
.arg("--map-auto")
.arg("-r")
.arg("--mount")
.arg("--fork")
.arg("-R") .arg("-R")
.arg(&self.path); .arg(&self.path);
@@ -207,11 +178,7 @@ impl UnshareDriver {
cmd.arg("-w").arg(dir); cmd.arg("-w").arg(dir);
} }
cmd.arg("--").arg("bash").arg("-c").arg(format!( cmd.arg(program).args(args);
"mount -t proc proc /proc; mkdir /dev/pts; mount -t devpts devpts /dev/pts; touch /dev/ptmx; mount --bind /dev/pts/ptmx /dev/ptmx; {} {}",
program,
args.join(" ")
));
cmd cmd
} }

View File

@@ -1,7 +1,119 @@
use crate::context; use crate::context;
use crate::context::{Context, ContextConfig};
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::path::PathBuf;
pub struct EphemeralContextGuard {
previous_context: String,
chroot_path: PathBuf,
}
impl EphemeralContextGuard {
pub fn new(series: &str) -> Result<Self, Box<dyn Error>> {
let current_context_name = context::manager().current_name();
// Create a temporary directory for the chroot
let chroot_path_str = context::current().create_temp_dir()?;
let chroot_path = PathBuf::from(chroot_path_str);
log::debug!(
"Creating new chroot for {} at {}...",
series,
chroot_path.display()
);
let status = context::current()
.command("sudo")
.arg("mmdebstrap")
.arg("--variant=buildd")
.arg(series)
.arg(chroot_path.to_string_lossy().to_string())
.status()?;
if !status.success() {
// Clean up on failure
let _ = std::fs::remove_dir_all(&chroot_path);
return Err(format!("mmdebstrap failed for series {}", series).into());
}
// Mount '/dev' inside the chroot
let status = context::current()
.command("sudo")
.arg("mount")
.arg("--bind")
.arg("/dev")
.arg(format!("{}/dev", chroot_path.display()))
.status()?;
if !status.success() {
// Clean up on failure
let _ = std::fs::remove_dir_all(&chroot_path);
return Err("Failed to mount /dev inside chroot".into());
}
// Switch to an ephemeral context to build the package in the chroot
context::manager().set_current_ephemeral(Context::new(ContextConfig::Unshare {
path: chroot_path.to_string_lossy().to_string(),
parent: Some(current_context_name.clone()),
}));
Ok(Self {
previous_context: current_context_name,
chroot_path,
})
}
}
impl Drop for EphemeralContextGuard {
fn drop(&mut self) {
log::debug!("Cleaning up ephemeral context...");
// Reset to normal context
if let Err(e) = context::manager().set_current(&self.previous_context) {
log::error!("Failed to restore context {}: {}", self.previous_context, e);
}
// Unmount '/dev' inside the chroot
let status = context::current()
.command("sudo")
.arg("umount")
.arg(format!("{}/dev", &self.chroot_path.display()))
.status();
if status.is_err() || !status.unwrap().success() {
// If we fail to umount, then we can't remove (would remove /dev/xx on host)
log::error!("Failed to umount /dev inside chroot. Not cleaning up.");
return;
}
// Remove chroot directory
// We use the restored context to execute the cleanup command
let result = context::current()
.command("sudo")
.arg("rm")
.arg("-rf")
.arg(&self.chroot_path)
.status();
match result {
Ok(status) => {
if !status.success() {
log::error!(
"Failed to remove chroot directory {}",
self.chroot_path.display()
);
}
}
Err(e) => {
log::error!(
"Failed to execute cleanup command for {}: {}",
self.chroot_path.display(),
e
);
}
}
}
}
/// Set environment variables for cross-compilation /// Set environment variables for cross-compilation
pub fn setup_environment( pub fn setup_environment(
env: &mut HashMap<String, String>, env: &mut HashMap<String, String>,
@@ -10,8 +122,7 @@ pub fn setup_environment(
let dpkg_architecture = String::from_utf8( let dpkg_architecture = String::from_utf8(
context::current() context::current()
.command("dpkg-architecture") .command("dpkg-architecture")
.arg("-a") .arg(format!("-a{}", arch))
.arg(arch)
.output()? .output()?
.stdout, .stdout,
)?; )?;
@@ -51,99 +162,145 @@ pub fn ensure_repositories(arch: &str, series: &str) -> Result<(), Box<dyn Error
return Ok(()); return Ok(());
} }
// Load existing sources // Handle DEB822 format (Ubuntu 24.04+)
let mut sources = crate::apt::sources::load(Some(ctx.clone()))?;
// Ensure all components are enabled for the primary architecture
for source in &mut sources {
if source.uri.contains("archive.ubuntu.com") || source.uri.contains("security.ubuntu.com") {
// Scope to local_arch if not already scoped
if source.architectures.is_empty() {
source.architectures.push(local_arch.clone());
}
// Ensure all components are present
let required_components = ["main", "restricted", "universe", "multiverse"];
for &comp in &required_components {
if !source.components.contains(&comp.to_string()) {
source.components.push(comp.to_string());
}
}
// Ensure all suites (pockets) are enabled, excluding 'proposed'
let required_suites = [
series.to_string(),
format!("{}-updates", series),
format!("{}-backports", series),
format!("{}-security", series),
];
for suite in required_suites {
if !source.suite.contains(&suite) {
source.suite.push(suite);
}
}
}
}
// Check if ports repository already exists for the target architecture
let has_ports = sources
.iter()
.any(|s| s.uri.contains("ports.ubuntu.com") && s.architectures.contains(&arch.to_string()));
if !has_ports {
// Add ports repository for the target architecture
let ports_entry = crate::apt::sources::SourceEntry {
enabled: true,
components: vec![
"main".to_string(),
"restricted".to_string(),
"universe".to_string(),
"multiverse".to_string(),
],
architectures: vec![arch.to_string()],
uri: "http://ports.ubuntu.com/ubuntu-ports".to_string(),
suite: vec![
format!("{series}"),
format!("{series}-updates"),
format!("{series}-backports"),
format!("{series}-security"),
],
};
sources.push(ports_entry);
}
// Save the updated sources
// Try to save in DEB822 format first, fall back to legacy format
let deb822_path = "/etc/apt/sources.list.d/ubuntu.sources"; let deb822_path = "/etc/apt/sources.list.d/ubuntu.sources";
if ctx let has_deb822 = ctx
.command("test") .command("test")
.arg("-f") .arg("-f")
.arg(deb822_path) .arg(deb822_path)
.status()? .status()?
.success() .success();
{
// For DEB822 format, we need to reconstruct the file content if has_deb822 {
let mut content = String::new(); ensure_repositories_deb822(&ctx, arch, &local_arch, series, deb822_path)?;
for source in &sources {
if !source.enabled {
continue;
}
content.push_str("Types: deb\n");
content.push_str(&format!("URIs: {}\n", source.uri));
content.push_str(&format!("Suites: {}\n", source.suite.join(" ")));
content.push_str(&format!("Components: {}\n", source.components.join(" ")));
content.push_str("Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n");
content.push_str(&format!(
"Architectures: {}\n",
source.architectures.join(" ")
));
content.push('\n');
}
ctx.write_file(std::path::Path::new(deb822_path), &content)?;
} else { } else {
// Fall back to legacy format ensure_repositories_legacy(&ctx, arch, &local_arch, series, "/etc/apt/sources.list")?;
crate::apt::sources::save_legacy(Some(ctx.clone()), sources, "/etc/apt/sources.list")?;
} }
Ok(()) Ok(())
} }
fn ensure_repositories_deb822(
ctx: &context::Context,
arch: &str,
local_arch: &str,
series: &str,
deb822_path: &str,
) -> Result<(), Box<dyn Error>> {
// Scope existing to local_arch if not already scoped
ctx.command("sed")
.arg("-i")
.arg(format!("/URIs:.*\\(archive\\|security\\)\\.ubuntu\\.com/ {{ n; /^Architectures:/ ! i Architectures: {} }}", local_arch))
.arg(deb822_path)
.status()?;
// Ensure all components are enabled for the primary architecture
ctx.command("sed")
.arg("-i")
.arg("/URIs:.*\\(archive\\|security\\)\\.ubuntu\\.com/,/Components:/ s/^Components:.*/Components: main restricted universe multiverse/")
.arg(deb822_path)
.status()?;
// Ensure all suites (pockets) are enabled for the primary architecture
// Excluding 'proposed' as it contains unstable software
let suites = format!("{series} {series}-updates {series}-backports {series}-security");
ctx.command("sed")
.arg("-i")
.arg(format!(
"/URIs:.*\\(archive\\|security\\)\\.ubuntu\\.com/,/Suites:/ s/^Suites:.*/Suites: {}/",
suites
))
.arg(deb822_path)
.status()?;
// Add ports if not already present
let has_ports = ctx
.command("grep")
.arg("-q")
.arg("ports.ubuntu.com")
.arg(deb822_path)
.status()?
.success();
if !has_ports {
let ports_block = format!(
"\nTypes: deb\nURIs: http://ports.ubuntu.com/ubuntu-ports\nSuites: {series} {series}-updates {series}-backports {series}-security\nComponents: main restricted universe multiverse\nSigned-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\nArchitectures: {arch}\n"
);
ctx.command("sh")
.arg("-c")
.arg(format!("echo '{}' >> {}", ports_block, deb822_path))
.status()?;
}
Ok(())
}
fn ensure_repositories_legacy(
ctx: &context::Context,
arch: &str,
local_arch: &str,
series: &str,
sources_path: &str,
) -> Result<(), Box<dyn Error>> {
// Scope archive.ubuntu.com and security.ubuntu.com to local_arch if not already scoped
ctx.command("sed")
.arg("-i")
.arg(format!(
r"/archive.ubuntu.com\|security.ubuntu.com/ {{ /arch=/ ! {{ /^deb \[/ ! s/^deb /deb [arch={}] /; /^deb \[/ s/^deb \[\([^]]*\)\]/deb [arch={} \1]/ }} }}",
local_arch, local_arch
))
.arg(sources_path)
.status()?;
// Ensure all components (main restricted universe multiverse) are present for all archive/security lines
ctx.command("sed")
.arg("-i")
.arg(r"/archive.ubuntu.com\|security.ubuntu.com/ s/\( main\)\?\([ ]\+restricted\)\?\([ ]\+universe\)\?\([ ]\+multiverse\)\?$/ main restricted universe multiverse/")
.arg(sources_path)
.status()?;
// Ensure all pockets exist. If not, we append them.
for pocket in ["", "-updates", "-backports", "-security"] {
let suite = format!("{}{}", series, pocket);
let has_suite = ctx
.command("grep")
.arg("-q")
.arg(format!(" {}", suite))
.arg(sources_path)
.status()?
.success();
if !has_suite {
let line = format!(
"deb [arch={}] http://archive.ubuntu.com/ubuntu/ {} main restricted universe multiverse",
local_arch, suite
);
ctx.command("sh")
.arg("-c")
.arg(format!("echo '{}' >> {}", line, sources_path))
.status()?;
}
}
// Add ports repository to sources.list if not already present
let has_ports = ctx
.command("grep")
.arg("-q")
.arg("ports.ubuntu.com")
.arg(sources_path)
.status()?
.success();
if !has_ports {
let ports_lines = format!(
"deb [arch={arch}] http://ports.ubuntu.com/ubuntu-ports {series} main restricted universe multiverse\n\
deb [arch={arch}] http://ports.ubuntu.com/ubuntu-ports {series}-updates main restricted universe multiverse\n\
deb [arch={arch}] http://ports.ubuntu.com/ubuntu-ports {series}-backports main restricted universe multiverse\n\
deb [arch={arch}] http://ports.ubuntu.com/ubuntu-ports {series}-security main restricted universe multiverse"
);
ctx.command("sh")
.arg("-c")
.arg(format!("echo '{}' >> {}", ports_lines, sources_path))
.status()?;
}
Ok(())
}

View File

@@ -1,290 +0,0 @@
use crate::context;
use crate::context::{Context, ContextConfig};
use directories::ProjectDirs;
use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};
use tar::Archive;
use xz2::read::XzDecoder;
/// An ephemeral unshare context guard that creates and manages a temporary chroot environment
/// for building packages with unshare permissions.
pub struct EphemeralContextGuard {
previous_context: String,
chroot_path: PathBuf,
build_succeeded: bool,
}
impl EphemeralContextGuard {
/// Create a new ephemeral unshare context for the specified series
pub async fn new(series: &str) -> Result<Self, Box<dyn Error>> {
let current_context_name = context::manager().current_name();
// Create a temporary directory for the chroot
let chroot_path_str = context::current().create_temp_dir()?;
let chroot_path = PathBuf::from(chroot_path_str);
log::debug!(
"Creating new chroot for {} at {}...",
series,
chroot_path.display()
);
// Download and extract the chroot tarball
Self::download_and_extract_chroot(series, &chroot_path).await?;
// Switch to an ephemeral context to build the package in the chroot
context::manager().set_current_ephemeral(Context::new(ContextConfig::Unshare {
path: chroot_path.to_string_lossy().to_string(),
parent: Some(current_context_name.clone()),
}));
Ok(Self {
previous_context: current_context_name,
chroot_path,
build_succeeded: false,
})
}
async fn download_and_extract_chroot(
series: &str,
chroot_path: &PathBuf,
) -> Result<(), Box<dyn Error>> {
// Get project directories for caching
let proj_dirs = ProjectDirs::from("com", "pkh", "pkh")
.ok_or("Could not determine project directories")?;
let cache_dir = proj_dirs.cache_dir();
fs::create_dir_all(cache_dir)?;
// Create tarball filename based on series
let tarball_filename = format!("{}-buildd.tar.xz", series);
let tarball_path = cache_dir.join(&tarball_filename);
// Check for existing lockfile, and wait for a timeout if it exists
// After timeout, warn the user
let lockfile_path = tarball_path.with_extension("lock");
let ctx = context::current();
// Check if lockfile exists and wait for it to be removed
let mut wait_time = 0;
let timeout = 300; // 5 minutes timeout
let poll_interval = 5; // Check every 5 seconds
while ctx.exists(&lockfile_path)? {
if wait_time >= timeout {
log::warn!(
"Lockfile {} exists and has been present for more than {} seconds. \
Another process may be downloading the chroot tarball. Continuing anyway...",
lockfile_path.display(),
timeout
);
break;
}
log::info!(
"Lockfile {} exists, waiting for download to complete... ({}s/{})",
lockfile_path.display(),
wait_time,
timeout
);
std::thread::sleep(std::time::Duration::from_secs(poll_interval));
wait_time += poll_interval;
}
// Download tarball if it doesn't exist
if !tarball_path.exists() {
log::debug!("Downloading chroot tarball for {}...", series);
Self::download_chroot_tarball(series, &tarball_path).await?;
} else {
log::debug!("Using cached chroot tarball for {}", series);
}
// Extract tarball to chroot directory
log::debug!("Extracting chroot tarball to {}...", chroot_path.display());
Self::extract_tarball(&tarball_path, chroot_path)?;
// Create device nodes in the chroot
log::debug!("Creating device nodes in chroot...");
Self::create_device_nodes(chroot_path)?;
Ok(())
}
async fn download_chroot_tarball(
series: &str,
tarball_path: &Path,
) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
// Create a lock file to make sure that noone tries to use the file while it's not fully downloaded
let lockfile_path = tarball_path.with_extension("lock");
ctx.command("touch")
.arg(lockfile_path.to_string_lossy().to_string())
.status()?;
// Make sure we have the right apt keyrings to mmdebstrap the chroot
crate::apt::keyring::download_trust_keyring(Some(ctx.clone()), series).await?;
// Use mmdebstrap to download the tarball to the cache directory
let status = ctx
.command("mmdebstrap")
.arg("--variant=buildd")
.arg("--mode=unshare")
.arg("--include=mount")
.arg("--format=tar")
.arg(series)
.arg(tarball_path.to_string_lossy().to_string())
.status()?;
if !status.success() {
// Remove file on error
let _ = ctx
.command("rm")
.arg("-f")
.arg(tarball_path.to_string_lossy().to_string())
.status();
let _ = ctx
.command("rm")
.arg("-f")
.arg(lockfile_path.to_string_lossy().to_string())
.status();
return Err(format!("Failed to download chroot tarball for series {}", series).into());
}
// Remove lockfile: tarball is fully downloaded
let _ = ctx
.command("rm")
.arg("-f")
.arg(lockfile_path.to_string_lossy().to_string())
.status();
Ok(())
}
fn extract_tarball(
tarball_path: &PathBuf,
chroot_path: &PathBuf,
) -> Result<(), Box<dyn Error>> {
// Create the chroot directory
fs::create_dir_all(chroot_path)?;
// Open the tarball file
let tarball_file = std::fs::File::open(tarball_path)?;
let xz_decoder = XzDecoder::new(tarball_file);
let mut archive = Archive::new(xz_decoder);
// Extract all files to the chroot directory
archive.unpack(chroot_path)?;
Ok(())
}
fn create_device_nodes(chroot_path: &Path) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
let dev_null_path = chroot_path.join("dev/null");
let dev_zero_path = chroot_path.join("dev/zero");
// Ensure /dev directory exists
fs::create_dir_all(chroot_path.join("dev"))?;
// Remove existing device nodes if they exist
let _ = ctx
.command("rm")
.arg("-f")
.arg(dev_null_path.to_string_lossy().to_string())
.status();
let _ = ctx
.command("rm")
.arg("-f")
.arg(dev_zero_path.to_string_lossy().to_string())
.status();
// Create new device nodes using fakeroot and mknod
let status_null = ctx
.command("sudo")
.arg("mknod")
.arg("-m")
.arg("666")
.arg(dev_null_path.to_string_lossy().to_string())
.arg("c")
.arg("1")
.arg("3")
.status()?;
let status_zero = ctx
.command("sudo")
.arg("mknod")
.arg("-m")
.arg("666")
.arg(dev_zero_path.to_string_lossy().to_string())
.arg("c")
.arg("1")
.arg("5")
.status()?;
if !status_null.success() || !status_zero.success() {
return Err("Failed to create device nodes".into());
}
Ok(())
}
/// Mark the build as successful, which will trigger chroot cleanup on drop
pub fn mark_build_successful(&mut self) {
self.build_succeeded = true;
}
}
impl Drop for EphemeralContextGuard {
fn drop(&mut self) {
log::debug!("Cleaning up ephemeral context ({:?})...", &self.chroot_path);
// Reset to normal context
if let Err(e) = context::manager().set_current(&self.previous_context) {
log::error!("Failed to restore context {}: {}", self.previous_context, e);
}
// Remove chroot directory only if build succeeded
if self.build_succeeded {
log::debug!(
"Build succeeded, removing chroot directory: {}",
self.chroot_path.display()
);
let result = context::current()
.command("sudo")
.arg("rm")
.arg("-rf")
.arg(&self.chroot_path)
.status();
match result {
Ok(status) => {
if !status.success() {
log::error!(
"Failed to remove chroot directory {}",
self.chroot_path.display()
);
} else {
log::debug!(
"Successfully removed chroot directory: {}",
self.chroot_path.display()
);
}
}
Err(e) => {
log::error!(
"Failed to execute cleanup command for {}: {}",
self.chroot_path.display(),
e
);
}
}
} else {
log::debug!(
"Build did not succeed or was not marked as successful, keeping chroot directory: {}",
self.chroot_path.display()
);
}
}
}

View File

@@ -6,7 +6,6 @@ use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::path::Path; use std::path::Path;
use crate::apt;
use crate::deb::cross; use crate::deb::cross;
pub fn build( pub fn build(
@@ -24,47 +23,12 @@ pub fn build(
let ctx = context::current(); let ctx = context::current();
// Parallel building: find local number of cores, and use that
let num_cores = ctx
.command("nproc")
.output()
.map(|output| {
if output.status.success() {
String::from_utf8_lossy(&output.stdout)
.trim()
.parse::<usize>()
.unwrap_or(1)
} else {
1 // Default to 1 if nproc fails
}
})
.unwrap_or(1); // Default to 1 if we can't execute the command
env.insert(
"DEB_BUILD_OPTIONS".to_string(),
format!("parallel={}", num_cores),
);
if cross { if cross {
log::debug!("Setting up environment for local cross build...");
cross::setup_environment(&mut env, arch)?; cross::setup_environment(&mut env, arch)?;
cross::ensure_repositories(arch, series)?; cross::ensure_repositories(arch, series)?;
} }
// UBUNTU: Ensure 'universe' repository is enabled
let mut sources = apt::sources::load(None)?;
let mut modified = false;
for source in &mut sources {
if source.uri.contains("ubuntu") && !source.components.contains(&"universe".to_string()) {
source.components.push("universe".to_string());
modified = true;
}
}
if modified {
apt::sources::save_legacy(None, sources, "/etc/apt/sources.list")?;
}
// Update package lists // Update package lists
log::debug!("Updating package lists for local build...");
let status = ctx let status = ctx
.command("apt-get") .command("apt-get")
.envs(env.clone()) .envs(env.clone())
@@ -78,7 +42,6 @@ pub fn build(
} }
// Install essential packages // Install essential packages
log::debug!("Installing essential packages for local build...");
let mut cmd = ctx.command("apt-get"); let mut cmd = ctx.command("apt-get");
cmd.envs(env.clone()) cmd.envs(env.clone())
@@ -100,16 +63,9 @@ pub fn build(
return Err("Could not install essential packages for the build".into()); return Err("Could not install essential packages for the build".into());
} }
// Find the actual package directory
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?;
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
// Install build dependencies // Install build dependencies
log::debug!("Installing build dependencies...");
let mut cmd = ctx.command("apt-get"); let mut cmd = ctx.command("apt-get");
cmd.current_dir(package_dir_str) cmd.current_dir(format!("{build_root}/{package}"))
.envs(env.clone()) .envs(env.clone())
.arg("-y") .arg("-y")
.arg("build-dep"); .arg("build-dep");
@@ -125,10 +81,9 @@ pub fn build(
} }
// Run the build step // Run the build step
log::debug!("Building (debian/rules build) package...");
let status = ctx let status = ctx
.command("debian/rules") .command("debian/rules")
.current_dir(package_dir_str) .current_dir(format!("{build_root}/{package}"))
.envs(env.clone()) .envs(env.clone())
.arg("build") .arg("build")
.status()?; .status()?;
@@ -139,7 +94,7 @@ pub fn build(
// Run the 'binary' step to produce deb // Run the 'binary' step to produce deb
let status = ctx let status = ctx
.command("fakeroot") .command("fakeroot")
.current_dir(package_dir_str) .current_dir(format!("{build_root}/{package}"))
.envs(env.clone()) .envs(env.clone())
.arg("debian/rules") .arg("debian/rules")
.arg("binary") .arg("binary")

View File

@@ -1,5 +1,4 @@
mod cross; mod cross;
mod ephemeral;
mod local; mod local;
mod sbuild; mod sbuild;
@@ -7,17 +6,13 @@ use crate::context;
use std::error::Error; use std::error::Error;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
/// Build mode for the binary build
#[derive(PartialEq)] #[derive(PartialEq)]
pub enum BuildMode { pub enum BuildMode {
/// Use `sbuild` for the build, configured in unshare mode
Sbuild, Sbuild,
/// Local build, directly on the context
Local, Local,
} }
/// Build package in 'cwd' to a .deb pub fn build_binary_package(
pub async fn build_binary_package(
arch: Option<&str>, arch: Option<&str>,
series: Option<&str>, series: Option<&str>,
cwd: Option<&Path>, cwd: Option<&Path>,
@@ -43,13 +38,21 @@ pub async fn build_binary_package(
let mode = if let Some(m) = mode { let mode = if let Some(m) = mode {
m m
} else { } else {
// By default, we use local build // For cross-compilation, we use local with an ephemeral context
// created by the cross-compilation handler (see below)
if cross {
BuildMode::Local BuildMode::Local
} else {
// By default, we use sbuild
BuildMode::Sbuild
}
}; };
// Create an ephemeral unshare context for all Local builds // Specific case: native cross-compilation, we don't allow that
let mut guard = if mode == BuildMode::Local { // instead this wraps to an automatic unshare chroot
Some(ephemeral::EphemeralContextGuard::new(series).await?) // using an ephemeral context
let _guard = if cross && mode == BuildMode::Local {
Some(cross::EphemeralContextGuard::new(series)?)
} else { } else {
None None
}; };
@@ -82,95 +85,9 @@ pub async fn build_binary_package(
} }
} }
// Mark build as successful to trigger chroot cleanup
if let Some(ref mut g) = guard {
g.mark_build_successful();
}
Ok(()) Ok(())
} }
/// Find the current package directory by trying both patterns:
/// - package/package
/// - package/package-origversion
pub(crate) fn find_package_directory(
parent_dir: &Path,
package: &str,
version: &str,
) -> Result<PathBuf, Box<dyn Error>> {
let ctx = context::current();
// Try package/package pattern first
let package_dir = parent_dir.join(package).join(package);
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Compute origversion from version: remove everything after first '-', after stripping epoch
let version_without_epoch = version.split_once(':').map(|(_, v)| v).unwrap_or(version);
let origversion = version_without_epoch
.split_once('-')
.map(|(v, _)| v)
.unwrap_or(version);
// Try package/package-origversion pattern
let package_dir = parent_dir
.join(package)
.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Try 'package' only
let package_dir = parent_dir.join(package);
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// Try package-origversion only
let package_dir = parent_dir.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? && ctx.exists(&package_dir.join("debian"))? {
return Ok(package_dir);
}
// List all directories under 'package/' and log them
let package_parent = parent_dir;
if ctx.exists(package_parent)? {
log::debug!(
"Listing all directories under '{}':",
package_parent.display()
);
let entries = ctx.list_files(package_parent)?;
let mut found_dirs = Vec::new();
for entry in entries {
if entry.is_dir() {
if let Some(file_name) = entry.file_name() {
found_dirs.push(file_name.to_string_lossy().into_owned());
}
log::debug!(" - {}", entry.display());
}
}
// If we found directories but none matched our patterns, provide helpful error
if !found_dirs.is_empty() {
return Err(format!(
"Could not find package directory for {} in {}. Found directories: {}",
package,
parent_dir.display(),
found_dirs.join(", ")
)
.into());
}
}
Err(format!(
"Could not find package directory for {} in {}",
package,
parent_dir.display()
)
.into())
}
fn find_dsc_file( fn find_dsc_file(
build_root: &str, build_root: &str,
package: &str, package: &str,
@@ -181,9 +98,7 @@ fn find_dsc_file(
let dsc_name = format!("{}_{}.dsc", package, version_without_epoch); let dsc_name = format!("{}_{}.dsc", package, version_without_epoch);
let dsc_path = PathBuf::from(build_root).join(&dsc_name); let dsc_path = PathBuf::from(build_root).join(&dsc_name);
// Check if the .dsc file exists in current context if !dsc_path.exists() {
let ctx = context::current();
if !ctx.exists(&dsc_path)? {
return Err(format!("Could not find .dsc file at {}", dsc_path.display()).into()); return Err(format!("Could not find .dsc file at {}", dsc_path.display()).into());
} }
Ok(dsc_path) Ok(dsc_path)
@@ -191,46 +106,28 @@ fn find_dsc_file(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use serial_test::serial; async fn test_build_end_to_end(package: &str, series: &str, arch: Option<&str>, cross: bool) {
async fn test_build_end_to_end(
package: &str,
series: &str,
dist: Option<&str>,
arch: Option<&str>,
cross: bool,
) {
log::info!(
"Starting end-to-end test for package: {} (series: {}, arch: {:?}, cross: {})",
package,
series,
arch,
cross
);
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let cwd = temp_dir.path(); let cwd = temp_dir.path();
log::debug!("Created temporary directory: {}", cwd.display());
log::info!("Pulling package {} from {}...", package, series); crate::pull::pull(
let package_info = package,
crate::package_info::lookup(package, None, Some(series), "", dist, None, None) "",
.await Some(series),
.expect("Cannot lookup package information"); "",
crate::pull::pull(&package_info, Some(cwd), None, true) "",
Some("ubuntu"),
Some(cwd),
None,
)
.await .await
.expect("Cannot pull package"); .expect("Cannot pull package");
log::info!("Successfully pulled package {}", package);
// Change directory to the package directory // Change directory to the package directory
let cwd = crate::deb::find_package_directory(cwd, package, &package_info.stanza.version) let cwd = cwd.join(package).join(package);
.expect("Cannot find package directory");
log::debug!("Package directory: {}", cwd.display());
log::info!("Starting binary package build...");
crate::deb::build_binary_package(arch, Some(series), Some(&cwd), cross, None) crate::deb::build_binary_package(arch, Some(series), Some(&cwd), cross, None)
.await
.expect("Cannot build binary package (deb)"); .expect("Cannot build binary package (deb)");
log::info!("Successfully built binary package");
// Check that the .deb files are present // Check that the .deb files are present
let parent_dir = cwd.parent().expect("Cannot find parent directory"); let parent_dir = cwd.parent().expect("Cannot find parent directory");
@@ -240,64 +137,17 @@ mod tests {
.filter(|entry| entry.path().extension().is_some_and(|ext| ext == "deb")) .filter(|entry| entry.path().extension().is_some_and(|ext| ext == "deb"))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
log::info!("Found {} .deb files after build", deb_files.len());
for file in &deb_files {
log::debug!(" - {}", file.path().display());
}
assert!(!deb_files.is_empty(), "No .deb files found after build"); assert!(!deb_files.is_empty(), "No .deb files found after build");
log::info!(
"End-to-end test completed successfully for package: {}",
package
);
} }
// Tests below will be marked 'serial'
// As builds are using ephemeral contexts, tests running on the same
// process could use the ephemeral context of another thread and
// interfere with each other.
// FIXME: This is not ideal. In the future, we might want to
// either explicitely pass context (instead of shared state) or
// fork for building?
#[tokio::test] #[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_hello_ubuntu_end_to_end() { async fn test_deb_hello_ubuntu_end_to_end() {
test_build_end_to_end("hello", "noble", None, None, false).await; test_build_end_to_end("hello", "noble", None, false).await;
} }
/// This ensures that we can cross-build packages
#[tokio::test] #[tokio::test]
#[test_log::test]
#[cfg(target_arch = "x86_64")] #[cfg(target_arch = "x86_64")]
#[serial]
async fn test_deb_hello_ubuntu_cross_end_to_end() { async fn test_deb_hello_ubuntu_cross_end_to_end() {
test_build_end_to_end("hello", "noble", None, Some("riscv64"), true).await; test_build_end_to_end("hello", "noble", Some("riscv64"), true).await;
}
/// This ensures that we can build packages from sid, even on older
/// releases. It can sometimes be difficult with mmdebstrap issues
/// for example.
#[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_hello_debian_sid_end_to_end() {
test_build_end_to_end("hello", "sid", None, None, false).await;
}
/// This is a specific test case for the latest gcc package on Debian
/// The GCC package is complex and hard to build, with specific stages
/// and system-bound scripts. Building it requires specific things that
/// we want to ensure are not broken.
/// NOTE: Ideally, we want to run this in CI, but it takes more than 20h
/// to fully build the gcc-15 package on an amd64 builder, which is too
/// much time.
#[ignore]
#[cfg(target_arch = "x86_64")]
#[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_gcc_debian_end_to_end() {
test_build_end_to_end("gcc-15", "sid", None, None, false).await;
} }
} }

View File

@@ -2,28 +2,19 @@
/// Call 'sbuild' with the dsc file to build the package with unshare /// Call 'sbuild' with the dsc file to build the package with unshare
use crate::context; use crate::context;
use std::error::Error; use std::error::Error;
use std::path::Path;
pub fn build( pub fn build(
package: &str, package: &str,
version: &str, _version: &str,
arch: &str, arch: &str,
series: &str, series: &str,
build_root: &str, build_root: &str,
cross: bool, cross: bool,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
let ctx = context::current(); let ctx = context::current();
// Find the actual package directory
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?;
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
let mut cmd = ctx.command("sbuild"); let mut cmd = ctx.command("sbuild");
cmd.current_dir(package_dir_str); cmd.current_dir(format!("{}/{}", build_root, package));
cmd.arg("--chroot-mode=unshare"); cmd.arg("--chroot-mode=unshare");
cmd.arg("--no-clean-source");
if cross { if cross {
cmd.arg(format!("--host={}", arch)); cmd.arg(format!("--host={}", arch));

View File

@@ -1,373 +0,0 @@
use chrono::NaiveDate;
use lazy_static::lazy_static;
use serde::Deserialize;
use std::error::Error;
use std::path::Path;
#[derive(Debug, Clone)]
/// Information about a specific distribution series
pub struct SeriesInformation {
    /// Distribution series (short name, e.g. "noble" or "bookworm")
    pub series: String,
    /// Codename, i.e. full name of series
    pub codename: String,
    /// Series version as numbers; `None` when the source CSV row has no
    /// version cell (see `parse_series_csv`)
    pub version: Option<String>,
    /// Series creation date
    pub created: NaiveDate,
    /// Series release date; `None` for a series not yet released
    pub release: Option<NaiveDate>,
    /// Series end-of-life date; `None` when no EOL date is published
    pub eol: Option<NaiveDate>,
}
#[derive(Debug, Deserialize)]
/// Where the distro-info series CSV can be obtained: a local file path
/// and a network URL used as fallback (see `get_ordered_series`).
struct SeriesInfo {
    // Path of a locally installed CSV, used when the file exists
    local: String,
    // URL to download the CSV from when no local copy exists
    network: String,
}
#[derive(Debug, Deserialize)]
/// Per-distribution configuration loaded from 'distro_info.yml'.
struct DistData {
    // Archive base URL (see get_base_url)
    base_url: String,
    // Archive keyring URL; for Debian it contains a "{series_num}"
    // placeholder substituted in get_keyring_url
    archive_keyring: String,
    // Extra pockets available for this distribution; the release pocket
    // (empty string) is appended by get_dist_pockets
    pockets: Vec<String>,
    // Where to obtain the series CSV (local path + network fallback)
    series: SeriesInfo,
}
#[derive(Debug, Deserialize)]
/// Root of the bundled 'distro_info.yml' document: one entry per
/// supported distribution (e.g. "debian", "ubuntu"), keyed by name.
struct Data {
    dist: std::collections::HashMap<String, DistData>,
}
// Distribution metadata (base URLs, keyrings, pockets, series data
// sources), embedded at compile time from 'distro_info.yml'.
const DATA_YAML: &str = include_str!("../distro_info.yml");
lazy_static! {
    // Parsed once on first access. The unwrap panics at first use if the
    // bundled YAML is malformed — acceptable since the file ships with
    // the binary and is validated at build/test time.
    static ref DATA: Data = serde_yaml::from_str(DATA_YAML).unwrap();
}
fn parse_series_csv(content: &str) -> Result<Vec<SeriesInformation>, Box<dyn Error>> {
let mut rdr = csv::ReaderBuilder::new()
.flexible(true)
.from_reader(content.as_bytes());
let headers = rdr.headers()?.clone();
let series_idx = headers
.iter()
.position(|h| h == "series")
.ok_or("Column 'series' not found")?;
let codename_idx = headers
.iter()
.position(|h| h == "codename")
.ok_or("Column 'codename' not found")?;
let version_idx = headers
.iter()
.position(|h| h == "version")
.ok_or("Column 'version' not found")?;
let created_idx = headers
.iter()
.position(|h| h == "created")
.ok_or("Column 'created' not found")?;
let release_idx = headers
.iter()
.position(|h| h == "release")
.ok_or("Column 'release' not found")?;
let eol_idx = headers
.iter()
.position(|h| h == "eol")
.ok_or("Column 'eol' not found")?;
let mut series_info_list = Vec::new();
for result in rdr.records() {
let record = result?;
let series = record.get(series_idx).unwrap().to_string();
let codename = record.get(codename_idx).unwrap().to_string();
let version = record.get(version_idx).map(|s| s.to_string());
let created = record
.get(created_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap())
.unwrap();
let release = record
.get(release_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap());
let eol = record
.get(eol_idx)
.map(|date_str| NaiveDate::parse_from_str(date_str, "%Y-%m-%d").unwrap());
series_info_list.push(SeriesInformation {
series,
codename,
version,
created,
release,
eol,
});
}
// Revert to sort by most recent
series_info_list.reverse();
Ok(series_info_list)
}
/// Get time-ordered list of series information for a distribution, development series first
///
/// Prefers the locally installed distro-info CSV when it exists and
/// falls back to downloading the data over the network.
///
/// # Errors
/// Returns an error for an unknown distribution, on I/O or network
/// failure, or when the CSV cannot be parsed.
pub async fn get_ordered_series(dist: &str) -> Result<Vec<SeriesInformation>, Box<dyn Error>> {
    let series_info = &DATA
        .dist
        .get(dist)
        .ok_or_else(|| format!("Unsupported distribution: {}", dist))?
        .series;
    let local_path = Path::new(series_info.local.as_str());
    let content = if local_path.exists() {
        // Read the same path that was checked above (previously this read
        // a hard-coded "/usr/share/distro-info/{dist}.csv" path, which
        // could diverge from the configured 'local' path).
        std::fs::read_to_string(local_path)?
    } else {
        reqwest::get(series_info.network.as_str())
            .await?
            .text()
            .await?
    };
    parse_series_csv(&content)
}
/// Get time-ordered list of series names for a distribution, development series first
pub async fn get_ordered_series_name(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
    // Consume the series list and keep only the names.
    let names = get_ordered_series(dist)
        .await?
        .into_iter()
        .map(|info| info.series)
        .collect();
    Ok(names)
}
/// Get the latest released series for a dist (excluding future releases and special cases like sid)
pub async fn get_latest_released_series(dist: &str) -> Result<String, Box<dyn Error>> {
let series_info_list = get_ordered_series(dist).await?;
let today = chrono::Local::now().date_naive();
let mut released_series = Vec::new();
for series_info in series_info_list {
// Skip 'sid' and series without release dates or with future release dates
if series_info.series != "sid"
&& series_info.series != "experimental"
&& series_info.release.is_some()
&& series_info.release.unwrap() <= today
{
released_series.push(series_info);
}
}
// Sort by release date descending (newest first)
released_series.sort_by(|a, b| b.release.cmp(&a.release));
if let Some(latest) = released_series.first() {
Ok(latest.series.clone())
} else {
Err("No released series found".into())
}
}
/// Obtain the distribution (eg. debian, ubuntu) from a distribution series (eg. noble, bookworm)
pub async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
    let target = series.to_string();
    // Probe every known distribution until one lists this series.
    for dist in DATA.dist.keys() {
        let known_series = get_ordered_series_name(dist).await?;
        if known_series.contains(&target) {
            return Ok(dist.clone());
        }
    }
    Err(format!("Unknown series: {}", series).into())
}
/// Get the package pockets available for a given distribution
///
/// Example: get_dist_pockets(ubuntu) => ["proposed", "updates", ""]
///
/// Panics if the distribution is not present in the bundled data.
pub fn get_dist_pockets(dist: &str) -> Vec<String> {
    let dist_data = DATA.dist.get(dist).unwrap();
    // The 'main' pocket is represented by the empty string and is always
    // appended last.
    dist_data
        .pockets
        .iter()
        .cloned()
        .chain(std::iter::once(String::new()))
        .collect()
}
/// Get the sources URL for a distribution, series, pocket, and component
///
/// An empty pocket denotes the release pocket and adds no suffix to the
/// series name.
pub fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
    let suffix = match pocket {
        "" => String::new(),
        p => format!("-{p}"),
    };
    format!("{base_url}/dists/{series}{suffix}/{component}/source/Sources.gz")
}
/// Get the archive base URL for a distribution
///
/// Example: ubuntu => http://archive.ubuntu.com/ubuntu
///
/// Panics if the distribution is not present in the bundled data.
pub fn get_base_url(dist: &str) -> String {
    let dist_data = DATA.dist.get(dist).unwrap();
    dist_data.base_url.clone()
}
/// Obtain the URL for the archive keyring of a distribution series
///
/// For Debian, the configured keyring URL contains a `{series_num}`
/// placeholder that is substituted with the numeric version of the
/// series; 'sid' and 'experimental' have no version number, so the
/// latest released series number is used instead. Other distributions
/// (e.g. Ubuntu) use their keyring URL as-is.
///
/// # Errors
/// Returns an error for an unknown series or distribution, or when no
/// series number can be resolved for a Debian series.
pub async fn get_keyring_url(series: &str) -> Result<String, Box<dyn Error>> {
    let dist = get_dist_from_series(series).await?;
    let dist_data = DATA
        .dist
        .get(&dist)
        .ok_or(format!("Unsupported distribution: {}", dist))?;
    if dist != "debian" {
        // For other distributions like Ubuntu, use the keyring directly
        return Ok(dist_data.archive_keyring.clone());
    }
    // For Debian, we need the series number to form the keyring URL.
    // Special case for 'sid'/'experimental' - use the latest released series.
    let lookup_series = if series == "sid" || series == "experimental" {
        get_latest_released_series("debian").await?
    } else {
        series.to_string()
    };
    // Report an unknown series as an error instead of panicking.
    let series_num = get_debian_series_number(&lookup_series)
        .await?
        .ok_or_else(|| format!("No series number found for '{}'", lookup_series))?;
    // Replace the {series_num} placeholder with the resolved number
    Ok(dist_data
        .archive_keyring
        .replace("{series_num}", &series_num))
}
/// Obtain the URL for the 'Release' file of a distribution series
///
/// An empty pocket denotes the release pocket and adds no suffix.
fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
    if pocket.is_empty() {
        format!("{base_url}/dists/{series}/Release")
    } else {
        format!("{base_url}/dists/{series}-{pocket}/Release")
    }
}
/// Obtain the components of a distribution series by parsing the 'Release' file
pub async fn get_components(
    base_url: &str,
    series: &str,
    pocket: &str,
) -> Result<Vec<String>, Box<dyn Error>> {
    let url = get_release_url(base_url, series, pocket);
    log::debug!("Fetching Release file from: {}", url);
    let body = reqwest::get(&url).await?.text().await?;
    // Find the "Components:" line and split its value on whitespace.
    let components: Option<Vec<String>> = body
        .lines()
        .find_map(|line| line.strip_prefix("Components:"))
        .map(|rest| rest.split_whitespace().map(str::to_string).collect());
    components.ok_or_else(|| "Components not found.".into())
}
/// Map a Debian series name to its version number
///
/// Returns `Ok(None)` when the series is not listed in the CSV data.
pub async fn get_debian_series_number(series: &str) -> Result<Option<String>, Box<dyn Error>> {
    let series_info = &DATA.dist.get("debian").unwrap().series;
    // Prefer the locally installed CSV, fall back to the network copy.
    let local_path = Path::new(series_info.local.as_str());
    let content = if local_path.exists() {
        std::fs::read_to_string(local_path)?
    } else {
        reqwest::get(series_info.network.as_str())
            .await?
            .text()
            .await?
    };
    let mut rdr = csv::ReaderBuilder::new()
        .flexible(true)
        .from_reader(content.as_bytes());
    let headers = rdr.headers()?.clone();
    let series_idx = headers
        .iter()
        .position(|h| h == "series")
        .ok_or("Column 'series' not found")?;
    let version_idx = headers
        .iter()
        .position(|h| h == "version")
        .ok_or("Column 'version' not found")?;
    // Case-insensitive match on the series name.
    let wanted = series.to_lowercase();
    for result in rdr.records() {
        let record = result?;
        let cells = (record.get(series_idx), record.get(version_idx));
        if let (Some(name), Some(version)) = cells {
            if name.to_lowercase() == wanted {
                return Ok(Some(version.to_string()));
            }
        }
    }
    Ok(None)
}
#[cfg(test)]
mod tests {
    // NOTE(review): these tests exercise the real distro-info data.
    // When no local CSV is installed, get_ordered_series falls back to a
    // network download, so they may require network access.
    use super::*;
    // The Debian series list must include the rolling 'sid' series and a
    // stable release.
    #[tokio::test]
    async fn test_get_debian_series() {
        let series = get_ordered_series_name("debian").await.unwrap();
        assert!(series.contains(&"sid".to_string()));
        assert!(series.contains(&"bookworm".to_string()));
    }
    // The Ubuntu series list must include recent LTS releases.
    #[tokio::test]
    async fn test_get_ubuntu_series() {
        let series = get_ordered_series_name("ubuntu").await.unwrap();
        assert!(series.contains(&"noble".to_string()));
        assert!(series.contains(&"jammy".to_string()));
    }
    // Series names must map back to the correct distribution.
    #[tokio::test]
    async fn test_get_dist_from_series() {
        assert_eq!(get_dist_from_series("sid").await.unwrap(), "debian");
        assert_eq!(get_dist_from_series("noble").await.unwrap(), "ubuntu");
    }
    #[tokio::test]
    async fn test_get_debian_series_number() {
        // Test with known Debian series
        let bookworm_number = get_debian_series_number("bookworm").await.unwrap();
        assert!(bookworm_number.is_some());
        assert_eq!(bookworm_number.unwrap(), "12");
        let trixie_number = get_debian_series_number("trixie").await.unwrap();
        assert!(trixie_number.is_some());
        assert_eq!(trixie_number.unwrap(), "13");
        // Test with unknown series
        let unknown_number = get_debian_series_number("unknown").await.unwrap();
        assert!(unknown_number.is_none());
    }
    #[tokio::test]
    async fn test_get_keyring_url_sid() {
        // Test that 'sid' uses the latest released version for keyring URL
        let sid_keyring = get_keyring_url("sid").await.unwrap();
        let latest_released = get_latest_released_series("debian").await.unwrap();
        let latest_keyring = get_keyring_url(&latest_released).await.unwrap();
        // The keyring URL for 'sid' should be the same as the latest released version
        assert_eq!(sid_keyring, latest_keyring);
    }
    #[tokio::test]
    async fn test_get_latest_released_debian_series() {
        // Test that we get a valid released series
        let latest_released = get_latest_released_series("debian").await.unwrap();
        // Should not be 'sid' or 'experimental'
        assert_ne!(latest_released, "sid");
        assert_ne!(latest_released, "experimental");
        // Should have a version number
        let version = get_debian_series_number(&latest_released).await.unwrap();
        assert!(version.is_some());
    }
}

View File

@@ -1,34 +1,10 @@
//! pkh: Debian packaging helper
//!
//! pkh allows working with Debian packages, with multiple actions/submodules
#![deny(missing_docs)]
/// Handle apt data (apt sources)
pub mod apt;
/// Build a Debian source package (into a .dsc)
pub mod build; pub mod build;
/// Parse or edit a Debian changelog of a source package
pub mod changelog; pub mod changelog;
/// Build a Debian package into a binary (.deb) pub mod context;
pub mod deb; pub mod deb;
/// Obtain general information about distribution, series, etc
pub mod distro_info;
/// Obtain information about one or multiple packages
pub mod package_info; pub mod package_info;
/// Download a source package locally
pub mod pull; pub mod pull;
/// Handle context for .deb building: locally, over ssh, in a chroot...
pub mod context;
/// Utility functions
pub(crate) mod utils;
/// Optional callback function (taking 4 arguments)
/// - Name of the current main operation (e.g. pulling package)
/// - Name of the current nested operation (e.g. cloning git repo)
/// - Progress, position, index of current operation (e.g. amount of data downloaded)
/// - Total amount for current operation (e.g. size of the file to download)
pub type ProgressCallback<'a> = Option<&'a dyn Fn(&str, &str, usize, usize)>; pub type ProgressCallback<'a> = Option<&'a dyn Fn(&str, &str, usize, usize)>;
/// Returns the architecture of current CPU, debian-compatible /// Returns the architecture of current CPU, debian-compatible

View File

@@ -7,6 +7,8 @@ use pkh::context::ContextConfig;
extern crate flate2; extern crate flate2;
use pkh::pull::pull;
use pkh::changelog::generate_entry; use pkh::changelog::generate_entry;
use indicatif_log_bridge::LogWrapper; use indicatif_log_bridge::LogWrapper;
@@ -37,7 +39,6 @@ fn main() {
.required(false), .required(false),
) )
.arg(arg!(-v --version <version> "Target package version").required(false)) .arg(arg!(-v --version <version> "Target package version").required(false))
.arg(arg!(--archive "Only use the archive to download package source, not git").required(false))
.arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false)) .arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false))
.arg(arg!(<package> "Target package")), .arg(arg!(<package> "Target package")),
) )
@@ -48,10 +49,10 @@ fn main() {
.arg(arg!(--backport "This changelog is for a backport entry").required(false)) .arg(arg!(--backport "This changelog is for a backport entry").required(false))
.arg(arg!(-v --version <version> "Target version").required(false)), .arg(arg!(-v --version <version> "Target version").required(false)),
) )
.subcommand(Command::new("build").about("Build the source package (into a .dsc)")) .subcommand(Command::new("build").about("Build the source package"))
.subcommand( .subcommand(
Command::new("deb") Command::new("deb")
.about("Build the source package into binary package (.deb)") .about("Build the binary package")
.arg(arg!(-s --series <series> "Target distribution series").required(false)) .arg(arg!(-s --series <series> "Target distribution series").required(false))
.arg(arg!(-a --arch <arch> "Target architecture").required(false)) .arg(arg!(-a --arch <arch> "Target architecture").required(false))
.arg(arg!(--cross "Cross-compile for target architecture (instead of qemu-binfmt)") .arg(arg!(--cross "Cross-compile for target architecture (instead of qemu-binfmt)")
@@ -93,37 +94,28 @@ fn main() {
let package = sub_matches.get_one::<String>("package").expect("required"); let package = sub_matches.get_one::<String>("package").expect("required");
let series = sub_matches.get_one::<String>("series").map(|s| s.as_str()); let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str()); let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str());
let version = sub_matches.get_one::<String>("version").map(|s| s.as_str()); let version = sub_matches
let ppa = sub_matches.get_one::<String>("ppa").map(|s| s.as_str()); .get_one::<String>("version")
let archive = sub_matches.get_one::<bool>("archive").unwrap_or(&false); .map(|s| s.as_str())
.unwrap_or("");
let (pb, progress_callback) = ui::create_progress_bar(&multi); let ppa = sub_matches
.get_one::<String>("ppa")
// Convert PPA to base URL if provided .map(|s| s.as_str())
let base_url = ppa.and_then(|ppa_str| { .unwrap_or("");
// PPA format: user/ppa_name
let parts: Vec<&str> = ppa_str.split('/').collect();
if parts.len() == 2 {
Some(pkh::package_info::ppa_to_base_url(parts[0], parts[1]))
} else {
None
}
});
// Since pull is async, we need to block on it // Since pull is async, we need to block on it
if let Err(e) = rt.block_on(async { let (pb, progress_callback) = ui::create_progress_bar(&multi);
let package_info = pkh::package_info::lookup(
if let Err(e) = rt.block_on(pull(
package, package,
version, version,
series, series,
"", "",
ppa,
dist, dist,
base_url.as_deref(), None,
Some(&progress_callback), Some(&progress_callback),
) )) {
.await?;
pkh::pull::pull(&package_info, None, Some(&progress_callback), *archive).await
}) {
pb.finish_and_clear(); pb.finish_and_clear();
error!("{}", e); error!("{}", e);
std::process::exit(1); std::process::exit(1);
@@ -166,10 +158,9 @@ fn main() {
_ => None, _ => None,
}; };
if let Err(e) = rt.block_on(async { if let Err(e) =
pkh::deb::build_binary_package(arch, series, Some(cwd.as_path()), *cross, mode) pkh::deb::build_binary_package(arch, series, Some(cwd.as_path()), *cross, mode)
.await {
}) {
error!("{}", e); error!("{}", e);
std::process::exit(1); std::process::exit(1);
} }

View File

@@ -1,22 +1,15 @@
use chrono::NaiveDate;
use flate2::read::GzDecoder; use flate2::read::GzDecoder;
use std::collections::HashMap; use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::io::Read; use std::io::Read;
use std::path::Path;
use crate::ProgressCallback; use crate::ProgressCallback;
use log::{debug, warn}; use log::{debug, warn};
/// Convert a PPA specification to a base URL const BASE_URL_UBUNTU: &str = "http://archive.ubuntu.com/ubuntu";
/// const BASE_URL_DEBIAN: &str = "http://deb.debian.org/debian";
/// # Arguments
/// * user: user for the PPA
/// * name: name of the PPA
///
/// # Returns
/// * The base URL for the PPA (e.g., "https://ppa.launchpadcontent.net/user/ppa_name/ubuntu/")
pub fn ppa_to_base_url(user: &str, name: &str) -> String {
format!("https://ppa.launchpadcontent.net/{}/{}/ubuntu", user, name)
}
async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn Error>> { async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn Error>> {
let url = format!("https://git.launchpad.net/ubuntu/+source/{}", package); let url = format!("https://git.launchpad.net/ubuntu/+source/{}", package);
@@ -32,86 +25,209 @@ async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn E
} }
} }
/// A File used in a source package fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
let mut rdr = csv::ReaderBuilder::new()
.flexible(true)
.from_reader(content.as_bytes());
let headers = rdr.headers()?.clone();
let series_idx = headers
.iter()
.position(|h| h == "series")
.ok_or("Column 'series' not found")?;
let created_idx = headers
.iter()
.position(|h| h == "created")
.ok_or("Column 'created' not found")?;
let mut entries = Vec::new();
for result in rdr.records() {
let record = result?;
if let (Some(s), Some(c)) = (record.get(series_idx), record.get(created_idx))
&& let Ok(date) = NaiveDate::parse_from_str(c, "%Y-%m-%d")
{
entries.push((s.to_string(), date));
}
}
// Sort by date descending (newest first)
entries.sort_by(|a, b| b.1.cmp(&a.1));
Ok(entries.into_iter().map(|(s, _)| s).collect())
}
async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
} else {
reqwest::get(
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
.as_str(),
)
.await?
.text()
.await?
};
let mut series = parse_series_csv(&content)?;
// For Debian, ensure 'sid' is first if it's not (it usually doesn't have a date or is very old/new depending on file)
// Actually in the file sid has 1993 date.
// But we want to try 'sid' (unstable) first for Debian.
if dist == "debian" {
series.retain(|s| s != "sid");
series.insert(0, "sid".to_string());
}
Ok(series)
}
// Keep existing functions for compatibility or refactor them to use get_ordered_series
async fn get_series_from_url(url: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = reqwest::get(url).await?.text().await?;
parse_series_csv(&content)
}
fn get_series_from_file(path: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = std::fs::read_to_string(path)?;
parse_series_csv(&content)
}
pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
get_series_from_file(format!("/usr/share/distro-info/{dist}.csv").as_str())
} else {
get_series_from_url(
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
.as_str(),
)
.await
}
}
pub async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
let debian_series = get_dist_series("debian").await?;
if debian_series.contains(&series.to_string()) {
return Ok("debian".to_string());
}
let ubuntu_series = get_dist_series("ubuntu").await?;
if ubuntu_series.contains(&series.to_string()) {
return Ok("ubuntu".to_string());
}
Err(format!("Unknown series: {}", series).into())
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct FileEntry { pub struct FileEntry {
/// Name of the file
pub name: String, pub name: String,
/// Size of the file
pub size: u64, pub size: u64,
/// SHA256 hash for the file
pub sha256: String, pub sha256: String,
} }
/// A package 'stanza' as found is 'Sources.gz' files, containing basic information about a source package
#[derive(Debug)] #[derive(Debug)]
pub struct PackageStanza { pub struct PackageStanza {
/// Name of the package
pub package: String, pub package: String,
/// Version number for the package
pub version: String, pub version: String,
/// Directory field in the stanza
pub directory: String, pub directory: String,
/// Source package format (e.g. '3.0 (quilt)')
pub format: String, pub format: String,
/// Vcs-Git field in the stanza
pub vcs_git: Option<String>, pub vcs_git: Option<String>,
/// Vcs-Browser field in the stanza
pub vcs_browser: Option<String>, pub vcs_browser: Option<String>,
/// Files present in the source package
pub files: Vec<FileEntry>, pub files: Vec<FileEntry>,
} }
/// Source package information
#[derive(Debug)] #[derive(Debug)]
pub struct PackageInfo { pub struct PackageInfo {
/// Source 'stanza' for the package, containing basic information
pub stanza: PackageStanza,
/// Distribution for the package
pub dist: String, pub dist: String,
/// Distribution series for the package
pub series: String, pub series: String,
/// Preferred VCS for the source package pub stanza: PackageStanza,
///
/// Should be Launchpad on Ubuntu, and Salsa on Debian
pub preferred_vcs: Option<String>, pub preferred_vcs: Option<String>,
/// URL for the files of the source package
pub archive_url: String, pub archive_url: String,
} }
impl PackageInfo { impl PackageInfo {
/// Returns true if the package is a Debian native package (no orig)
pub fn is_native(&self) -> bool { pub fn is_native(&self) -> bool {
self.stanza.format.contains("(native)") self.stanza.format.contains("(native)")
} }
} }
struct DebianSources { fn get_dist_pockets(dist: &str) -> Vec<&'static str> {
splitted_sources: std::str::Split<'static, &'static str>, match dist {
"ubuntu" => vec!["proposed", "updates", ""],
"debian" => vec!["proposed-updates", "updates", ""],
_ => vec![""],
}
} }
impl DebianSources {
fn new(data: &[u8]) -> Result<DebianSources, Box<dyn Error>> { fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
// Gz-decode 'Sources.gz' file into a string, and split it on stanzas let pocket_full = if pocket.is_empty() {
String::new()
} else {
format!("-{}", pocket)
};
format!("{base_url}/dists/{series}{pocket_full}/{component}/source/Sources.gz")
}
fn get_base_url(dist: &str) -> &str {
match dist {
"ubuntu" => BASE_URL_UBUNTU,
"debian" => BASE_URL_DEBIAN,
_ => panic!("Unknown distribution"),
}
}
/*
* Obtain the URL for the 'Release' file of a distribution series
*/
fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
let pocket_full = if pocket.is_empty() {
String::new()
} else {
format!("-{}", pocket)
};
format!("{base_url}/dists/{series}{pocket_full}/Release")
}
/*
* Obtain the components of a distribution series by parsing the 'Release' file
*/
async fn get_components(
base_url: &str,
series: &str,
pocket: &str,
) -> Result<Vec<String>, Box<dyn Error>> {
let url = get_release_url(base_url, series, pocket);
debug!("Fetching Release file from: {}", url);
let content = reqwest::get(&url).await?.text().await?;
for line in content.lines() {
if line.starts_with("Components:")
&& let Some((_, components)) = line.split_once(':')
{
return Ok(components
.split_whitespace()
.map(|s| s.to_string())
.collect());
}
}
Err("Components not found.".into())
}
/*
* Parse a 'Sources.gz' debian package file data, to look for a target package and
* return the data for that package stanza
*/
fn parse_sources(
data: &[u8],
target_package: &str,
target_version: Option<&str>,
) -> Result<Option<PackageStanza>, Box<dyn Error>> {
let mut d = GzDecoder::new(data); let mut d = GzDecoder::new(data);
let mut s = String::new(); let mut s = String::new();
d.read_to_string(&mut s)?; d.read_to_string(&mut s)?;
// Convert the string to a static lifetime by leaking it for stanza in s.split("\n\n") {
let static_str = Box::leak(s.into_boxed_str());
let splitted = static_str.split("\n\n");
Ok(DebianSources {
splitted_sources: splitted,
})
}
}
impl Iterator for DebianSources {
type Item = PackageStanza;
fn next(&mut self) -> Option<Self::Item> {
let stanza = self.splitted_sources.next()?;
// Parse stanza into a hashmap of strings, the fields
let mut fields: HashMap<String, String> = HashMap::new(); let mut fields: HashMap<String, String> = HashMap::new();
let mut current_key = String::new(); let mut current_key = String::new();
@@ -132,13 +248,20 @@ impl Iterator for DebianSources {
} }
} }
let pkg = fields.get("Package"); if let Some(pkg) = fields.get("Package")
if pkg.is_none() { && pkg == target_package
// Skip empty stanza {
return self.next(); // Check version if requested
if let Some(ver) = target_version {
if let Some(pkg_ver) = fields.get("Version") {
if pkg_ver != ver {
continue;
}
} else {
continue;
}
} }
// Parse package files
let mut files = Vec::new(); let mut files = Vec::new();
if let Some(checksums) = fields.get("Checksums-Sha256") { if let Some(checksums) = fields.get("Checksums-Sha256") {
for line in checksums.lines() { for line in checksums.lines() {
@@ -153,9 +276,9 @@ impl Iterator for DebianSources {
} }
} }
Some(PackageStanza { return Ok(Some(PackageStanza {
package: fields.get("Package").unwrap().to_string(), package: pkg.clone(),
version: fields.get("Version").unwrap().to_string(), version: fields.get("Version").cloned().unwrap_or_default(),
directory: fields.get("Directory").cloned().unwrap_or_default(), directory: fields.get("Directory").cloned().unwrap_or_default(),
format: fields format: fields
.get("Format") .get("Format")
@@ -164,35 +287,20 @@ impl Iterator for DebianSources {
vcs_git: fields.get("Vcs-Git").cloned(), vcs_git: fields.get("Vcs-Git").cloned(),
vcs_browser: fields.get("Vcs-Browser").cloned(), vcs_browser: fields.get("Vcs-Browser").cloned(),
files, files,
}) }));
} }
}
Ok(None)
} }
/// Parse a 'Sources.gz' debian package file data, to look for a target package and pub async fn get(
/// return the data for that package stanza
fn parse_sources(
data: &[u8],
target_package: &str,
target_version: Option<&str>,
) -> Result<Option<PackageStanza>, Box<dyn Error>> {
let mut sources = DebianSources::new(data)?;
// Find the right package, with the right version if requested
Ok(sources.find(|s| {
s.package == target_package
&& (target_version.is_none() || s.version == target_version.unwrap())
}))
}
/// Get package information from a package, distribution series, and pocket
async fn get(
package_name: &str, package_name: &str,
series: &str, series: &str,
pocket: &str, pocket: &str,
version: Option<&str>, version: Option<&str>,
base_url: Option<&str>,
) -> Result<PackageInfo, Box<dyn Error>> { ) -> Result<PackageInfo, Box<dyn Error>> {
let dist = crate::distro_info::get_dist_from_series(series).await?; let dist = get_dist_from_series(series).await?;
// Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa // Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa
// We need to check manually if there is a launchpad repository for the package // We need to check manually if there is a launchpad repository for the package
@@ -204,22 +312,13 @@ async fn get(
preferred_vcs = Some(lp_url); preferred_vcs = Some(lp_url);
} }
// Determine the base URL to use (either provided PPA URL or default archive) let base_url = get_base_url(&dist);
let distro_base_url = crate::distro_info::get_base_url(&dist);
let base_url = if let Some(ppa_url) = base_url {
ppa_url.to_string()
} else {
distro_base_url.clone()
};
// If using a custom base URL (PPA), disable VCS lookup to force archive download let components = get_components(base_url, series, pocket).await?;
let from_ppa = base_url != distro_base_url;
let components = crate::distro_info::get_components(&base_url, series, pocket).await?;
debug!("Found components: {:?}", components); debug!("Found components: {:?}", components);
for component in components { for component in components {
let url = crate::distro_info::get_sources_url(&base_url, series, pocket, &component); let url = get_sources_url(base_url, series, pocket, &component);
debug!("Fetching sources from: {}", url); debug!("Fetching sources from: {}", url);
@@ -250,11 +349,6 @@ async fn get(
preferred_vcs = Some(vcs.clone()); preferred_vcs = Some(vcs.clone());
} }
// If downloading from PPA, make sure we don't use a VCS
if from_ppa {
preferred_vcs = None;
}
let archive_url = format!("{base_url}/{0}", stanza.directory); let archive_url = format!("{base_url}/{0}", stanza.directory);
return Ok(PackageInfo { return Ok(PackageInfo {
dist, dist,
@@ -273,16 +367,14 @@ async fn get(
.into()) .into())
} }
/// Try to find package information in a distribution, trying all series and pockets pub async fn find_package(
async fn find_package(
package_name: &str, package_name: &str,
dist: &str, dist: &str,
pocket: &str, pocket: &str,
version: Option<&str>, version: Option<&str>,
base_url: Option<&str>,
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> { ) -> Result<PackageInfo, Box<dyn Error>> {
let series_list = crate::distro_info::get_ordered_series_name(dist).await?; let series_list = get_ordered_series(dist).await?;
for (i, series) in series_list.iter().enumerate() { for (i, series) in series_list.iter().enumerate() {
if let Some(cb) = progress { if let Some(cb) = progress {
@@ -290,13 +382,13 @@ async fn find_package(
} }
let pockets = if pocket.is_empty() { let pockets = if pocket.is_empty() {
crate::distro_info::get_dist_pockets(dist) get_dist_pockets(dist)
} else { } else {
vec![pocket.to_string()] vec![pocket]
}; };
for p in pockets { for p in pockets {
match get(package_name, series, &p, version, base_url).await { match get(package_name, series, p, version).await {
Ok(info) => { Ok(info) => {
if i > 0 { if i > 0 {
warn!( warn!(
@@ -321,72 +413,6 @@ async fn find_package(
Err(format!("Package '{}' not found.", package_name).into()) Err(format!("Package '{}' not found.", package_name).into())
} }
/// Lookup package information for a source package
///
/// This function obtains package information either directly from a specific series
/// or by searching across all series in a distribution.
///
/// # Arguments
/// * `package` - The name of the package to look up
/// * `version` - Optional specific version to look for
/// * `series` - Optional distribution series (e.g., "noble", "bookworm")
/// * `pocket` - Pocket to search in (e.g., "updates", "security", or "" for main)
/// * `dist` - Optional distribution name (e.g., "ubuntu", "debian")
/// * `base_url` - Optional base URL for the package archive (e.g., "https://ppa.launchpadcontent.net/user/ppa/ubuntu/")
/// * `progress` - Optional progress callback
pub async fn lookup(
package: &str,
version: Option<&str>,
series: Option<&str>,
pocket: &str,
dist: Option<&str>,
base_url: Option<&str>,
progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> {
// Obtain the package information, either directly in a series or with a search in all series
let package_info = if let Some(s) = series {
if let Some(cb) = progress {
cb(
&format!("Resolving package info for {}...", package),
"",
0,
0,
);
}
// Get the package information from that series and pocket
get(package, s, pocket, version, base_url).await?
} else {
let dist = dist.unwrap_or_else(||
// Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
if std::process::Command::new("lsb_release").arg("-i").arg("-s").output()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_lowercase()).unwrap_or_default() == "ubuntu" {
"ubuntu"
} else {
"debian"
}
);
if let Some(cb) = progress {
cb(
&format!(
"Searching for package {} in {}...",
package,
if base_url.is_none() { dist } else { "ppa" }
),
"",
0,
0,
);
}
// Try to find the package in all series from that dist
find_package(package, dist, pocket, version, base_url, progress).await?
};
Ok(package_info)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@@ -408,6 +434,26 @@ mod tests {
assert!(url.is_none()); assert!(url.is_none());
} }
#[tokio::test]
async fn test_get_debian_series() {
let series = get_dist_series("debian").await.unwrap();
assert!(series.contains(&"sid".to_string()));
assert!(series.contains(&"bookworm".to_string()));
}
#[tokio::test]
async fn test_get_ubuntu_series() {
let series = get_dist_series("ubuntu").await.unwrap();
assert!(series.contains(&"noble".to_string()));
assert!(series.contains(&"jammy".to_string()));
}
#[tokio::test]
async fn test_get_dist_from_series() {
assert_eq!(get_dist_from_series("sid").await.unwrap(), "debian");
assert_eq!(get_dist_from_series("noble").await.unwrap(), "ubuntu");
}
#[test] #[test]
fn test_parse_sources() { fn test_parse_sources() {
use flate2::Compression; use flate2::Compression;
@@ -445,7 +491,7 @@ Version: 1.0
#[tokio::test] #[tokio::test]
async fn test_find_package_fallback() { async fn test_find_package_fallback() {
// python2.7 is in bullseye but not above // python2.7 is in bullseye but not above
let info = find_package("python2.7", "debian", "", None, None, None) let info = find_package("python2.7", "debian", "", None, None)
.await .await
.unwrap(); .unwrap();
assert_eq!(info.stanza.package, "python2.7"); assert_eq!(info.stanza.package, "python2.7");
@@ -455,7 +501,7 @@ Version: 1.0
#[tokio::test] #[tokio::test]
async fn test_find_package_devel() { async fn test_find_package_devel() {
// hello is in sid // hello is in sid
let info = find_package("hello", "debian", "", None, None, None) let info = find_package("hello", "debian", "", None, None)
.await .await
.unwrap(); .unwrap();
assert_eq!(info.stanza.package, "hello"); assert_eq!(info.stanza.package, "hello");

View File

@@ -1,8 +1,8 @@
use std::cmp::min; use std::cmp::min;
use std::error::Error; use std::error::Error;
use std::path::Path; use std::path::Path;
use std::path::PathBuf;
use crate::package_info;
use crate::package_info::PackageInfo; use crate::package_info::PackageInfo;
use std::process::Command; use std::process::Command;
@@ -91,91 +91,23 @@ use futures_util::StreamExt;
use tar::Archive; use tar::Archive;
use xz2::read::XzDecoder; use xz2::read::XzDecoder;
fn copy_dir_all(src: &Path, dst: &Path) -> Result<(), Box<dyn Error>> { fn extract_archive(path: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
if !dst.exists() { let file = File::open(path)?;
std::fs::create_dir_all(dst)?;
}
for entry in std::fs::read_dir(src)? {
let entry = entry?;
let src_path = entry.path();
let dst_path = dst.join(entry.file_name());
if src_path.is_dir() {
copy_dir_all(&src_path, &dst_path)?;
} else {
std::fs::copy(&src_path, &dst_path)?;
}
}
Ok(())
}
/// Helper function to extract tar archive with progress tracking
fn extract_tar_archive<D, F>(
file_path: &Path,
dest: &Path,
progress: ProgressCallback<'_>,
decoder_factory: F,
) -> Result<Vec<String>, Box<dyn Error>>
where
D: std::io::Read,
F: Fn(File) -> D,
{
let file = File::open(file_path)?;
let decoder = decoder_factory(file);
let mut archive = Archive::new(decoder);
// Get total number of entries for progress tracking
let total_entries = archive.entries()?.count();
let mut current_entry = 0;
// Reset the archive to read entries again
let file = File::open(file_path)?;
let decoder = decoder_factory(file);
let mut archive = Archive::new(decoder);
let mut extracted_files = Vec::new();
for entry in archive.entries()? {
let mut entry = entry?;
let path = entry.path()?.to_path_buf();
let dest_path = dest.join(&path);
// Create parent directories if needed
if let Some(parent) = dest_path.parent() {
std::fs::create_dir_all(parent)?;
}
// Extract the file
entry.unpack(&dest_path)?;
extracted_files.push(dest_path.to_string_lossy().to_string());
current_entry += 1;
// Report progress
if let Some(cb) = progress {
cb("", "Extracting...", current_entry, total_entries);
}
}
Ok(extracted_files)
}
fn extract_archive(
path: &Path,
dest: &Path,
progress: ProgressCallback<'_>,
) -> Result<Vec<String>, Box<dyn Error>> {
let filename = path.file_name().unwrap().to_string_lossy(); let filename = path.file_name().unwrap().to_string_lossy();
if filename.ends_with(".tar.gz") || filename.ends_with(".tgz") { if filename.ends_with(".tar.gz") || filename.ends_with(".tgz") {
extract_tar_archive(path, dest, progress, GzDecoder::new) let tar = GzDecoder::new(file);
let mut archive = Archive::new(tar);
archive.unpack(dest)?;
} else if filename.ends_with(".tar.xz") || filename.ends_with(".txz") { } else if filename.ends_with(".tar.xz") || filename.ends_with(".txz") {
extract_tar_archive(path, dest, progress, XzDecoder::new) let tar = XzDecoder::new(file);
let mut archive = Archive::new(tar);
archive.unpack(dest)?;
} else { } else {
Err(format!("Unsupported archive format: {}", filename).into()) return Err(format!("Unsupported archive format: {}", filename).into());
} }
Ok(())
} }
fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<dyn Error>> { fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<dyn Error>> {
@@ -365,130 +297,95 @@ async fn fetch_archive_sources(
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
) -> Result<(), Box<dyn Error>> { ) -> Result<(), Box<dyn Error>> {
let package_dir = if let Some(path) = cwd { let package_dir = if let Some(path) = cwd {
path path.join(&info.stanza.package)
} else { } else {
&Path::new(".").to_path_buf() Path::new(&info.stanza.package).to_path_buf()
}; };
std::fs::create_dir_all(package_dir)?; std::fs::create_dir_all(&package_dir)?;
for file in &info.stanza.files { for file in &info.stanza.files {
let url = format!("{}/{}", info.archive_url, file.name); let url = format!("{}/{}", info.archive_url, file.name);
download_file_checksum(&url, &file.sha256, package_dir, progress).await?; download_file_checksum(&url, &file.sha256, &package_dir, progress).await?;
}
// Extract all tar archives, merging extracted directories // Extract the debian tarball or diff
if file.name.ends_with(".tar.gz") || file.name.ends_with(".tar.xz") { let debian_file = info
.stanza
.files
.iter()
.find(|f| f.name.contains(".debian.tar.") || f.name.contains(".diff.gz"));
if let Some(file) = debian_file {
let path = package_dir.join(&file.name); let path = package_dir.join(&file.name);
let extract_dir = package_dir.join(&info.stanza.package); let extract_dir = package_dir.join(&info.stanza.package);
let extracted = extract_archive(&path, &extract_dir, progress)?; if (file.name.ends_with(".tar.xz") || file.name.ends_with(".tar.gz"))
&& let Err(e) = extract_archive(&path, &extract_dir)
// Special case: the debian tar does only contain 'debian'
if file.name.contains("debian.tar.") {
continue;
}
// List root directories extracted and use the first one as the source directory
debug!("Root directories extracted:");
let mut source_dir: Option<PathBuf> = None;
for file in &extracted {
let path = Path::new(file);
// Check if this is a directory and is at the archive root level
// (i.e., the path relative to extract_dir has no parent components)
if let Ok(relative_path) = path.strip_prefix(&extract_dir)
&& relative_path.components().count() == 1
&& path.is_dir()
{ {
debug!("- {}", relative_path.file_name().unwrap().to_string_lossy()); return Err(format!("Failed to extract {}: {}", file.name, e).into());
// Use the first directory found as the source
if source_dir.is_none() {
source_dir = Some(path.to_path_buf());
}
}
} }
// Use the extracted directory as the source, assuming there is only one // Remove archive after extraction
if let Some(src_dir) = source_dir { std::fs::remove_file(&path)?;
let target_dir = package_dir.join(&info.stanza.package);
if target_dir.exists() {
// Target exists, we need to merge contents
for sub_entry in std::fs::read_dir(&src_dir)? {
let sub_entry = sub_entry?;
let sub_path = sub_entry.path();
let target_path = target_dir.join(sub_entry.file_name());
if sub_path.is_dir() {
std::fs::create_dir_all(&target_path)?;
// Recursively copy directory contents
copy_dir_all(&sub_path, &target_path)?;
} else {
std::fs::copy(&sub_path, &target_path)?;
}
}
std::fs::remove_dir_all(&src_dir)?;
} else {
std::fs::rename(&src_dir, &target_dir)?;
}
}
}
// Extract and apply .diff.gz if present (old packages)
if file.name.ends_with(".diff.gz") {
let diff_gz_path = package_dir.join(&file.name);
let source_dir = package_dir.join(&info.stanza.package);
// Create the .diff file path by replacing .gz with empty string
let diff_path = diff_gz_path.with_extension("");
// Decompress the .diff.gz file directly to .diff
let input_file = File::open(&diff_gz_path)?;
let mut decoder = GzDecoder::new(input_file);
let mut output_file = File::create(&diff_path)?;
std::io::copy(&mut decoder, &mut output_file)?;
// Use relative path for the diff file (it's in the parent directory)
let relative_diff_path =
format!("../{}", diff_path.file_name().unwrap().to_string_lossy());
// Apply the patch using the patch command with relative path
let output = Command::new("patch")
.current_dir(&source_dir)
.arg("-p1")
.arg("--input")
.arg(&relative_diff_path)
.output()?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
return Err(
format!("Failed to apply patch: {}\n{}", diff_path.display(), stderr).into(),
);
}
debug!("Successfully applied patch: {}", diff_path.display());
// Clean up the extracted .diff file
std::fs::remove_file(&diff_path)?;
}
} }
Ok(()) Ok(())
} }
/// Pull a source package locally using pre-retrieved package information
///
/// This function takes a PackageInfo struct and downloads the package using the preferred method
/// (either git or direct archive download), as well as orig tarball, inside 'package' directory.
/// The source will be extracted under 'package/package'.
pub async fn pull( pub async fn pull(
package_info: &PackageInfo, package: &str,
_version: &str,
series: Option<&str>,
pocket: &str,
_ppa: &str,
dist: Option<&str>,
cwd: Option<&Path>, cwd: Option<&Path>,
progress: ProgressCallback<'_>, progress: ProgressCallback<'_>,
force_archive: bool, ) -> Result<PackageInfo, Box<dyn Error>> {
) -> Result<(), Box<dyn Error>> { let version_opt = if _version.is_empty() {
let package = &package_info.stanza.package; None
let series = &package_info.series; } else {
Some(_version)
};
/* Obtain the package information, either directly in a series or with a search in all series */
let package_info = if let Some(s) = series {
if let Some(cb) = progress {
cb(
&format!("Resolving package info for {}...", package),
"",
0,
0,
);
}
// Get the package information from that series and pocket
package_info::get(package, s, pocket, version_opt).await?
} else {
let dist = dist.unwrap_or_else(||
// Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
if std::process::Command::new("lsb_release").arg("-i").arg("-s").output()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_lowercase()).unwrap_or_default() == "ubuntu" {
"ubuntu"
} else {
"debian"
}
);
if let Some(cb) = progress {
cb(
&format!("Searching for package {} in {}...", package, dist),
"",
0,
0,
);
}
// Try to find the package in all series from that dist
package_info::find_package(package, dist, pocket, version_opt, progress).await?
};
let package_dir = if let Some(path) = cwd { let package_dir = if let Some(path) = cwd {
path.join(package) path.join(package)
} else { } else {
@@ -496,20 +393,15 @@ pub async fn pull(
}; };
/* Fetch the package: either via git (preferred VCS) or the archive */ /* Fetch the package: either via git (preferred VCS) or the archive */
if let Some(ref url) = package_info.preferred_vcs if let Some(ref url) = package_info.preferred_vcs {
&& !force_archive
{
// We have found a preferred VCS (git repository) for the package, so // We have found a preferred VCS (git repository) for the package, so
// we fetch the package from that repo. // we fetch the package from that repo.
// Depending on target series, we pick target branch; if latest series is specified, // Depending on target series, we pick target branch; if no series is specified,
// we target the development branch, i.e. the default branch // we target the development branch, i.e. the default branch
let branch_name = if crate::distro_info::get_ordered_series_name(package_info.dist.as_str()) let branch_name = if let Some(s) = series {
.await?[0]
!= *series
{
if package_info.dist == "ubuntu" { if package_info.dist == "ubuntu" {
Some(format!("{}/{}", package_info.dist, series)) Some(format!("{}/{}", package_info.dist, s))
} else { } else {
// Debian does not have reliable branch naming... // Debian does not have reliable branch naming...
// For now, we skip that part and clone default // For now, we skip that part and clone default
@@ -549,7 +441,7 @@ pub async fn pull(
if let Some(cb) = progress { if let Some(cb) = progress {
cb("Fetching orig tarball...", "", 0, 0); cb("Fetching orig tarball...", "", 0, 0);
} }
fetch_orig_tarball(package_info, Some(&package_dir), progress).await?; fetch_orig_tarball(&package_info, Some(&package_dir), progress).await?;
} else { } else {
debug!("Native package, skipping orig tarball fetch."); debug!("Native package, skipping orig tarball fetch.");
} }
@@ -557,43 +449,35 @@ pub async fn pull(
if let Some(cb) = progress { if let Some(cb) = progress {
cb("Fetching dsc file...", "", 0, 0); cb("Fetching dsc file...", "", 0, 0);
} }
fetch_dsc_file(package_info, Some(&package_dir), progress).await?; fetch_dsc_file(&package_info, Some(&package_dir), progress).await?;
} else { } else {
// Fallback to archive fetching // Fallback to archive fetching
if let Some(cb) = progress { if let Some(cb) = progress {
cb("Downloading from archive...", "", 0, 0); cb("Downloading from archive...", "", 0, 0);
} }
fetch_archive_sources(package_info, Some(&package_dir), progress).await?; fetch_archive_sources(&package_info, Some(&package_dir), progress).await?;
} }
Ok(()) Ok(package_info)
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
async fn test_pull_package_end_to_end( async fn test_pull_package_end_to_end(package: &str, series: Option<&str>, dist: Option<&str>) {
package: &str,
series: Option<&str>,
dist: Option<&str>,
archive: Option<bool>,
) {
// This test verifies that 'pkh pull' clones the repo and fetches the tarball. // This test verifies that 'pkh pull' clones the repo and fetches the tarball.
// For determinism, we require for tests that either a distro or series is specified, // For determinism, we require for tests that either a distro or series is specified,
// as no distribution would mean fallback to system distro // as no distribution would mean fallback to system distro
assert!(dist.is_some() || series.is_some()); assert!(dist != None || series != None);
// Use a temp directory as working directory // Use a temp directory as working directory
let temp_dir = tempfile::tempdir().unwrap(); let temp_dir = tempfile::tempdir().unwrap();
let cwd = temp_dir.path(); let cwd = temp_dir.path();
// Main 'pull' command: the one we want to test // Main 'pull' command: the one we want to test
let info = crate::package_info::lookup(package, None, series, "", dist, None, None) let info = pull(package, "", series, "", "", dist, Some(cwd), None)
.await
.unwrap();
pull(&info, Some(cwd), None, archive.unwrap_or(false))
.await .await
.unwrap(); .unwrap();
@@ -632,7 +516,7 @@ mod tests {
} }
} }
// Check for orig tarball in package dir (only for non-native packages) // Check for orig tarball in package dir
let mut found_tarball = false; let mut found_tarball = false;
let mut found_dsc = false; let mut found_dsc = false;
for entry in std::fs::read_dir(package_dir).unwrap() { for entry in std::fs::read_dir(package_dir).unwrap() {
@@ -646,48 +530,39 @@ mod tests {
} }
} }
// Only check for orig tarball if the package is not native
if !info.is_native() {
assert!(found_tarball, "Orig tarball not found in package dir"); assert!(found_tarball, "Orig tarball not found in package dir");
}
assert!(found_dsc, "DSC file not found in package dir"); assert!(found_dsc, "DSC file not found in package dir");
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_ubuntu_end_to_end() { async fn test_pull_hello_ubuntu_end_to_end() {
test_pull_package_end_to_end("hello", Some("noble"), None, None).await; test_pull_package_end_to_end("hello", Some("noble"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_debian_end_to_end() { async fn test_pull_hello_debian_end_to_end() {
test_pull_package_end_to_end("hello", Some("bookworm"), None, None).await; test_pull_package_end_to_end("hello", Some("bookworm"), None).await;
}
/// Specific test for a package using a .diff.gz, instead of .debian and .orig
#[tokio::test]
async fn test_pull_linux_riscv_ubuntu_end_to_end() {
test_pull_package_end_to_end("linux-riscv", Some("noble"), None, Some(true)).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_2048_universe_ubuntu_end_to_end() { async fn test_pull_2048_universe_ubuntu_end_to_end() {
test_pull_package_end_to_end("2048", Some("noble"), None, None).await; test_pull_package_end_to_end("2048", Some("noble"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_1oom_contrib_debian_end_to_end() { async fn test_pull_1oom_contrib_debian_end_to_end() {
test_pull_package_end_to_end("1oom", Some("trixie"), None, None).await; test_pull_package_end_to_end("1oom", Some("trixie"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_agg_svn_fallback_ok() { async fn test_pull_agg_svn_fallback_ok() {
test_pull_package_end_to_end("agg", Some("trixie"), None, None).await; test_pull_package_end_to_end("agg", Some("trixie"), None).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_debian_latest_end_to_end() { async fn test_pull_hello_debian_latest_end_to_end() {
test_pull_package_end_to_end("hello", None, Some("debian"), None).await; test_pull_package_end_to_end("hello", None, Some("debian")).await;
} }
#[tokio::test] #[tokio::test]
async fn test_pull_hello_ubuntu_latest_end_to_end() { async fn test_pull_hello_ubuntu_latest_end_to_end() {
test_pull_package_end_to_end("hello", None, Some("ubuntu"), None).await; test_pull_package_end_to_end("hello", None, Some("ubuntu")).await;
} }
} }

View File

@@ -1,32 +0,0 @@
use gpgme::{Context, Protocol};
/// Check if a GPG key matching 'email' exists
/// Returns the key ID if found, None otherwise
pub fn find_signing_key_for_email(
email: &str,
) -> Result<Option<String>, Box<dyn std::error::Error>> {
// Create a new GPG context
let mut ctx = Context::from_protocol(Protocol::OpenPgp)?;
// List all secret keys
let keys = ctx.secret_keys()?;
// Find a key that matches the email and can sign
for key_result in keys {
let key = key_result?;
// Check if the key has signing capability
if key.can_sign() {
// Check user IDs for email match
for user_id in key.user_ids() {
if let Ok(userid_email) = user_id.email()
&& userid_email.eq_ignore_ascii_case(email)
&& let Ok(fingerprint) = key.fingerprint()
{
return Ok(Some(fingerprint.to_string()));
}
}
}
}
Ok(None)
}

View File

@@ -1 +0,0 @@
pub mod gpg;