Compare commits

...

29 Commits

Author SHA1 Message Date
593793373a deb: consider all kinds of package dirs
Some checks failed
CI / build (push) Failing after 12m32s
2026-01-12 11:29:18 +01:00
21bb76153e deb: consider package directories with version
Some checks failed
CI / build (push) Failing after 1m6s
2026-01-12 10:37:09 +01:00
bd10a37c2a pull: fmt
Some checks failed
CI / build (push) Failing after 8m1s
2026-01-11 22:19:46 +01:00
91c812a530 pull: allow to force pull from archive
Some checks failed
CI / build (push) Failing after 1m2s
2026-01-11 20:43:01 +01:00
70e6d8c051 pull: refactor to remove series argument
All checks were successful
CI / build (push) Successful in 9m31s
2026-01-11 12:36:19 +01:00
2f43ed1597 ci: fix clippy
All checks were successful
CI / build (push) Successful in 9m7s
2026-01-11 12:22:00 +01:00
182cc086c0 ci: install clippy
Some checks failed
CI / build (push) Has been cancelled
2026-01-11 12:18:19 +01:00
a2d4f885b5 doc: update README, documentation 2026-01-11 12:16:27 +01:00
650adc28a3 pull: split into package_info::lookup and pull
Some checks failed
CI / build (push) Failing after 1m47s
2026-01-11 12:12:19 +01:00
b724d46f2c deb: fix concurrent testing (by making them serial)
All checks were successful
CI / build (push) Successful in 8m34s
Co-authored-by: Valentin Haudiquet <valentin.haudiquet@canonical.com>
Co-committed-by: Valentin Haudiquet <valentin.haudiquet@canonical.com>
2026-01-11 00:32:03 +00:00
216eee8f33 deb: add tests logs back
Some checks failed
CI / build (push) Has been cancelled
2026-01-11 01:14:24 +01:00
64b51563e7 deb: cross uses new apt sources parser 2026-01-10 00:46:04 +01:00
d2914c63c6 deb/ephemeral: use a lockfile for chroot tarballs
Some checks failed
CI / build (push) Failing after 6m47s
2026-01-09 23:54:10 +01:00
42e6165f78 context/unshare: mount proc after forking
Some checks failed
CI / build (push) Failing after 6m46s
2026-01-09 23:36:27 +01:00
a444a5d8d2 deb: fix bug in find_dsc_file
Some checks failed
CI / build (push) Failing after 5m35s
2026-01-09 23:15:13 +01:00
dd62baa455 context/unshare: mount proc and dev/pts 2026-01-09 23:08:59 +01:00
464e25de24 apt: refactor into multiple modules
Some checks failed
CI / build (push) Failing after 7m56s
2026-01-09 18:02:22 +01:00
1b659ce6f4 build, changelog: fix signing when building source package
Some checks failed
CI / build (push) Failing after 7m8s
2026-01-09 17:21:07 +01:00
e2838bf5aa deb: test refactoring
Some checks failed
CI / build (push) Failing after 7m18s
2026-01-08 19:14:26 +01:00
a6df6070d4 build: fix signature handling 2026-01-08 18:53:19 +01:00
6fb4ccfa88 deb: test end to end build of hello
Some checks failed
CI / build (push) Failing after 7m40s
2026-01-08 18:16:14 +01:00
126a6e0d76 deb: ensure universe is enabled on Ubuntu by default
Some checks failed
CI / build (push) Has been cancelled
Added apt source parser, module apt
2026-01-08 18:15:50 +01:00
f3f78ef0e3 ci: update build-depends
All checks were successful
CI / build (push) Successful in 8m27s
2026-01-06 18:17:01 +01:00
f3417c7a16 deb: default to ephemeral context with local builds
Some checks failed
CI / build (push) Failing after 1m28s
2026-01-06 18:15:05 +01:00
1c9f6cccd2 build: only sign if a gpg key able to sign is present
Some checks failed
CI / build (push) Failing after 1m50s
2026-01-06 18:07:34 +01:00
b3365afe5b docs: added documentation, enforced documentation
All checks were successful
CI / build (push) Successful in 7m21s
2026-01-01 18:37:40 +01:00
5e1b0988fd package_info: refactor sources parsing even more
All checks were successful
CI / build (push) Successful in 7m46s
2026-01-01 15:20:59 +01:00
a567506831 package_info: refactor sources parsing with iterator
All checks were successful
CI / build (push) Successful in 7m12s
2025-12-31 19:23:49 +01:00
1538e9ee19 deb: cross-compilation, ephemeral contexts, local builds
All checks were successful
CI / build (push) Successful in 7m18s
Multiple changes:
- New contexts (schroot, unshare)
- Cross-building quirks, with ephemeral contexts and repositories management
- Contexts with parents, global context manager, better lifetime handling
- Local building of binary packages
- Pull: pulling dsc files by default
- Many small bugfixes and changes

Co-authored-by: Valentin Haudiquet <valentin.haudiquet@canonical.com>
Co-committed-by: Valentin Haudiquet <valentin.haudiquet@canonical.com>
2025-12-25 17:10:44 +00:00
27 changed files with 2741 additions and 475 deletions


@@ -2,7 +2,7 @@ name: CI
on:
push:
branches: [ "main" ]
branches: [ "main", "ci-test" ]
pull_request:
branches: [ "main" ]
@@ -12,23 +12,40 @@ env:
jobs:
build:
runs-on: ubuntu-latest
container:
image: ubuntu:24.04
options: --privileged --cap-add SYS_ADMIN --security-opt apparmor:unconfined
steps:
- name: Set up container image
run: |
apt-get update
apt-get install -y nodejs sudo curl wget ca-certificates build-essential
- uses: actions/checkout@v6
- uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt
components: rustfmt, clippy
- name: Check format
run: cargo fmt --check
- name: Install build dependencies
run: |
sudo apt-get update
sudo apt-get install -y pkg-config libssl-dev
sudo apt-get install -y pkg-config libssl-dev libgpg-error-dev libgpgme-dev
- name: Build
run: cargo build
env:
RUSTFLAGS: -Dwarnings
- name: Lint
run: cargo clippy --all-targets --all-features
env:
RUSTFLAGS: -Dwarnings
- name: Install runtime system dependencies
run: |
sudo apt-get update
sudo apt-get install -y pristine-tar sbuild mmdebstrap dpkg-dev
- name: Run tests
run: cargo test
sudo apt-get install -y git pristine-tar sbuild mmdebstrap util-linux dpkg-dev
- name: Setup subuid/subgid
run: |
usermod --add-subuids 100000-200000 --add-subgids 100000-200000 ${USER:-root}
- name: Run tests with verbose logging (timeout 30min)
env:
RUST_LOG: debug
run: timeout 30m cargo test -- --nocapture


@@ -27,6 +27,9 @@ xz2 = "0.1"
serde_json = "1.0.145"
directories = "6.0.0"
ssh2 = "0.9.5"
gpgme = "0.11"
[dev-dependencies]
test-log = "0.2.19"
serial_test = "3.3.1"
tempfile = "3.10.1"


@@ -24,8 +24,10 @@ Options:
Commands and workflows include:
```
Commands:
pull Get a source package from the archive or git
pull Pull a source package from the archive or git
chlog Auto-generate changelog entry, editing it, committing it afterwards
build Build the source package (into a .dsc)
deb Build the source package into binary package (.deb)
help Print this message or the help of the given subcommand(s)
```
@@ -96,7 +98,7 @@ Missing features:
- [ ] Three build modes:
- [ ] Build locally (discouraged)
- [x] Build using sbuild+unshare, with binary emulation (default)
- [ ] Cross-compilation
- [x] Cross-compilation
- [ ] Async build
- [ ] `pkh status`
- [ ] Show build status

src/apt/keyring.rs Normal file (+0)

src/apt/mod.rs Normal file (+1)

@@ -0,0 +1 @@
pub mod sources;

src/apt/sources.rs Normal file (+336)

@@ -0,0 +1,336 @@
//! APT sources.list management
//! Provides a simple structure for managing APT repository sources
use crate::context;
use std::error::Error;
use std::path::Path;
use std::sync::Arc;
/// Represents a single source entry in sources.list
#[derive(Debug, Clone)]
pub struct SourceEntry {
/// Is the source enabled?
pub enabled: bool,
/// Source components (universe, main, contrib)
pub components: Vec<String>,
/// Source architectures (amd64, riscv64, arm64)
pub architectures: Vec<String>,
/// Source URI
pub uri: String,
/// Source suites (series-pocket)
pub suite: Vec<String>,
}
impl SourceEntry {
/// Parse a string describing a source entry in deb822 format
pub fn from_deb822(data: &str) -> Option<Self> {
let mut current_entry = SourceEntry {
enabled: true,
components: Vec::new(),
architectures: Vec::new(),
uri: String::new(),
suite: Vec::new(),
};
for line in data.lines() {
let line = line.trim();
if line.starts_with('#') {
continue;
}
// Empty line: either the end of an entry, or a blank line before one starts
if line.is_empty() {
if !current_entry.uri.is_empty() {
return Some(current_entry);
} else {
continue;
}
}
if let Some((key, value)) = line.split_once(':') {
let key = key.trim();
let value = value.trim();
match key {
"Types" => {
// We only care about deb types
}
"URIs" => current_entry.uri = value.to_string(),
"Suites" => {
current_entry.suite =
value.split_whitespace().map(|s| s.to_string()).collect();
}
"Components" => {
current_entry.components =
value.split_whitespace().map(|s| s.to_string()).collect();
}
"Architectures" => {
current_entry.architectures =
value.split_whitespace().map(|s| s.to_string()).collect();
}
_ => {}
}
}
}
// End of entry, or empty file?
if !current_entry.uri.is_empty() {
Some(current_entry)
} else {
None
}
}
/// Parse a line describing a legacy source entry
pub fn from_legacy(data: &str) -> Option<Self> {
let line = data.lines().next()?.trim();
if line.is_empty() || line.starts_with("#") {
return None;
}
// Parse legacy deb line format: deb [arch=... signed-by=...] uri suite [components...]
// Extract bracket parameters first
let mut architectures = Vec::new();
let mut line_without_brackets = line.to_string();
// Find and process bracket parameters
if let Some(start_bracket) = line.find('[')
&& let Some(end_bracket) = line.find(']')
{
let bracket_content = &line[start_bracket + 1..end_bracket];
// Parse parameters inside brackets
for param in bracket_content.split_whitespace() {
if param.starts_with("arch=") {
let arch_values = param.split('=').nth(1).unwrap_or("");
architectures = arch_values
.split(',')
.map(|s| s.trim().to_string())
.collect();
}
// signed-by parameter is recognized but not stored
}
// Remove the bracket section from the line
line_without_brackets = line[..start_bracket].to_string() + &line[end_bracket + 1..];
}
// Trim and split the remaining line
let line_without_brackets = line_without_brackets.trim();
let parts: Vec<&str> = line_without_brackets.split_whitespace().collect();
// We need at least: deb, uri, suite
if parts.len() < 3 || parts[0] != "deb" {
return None;
}
let uri = parts[1].to_string();
let suite = vec![parts[2].to_string()];
let components: Vec<String> = parts[3..].iter().map(|&s| s.to_string()).collect();
Some(SourceEntry {
enabled: true,
components,
architectures,
uri,
suite,
})
}
/// Convert this source entry to legacy format
pub fn to_legacy(&self) -> String {
let mut result = String::new();
// Legacy entries contain one suite per line
for suite in &self.suite {
// Start with "deb" type
result.push_str("deb");
// Add architectures if present
if !self.architectures.is_empty() {
result.push_str(" [arch=");
result.push_str(&self.architectures.join(","));
result.push(']');
}
// Add URI and suite
result.push(' ');
result.push_str(&self.uri);
result.push(' ');
result.push_str(suite);
// Add components
if !self.components.is_empty() {
result.push(' ');
result.push_str(&self.components.join(" "));
}
result.push('\n');
}
result
}
}
/// Parse a 'source list' string in deb822 format into a SourceEntry vector
pub fn parse_deb822(data: &str) -> Vec<SourceEntry> {
data.split("\n\n")
.flat_map(SourceEntry::from_deb822)
.collect()
}
/// Parse a 'source list' string in legacy format into a SourceEntry vector
pub fn parse_legacy(data: &str) -> Vec<SourceEntry> {
data.split("\n")
.flat_map(SourceEntry::from_legacy)
.collect()
}
/// Load sources from context (or current context by default)
pub fn load(ctx: Option<Arc<crate::context::Context>>) -> Result<Vec<SourceEntry>, Box<dyn Error>> {
let mut sources = Vec::new();
let ctx = ctx.unwrap_or_else(context::current);
// Try DEB822 format first (Ubuntu 24.04+ and Debian Trixie+)
if let Ok(entries) = load_deb822(&ctx, "/etc/apt/sources.list.d/ubuntu.sources") {
sources.extend(entries);
} else if let Ok(entries) = load_deb822(&ctx, "/etc/apt/sources.list.d/debian.sources") {
sources.extend(entries);
}
// Fall back to legacy format
if let Ok(entries) = load_legacy(&ctx, "/etc/apt/sources.list") {
sources.extend(entries);
}
Ok(sources)
}
/// Save sources to a file inside the context, in legacy format
pub fn save_legacy(
ctx: Option<Arc<crate::context::Context>>,
sources: Vec<SourceEntry>,
path: &str,
) -> Result<(), Box<dyn Error>> {
let ctx = if let Some(c) = ctx {
c
} else {
context::current()
};
let content = sources
.into_iter()
.map(|s| s.to_legacy())
.collect::<Vec<_>>()
.join("\n");
ctx.write_file(Path::new(path), &content)?;
Ok(())
}
/// Load sources from DEB822 format
fn load_deb822(ctx: &context::Context, path: &str) -> Result<Vec<SourceEntry>, Box<dyn Error>> {
let path = Path::new(path);
if path.exists() {
let content = ctx.read_file(path)?;
return Ok(parse_deb822(&content));
}
Ok(Vec::new())
}
/// Load sources from legacy format
fn load_legacy(ctx: &context::Context, path: &str) -> Result<Vec<SourceEntry>, Box<dyn Error>> {
let path = Path::new(path);
if path.exists() {
let content = ctx.read_file(path)?;
return Ok(content.lines().flat_map(SourceEntry::from_legacy).collect());
}
Ok(Vec::new())
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn test_parse_deb822() {
let deb822 = "\
Types: deb\n\
URIs: http://fr.archive.ubuntu.com/ubuntu/\n\
Suites: questing questing-updates questing-backports\n\
Components: main restricted universe multiverse\n\
Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n\
Architectures: amd64\n\
\n\
Types: deb\n\
URIs: http://security.ubuntu.com/ubuntu/\n\
Suites: questing-security\n\
Components: main restricted universe multiverse\n\
Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n\
Architectures: amd64\n\
\n\
Types: deb\n\
URIs: http://ports.ubuntu.com/ubuntu-ports/\n\
Suites: questing questing-updates questing-backports\n\
Components: main restricted universe multiverse\n\
Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n\
Architectures: riscv64\n\
";
let sources = parse_deb822(deb822);
assert_eq!(sources.len(), 3);
assert_eq!(sources[0].uri, "http://fr.archive.ubuntu.com/ubuntu/");
assert_eq!(sources[0].architectures, vec!["amd64"]);
assert_eq!(
sources[0].suite,
vec!["questing", "questing-updates", "questing-backports"]
);
assert_eq!(
sources[0].components,
vec!["main", "restricted", "universe", "multiverse"]
);
assert_eq!(sources[1].uri, "http://security.ubuntu.com/ubuntu/");
assert_eq!(sources[1].architectures, vec!["amd64"]);
assert_eq!(sources[1].suite, vec!["questing-security"]);
assert_eq!(
sources[1].components,
vec!["main", "restricted", "universe", "multiverse"]
);
assert_eq!(sources[2].uri, "http://ports.ubuntu.com/ubuntu-ports/");
assert_eq!(sources[2].architectures.len(), 1);
assert_eq!(sources[2].architectures, vec!["riscv64"]);
assert_eq!(
sources[2].suite,
vec!["questing", "questing-updates", "questing-backports"]
);
assert_eq!(
sources[2].components,
vec!["main", "restricted", "universe", "multiverse"]
);
}
#[tokio::test]
async fn test_parse_legacy() {
let legacy = "\
deb [signed-by=\"/usr/share/keyrings/ubuntu-archive-keyring.gpg\" arch=amd64] http://archive.ubuntu.com/ubuntu resolute main universe\n\
deb [arch=amd64,i386 signed-by=\"/usr/share/keyrings/ubuntu-archive-keyring.gpg\"] http://archive.ubuntu.com/ubuntu resolute-updates main\n\
deb [signed-by=\"/usr/share/keyrings/ubuntu-archive-keyring.gpg\"] http://security.ubuntu.com/ubuntu resolute-security main\n\
";
let sources = parse_legacy(legacy);
assert_eq!(sources.len(), 3);
assert_eq!(sources[0].uri, "http://archive.ubuntu.com/ubuntu");
assert_eq!(sources[0].suite, vec!["resolute"]);
assert_eq!(sources[0].components, vec!["main", "universe"]);
assert_eq!(sources[0].architectures, vec!["amd64"]);
assert_eq!(sources[1].uri, "http://archive.ubuntu.com/ubuntu");
assert_eq!(sources[1].suite, vec!["resolute-updates"]);
assert_eq!(sources[1].components, vec!["main"]);
assert_eq!(sources[1].architectures, vec!["amd64", "i386"]);
assert_eq!(sources[2].uri, "http://security.ubuntu.com/ubuntu");
assert_eq!(sources[2].suite, vec!["resolute-security"]);
assert_eq!(sources[2].components, vec!["main"]);
}
}
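
For orientation, here is a minimal sketch of how the parser and serializer above compose; the `pkh::apt::sources` crate path is an assumption (in-tree code would use `crate::apt::sources`):

```rust
// Sketch only: the crate/module path is assumed.
use pkh::apt::sources::SourceEntry;

fn main() {
    // Parse one legacy "deb ..." line, then write it back in legacy format.
    let line = "deb [arch=amd64] http://archive.ubuntu.com/ubuntu resolute main universe";
    let entry = SourceEntry::from_legacy(line).expect("valid legacy line");
    assert_eq!(entry.uri, "http://archive.ubuntu.com/ubuntu");
    assert_eq!(entry.suite, vec!["resolute"]);
    assert_eq!(entry.architectures, vec!["amd64"]);
    // to_legacy() emits one "deb ..." line per suite.
    assert_eq!(
        entry.to_legacy(),
        "deb [arch=amd64] http://archive.ubuntu.com/ubuntu resolute main universe\n"
    );
}
```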


@@ -2,24 +2,67 @@ use std::error::Error;
use std::path::Path;
use std::process::Command;
use crate::changelog::parse_changelog_footer;
use crate::utils::gpg;
/// Build a Debian source package (to a .dsc)
pub fn build_source_package(cwd: Option<&Path>) -> Result<(), Box<dyn Error>> {
let cwd = cwd.unwrap_or_else(|| Path::new("."));
let status = Command::new("dpkg-buildpackage")
// Parse changelog to get maintainer information from the last modification entry
let changelog_path = cwd.join("debian/changelog");
let (maintainer_name, maintainer_email) = parse_changelog_footer(&changelog_path)?;
// Check if a GPG key matching the maintainer's email exists
let signing_key = match gpg::find_signing_key_for_email(&maintainer_email) {
Ok(key) => key,
Err(e) => {
// If GPG is not available or there's an error, continue without signing
log::warn!("Failed to check for GPG key: {}", e);
None
}
};
// Build command arguments
let mut command = Command::new("dpkg-buildpackage");
command
.current_dir(cwd)
.args(["-S", "-I", "-i", "-nc", "-d"])
.status()?;
.arg("-S")
.arg("-I")
.arg("-i")
.arg("-nc")
.arg("-d");
// If a signing key is found, use it for signing
if let Some(key_id) = &signing_key {
command.arg(format!("--sign-keyid={}", key_id));
log::info!("Using GPG key {} for signing", key_id);
} else {
command.arg("--no-sign");
log::info!(
"No GPG key found for {} ({}), building without signing",
maintainer_name,
maintainer_email
);
}
let status = command.status()?;
if !status.success() {
return Err(format!("dpkg-buildpackage failed with status: {}", status).into());
}
if signing_key.is_some() {
println!("Package built and signed successfully!");
} else {
println!("Package built successfully (unsigned).");
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
// We are not testing the build part, as for now this is just a wrapper
// around dpkg-buildpackage.


@@ -5,9 +5,7 @@ use std::fs::File;
use std::io::{self, BufRead, Read, Write};
use std::path::Path;
/*
* Automatically generate a changelog entry from a commit history and previous changelog
*/
/// Automatically generate a changelog entry from a commit history and previous changelog
pub fn generate_entry(
changelog_file: &str,
cwd: Option<&Path>,
@@ -61,10 +59,8 @@ pub fn generate_entry(
Ok(())
}
/*
* Compute the next (most probable) version number of a package, from old version and
* conditions on changes (is ubuntu upload, is a no change rebuild, is a non-maintainer upload)
*/
/// Compute the next (most probable) version number of a package, from old version and
/// conditions on changes (is ubuntu upload, is a no change rebuild, is a non-maintainer upload)
fn compute_new_version(
old_version: &str,
is_ubuntu: bool,
@@ -87,9 +83,7 @@ fn compute_new_version(
increment_suffix(old_version, "")
}
/*
* Increment a version number by 1, for a given suffix
*/
/// Increment a version number by 1, for a given suffix
fn increment_suffix(version: &str, suffix: &str) -> String {
// If suffix is empty, we just look for trailing digits
// If suffix is not empty, we look for suffix followed by digits
@@ -120,9 +114,8 @@ fn increment_suffix(version: &str, suffix: &str) -> String {
}
}
/*
* Parse a changelog file first entry header, to obtain (package, version, series)
*/
/// Parse a changelog file first entry header
/// Returns (package, version, series) tuple from the last modification entry
pub fn parse_changelog_header(
path: &Path,
) -> Result<(String, String, String), Box<dyn std::error::Error>> {
@@ -143,6 +136,33 @@ pub fn parse_changelog_header(
}
}
/// Parse a changelog file footer to extract maintainer information
/// Returns (name, email) tuple from the last modification entry
pub fn parse_changelog_footer(path: &Path) -> Result<(String, String), Box<dyn std::error::Error>> {
let mut file = File::open(path)?;
let mut content = String::new();
file.read_to_string(&mut content)?;
// Find the last maintainer line (format: -- Name <email> Date)
let re = Regex::new(r"--\s*([^<]+?)\s*<([^>]+)>\s*")?;
if let Some(first_match) = re.captures_iter(&content).next() {
let name = first_match
.get(1)
.map_or("", |m| m.as_str())
.trim()
.to_string();
let email = first_match
.get(2)
.map_or("", |m| m.as_str())
.trim()
.to_string();
Ok((name, email))
} else {
Err(format!("No maintainer information found in {}", path.display()).into())
}
}
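
To make the expected trailer format concrete, a small sketch applying the same pattern to a typical ` -- Name <email>  Date` changelog line (the name, email, and date below are made up):

```rust
use regex::Regex;

fn main() {
    // Same pattern as parse_changelog_footer; the trailer values are made up.
    let re = Regex::new(r"--\s*([^<]+?)\s*<([^>]+)>\s*").unwrap();
    let line = " -- Jane Doe <jane.doe@example.com>  Mon, 12 Jan 2026 11:00:00 +0100";
    let caps = re.captures(line).expect("trailer line should match");
    assert_eq!(caps.get(1).unwrap().as_str(), "Jane Doe");
    assert_eq!(caps.get(2).unwrap().as_str(), "jane.doe@example.com");
}
```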
/*
* Obtain all commit messages as a list since a tagged version in a git repository
*/


@@ -2,16 +2,38 @@ use serde::{Deserialize, Serialize};
use std::ffi::OsStr;
use std::io;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::Mutex;
use super::local::LocalDriver;
use super::schroot::SchrootDriver;
use super::ssh::SshDriver;
use super::unshare::UnshareDriver;
/// A ContextDriver is the interface for the logic happening inside a context
pub trait ContextDriver {
fn ensure_available(&self, src: &Path, dest_root: &str) -> io::Result<PathBuf>;
fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()>;
fn list_files(&self, path: &Path) -> io::Result<Vec<PathBuf>>;
fn run(&self, program: &str, args: &[String]) -> io::Result<std::process::ExitStatus>;
fn run_output(&self, program: &str, args: &[String]) -> io::Result<std::process::Output>;
fn prepare_work_dir(&self) -> io::Result<String>;
fn run(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::ExitStatus>;
fn run_output(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::Output>;
fn create_temp_dir(&self) -> io::Result<String>;
fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()>;
fn read_file(&self, path: &Path) -> io::Result<String>;
fn write_file(&self, path: &Path, content: &str) -> io::Result<()>;
fn exists(&self, path: &Path) -> io::Result<bool>;
}
/// Represents an execution environment (Local or via SSH).
@@ -20,24 +42,98 @@ pub trait ContextDriver {
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type")]
#[derive(Default)]
pub enum Context {
pub enum ContextConfig {
/// Local context: actions executed locally
#[serde(rename = "local")]
#[default]
Local,
/// SSH context: actions over an SSH connection
#[serde(rename = "ssh")]
Ssh {
/// Host for the SSH connection
host: String,
/// User for the SSH connection
user: Option<String>,
/// TCP port for the SSH connection
port: Option<u16>,
},
/// Schroot context: using `schroot`
#[serde(rename = "schroot")]
Schroot {
/// Name of the schroot
name: String,
/// Optional parent context for the Schroot context
parent: Option<String>,
},
/// Unshare context: chroot with dropped permissions (using `unshare`)
#[serde(rename = "unshare")]
Unshare {
/// Path to use for chrooting
path: String,
/// Optional parent context for the Unshare context
parent: Option<String>,
},
}
/// A context, allowing to run commands, read and write files, etc
pub struct Context {
/// Configuration for the context
pub config: ContextConfig,
/// Parent context for the context
///
/// For example, you could have a chroot context over an ssh connection
pub parent: Option<Arc<Context>>,
/// ContextDriver for the context, implementing the logic for actions
driver: Mutex<Option<Box<dyn ContextDriver + Send + Sync>>>,
}
impl Context {
pub fn command<S: AsRef<OsStr>>(&self, program: S) -> ContextCommand {
/// Create a context from configuration
pub fn new(config: ContextConfig) -> Self {
let parent = match &config {
ContextConfig::Schroot {
parent: Some(parent_name),
..
}
| ContextConfig::Unshare {
parent: Some(parent_name),
..
} => {
let config_lock = crate::context::manager::MANAGER.get_config();
let parent_config = config_lock
.contexts
.get(parent_name)
.cloned()
.expect("Parent context not found");
Some(Arc::new(Context::new(parent_config)))
}
_ => None,
};
Self {
config,
parent,
driver: Mutex::new(None),
}
}
/// Create a context with an explicit parent context
pub fn with_parent(config: ContextConfig, parent: Arc<Context>) -> Self {
Self {
config,
parent: Some(parent),
driver: Mutex::new(None),
}
}
/// Make a command inside context
pub fn command<S: AsRef<OsStr>>(&self, program: S) -> ContextCommand<'_> {
ContextCommand {
driver: self.driver(),
context: self,
program: program.as_ref().to_string_lossy().to_string(),
args: Vec::new(),
env: Vec::new(),
cwd: None,
}
}
@@ -46,11 +142,15 @@ impl Context {
/// If Local: Returns the absolute path of `src`.
/// If Remote: Copies `src` to `dest_root` on the remote and returns the path to the copied entity.
pub fn ensure_available(&self, src: &Path, dest_root: &str) -> io::Result<PathBuf> {
self.driver().ensure_available(src, dest_root)
self.driver()
.as_ref()
.unwrap()
.ensure_available(src, dest_root)
}
pub fn prepare_work_dir(&self) -> io::Result<String> {
self.driver().prepare_work_dir()
/// Create a temp directory inside context
pub fn create_temp_dir(&self) -> io::Result<String> {
self.driver().as_ref().unwrap().create_temp_dir()
}
/// Retrieve a file or directory from the context to the local filesystem.
@@ -58,22 +158,68 @@ impl Context {
/// The `src` path is on the context, `dest` is on the local machine.
/// If `src` is a directory, it is copied recursively.
pub fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
self.driver().retrieve_path(src, dest)
self.driver().as_ref().unwrap().retrieve_path(src, dest)
}
/// List files in a directory on the context.
pub fn list_files(&self, path: &Path) -> io::Result<Vec<PathBuf>> {
self.driver().list_files(path)
self.driver().as_ref().unwrap().list_files(path)
}
fn driver(&self) -> Box<dyn ContextDriver> {
match self {
Context::Local => Box::new(LocalDriver),
Context::Ssh { host, user, port } => Box::new(SshDriver {
host: host.clone(),
user: user.clone(),
port: *port,
}),
/// Copy a path inside context
pub fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
self.driver().as_ref().unwrap().copy_path(src, dest)
}
/// Read a file inside context
pub fn read_file(&self, path: &Path) -> io::Result<String> {
self.driver().as_ref().unwrap().read_file(path)
}
/// Write a file inside context
pub fn write_file(&self, path: &Path, content: &str) -> io::Result<()> {
self.driver().as_ref().unwrap().write_file(path, content)
}
/// Check if a file or directory exists inside context
pub fn exists(&self, path: &Path) -> io::Result<bool> {
self.driver().as_ref().unwrap().exists(path)
}
/// Create and obtain a specific driver for the context
pub fn driver(
&self,
) -> std::sync::MutexGuard<'_, Option<Box<dyn ContextDriver + Send + Sync>>> {
let mut driver_lock = self.driver.lock().unwrap();
if driver_lock.is_none() {
let driver: Box<dyn ContextDriver + Send + Sync> = match &self.config {
ContextConfig::Local => Box::new(LocalDriver),
ContextConfig::Ssh { host, user, port } => Box::new(SshDriver {
host: host.clone(),
user: user.clone(),
port: *port,
}),
ContextConfig::Schroot { name, .. } => Box::new(SchrootDriver {
name: name.clone(),
session: std::sync::Mutex::new(None),
parent: self.parent.clone(),
}),
ContextConfig::Unshare { path, .. } => Box::new(UnshareDriver {
path: path.clone(),
parent: self.parent.clone(),
}),
};
*driver_lock = Some(driver);
}
driver_lock
}
/// Clone a context
pub fn clone_raw(&self) -> Self {
Self {
config: self.config.clone(),
parent: self.parent.clone(),
driver: std::sync::Mutex::new(None),
}
}
}
@@ -85,19 +231,22 @@ impl Context {
/// and call `status()` or `output()`.
///
/// It delegates the actual work to a `ContextDriver`.
pub struct ContextCommand {
driver: Box<dyn ContextDriver>,
pub struct ContextCommand<'a> {
context: &'a Context,
program: String,
args: Vec<String>,
env: Vec<(String, String)>,
cwd: Option<String>,
}
impl ContextCommand {
impl<'a> ContextCommand<'a> {
/// Add an argument to current command
pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self {
self.args.push(arg.as_ref().to_string_lossy().to_string());
self
}
// Support chaining args
/// Add multiple command arguments
pub fn args<I, S>(&mut self, args: I) -> &mut Self
where
I: IntoIterator<Item = S>,
@@ -109,12 +258,55 @@ impl ContextCommand {
self
}
pub fn status(&mut self) -> io::Result<std::process::ExitStatus> {
self.driver.run(&self.program, &self.args)
/// Set environment variable for command
pub fn env<K, V>(&mut self, key: K, val: V) -> &mut Self
where
K: AsRef<OsStr>,
V: AsRef<OsStr>,
{
self.env.push((
key.as_ref().to_string_lossy().to_string(),
val.as_ref().to_string_lossy().to_string(),
));
self
}
// Capture output
/// Set multiple environment variables for command
pub fn envs<I, K, V>(&mut self, vars: I) -> &mut Self
where
I: IntoIterator<Item = (K, V)>,
K: AsRef<OsStr>,
V: AsRef<OsStr>,
{
for (key, val) in vars {
self.env(key, val);
}
self
}
/// Set current working directory for command
pub fn current_dir<P: AsRef<OsStr>>(&mut self, dir: P) -> &mut Self {
self.cwd = Some(dir.as_ref().to_string_lossy().to_string());
self
}
/// Run command and obtain exit status
pub fn status(&mut self) -> io::Result<std::process::ExitStatus> {
self.context.driver().as_ref().unwrap().run(
&self.program,
&self.args,
&self.env,
self.cwd.as_deref(),
)
}
/// Run command, capturing output
pub fn output(&mut self) -> io::Result<std::process::Output> {
self.driver.run_output(&self.program, &self.args)
self.context.driver().as_ref().unwrap().run_output(
&self.program,
&self.args,
&self.env,
self.cwd.as_deref(),
)
}
}
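
A minimal sketch of how this builder is used; the `pkh::context` crate path is an assumption (in-tree code goes through `crate::context`):

```rust
// Sketch only: the crate/module path is assumed.
use pkh::context::{Context, ContextConfig};

fn main() -> std::io::Result<()> {
    // Run a command through a context; the same builder works for the
    // ssh, schroot and unshare contexts.
    let ctx = Context::new(ContextConfig::Local);
    let output = ctx
        .command("uname")
        .arg("-m")
        .env("LC_ALL", "C")
        .current_dir("/tmp")
        .output()?;
    println!("arch: {}", String::from_utf8_lossy(&output.stdout).trim());
    Ok(())
}
```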


@@ -4,21 +4,55 @@ use super::api::ContextDriver;
use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::SystemTime;
pub struct LocalDriver;
impl ContextDriver for LocalDriver {
fn ensure_available(&self, src: &Path, _dest_root: &str) -> io::Result<PathBuf> {
src.canonicalize()
fn ensure_available(&self, src: &Path, dest_root: &str) -> io::Result<PathBuf> {
let dest_root_path = Path::new(dest_root);
let dest = dest_root_path.join(src.file_name().unwrap_or(src.as_os_str()));
if src != dest {
// Copy src inside dest_root
self.copy_path(src, &dest)?;
}
dest.canonicalize()
}
fn prepare_work_dir(&self) -> io::Result<String> {
// TODO: Fix that, we should not always use '..' as work directory locally
Ok("..".to_string())
fn create_temp_dir(&self) -> io::Result<String> {
// Generate a unique temporary directory name from a timestamp and an attempt counter
let base_timestamp = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let mut attempt = 0;
loop {
let work_dir_name = if attempt == 0 {
format!("pkh-{}", base_timestamp)
} else {
format!("pkh-{}-{}", base_timestamp, attempt)
};
let temp_dir_path = std::env::temp_dir().join(&work_dir_name);
// Check if directory already exists
if temp_dir_path.exists() {
attempt += 1;
continue;
}
// Create the directory
std::fs::create_dir_all(&temp_dir_path)?;
// Return the path as a string
return Ok(temp_dir_path.to_string_lossy().to_string());
}
}
fn retrieve_path(&self, _src: &Path, _dest: &Path) -> io::Result<()> {
Ok(())
fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
self.copy_path(src, dest)
}
fn list_files(&self, path: &Path) -> io::Result<Vec<PathBuf>> {
@@ -30,11 +64,64 @@ impl ContextDriver for LocalDriver {
Ok(entries)
}
fn run(&self, program: &str, args: &[String]) -> io::Result<std::process::ExitStatus> {
Command::new(program).args(args).status()
fn run(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::ExitStatus> {
let mut cmd = Command::new(program);
cmd.args(args).envs(env.iter().map(|(k, v)| (k, v)));
if let Some(dir) = cwd {
cmd.current_dir(dir);
}
cmd.status()
}
fn run_output(&self, program: &str, args: &[String]) -> io::Result<std::process::Output> {
Command::new(program).args(args).output()
fn run_output(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::Output> {
let mut cmd = Command::new(program);
cmd.args(args).envs(env.iter().map(|(k, v)| (k, v)));
if let Some(dir) = cwd {
cmd.current_dir(dir);
}
cmd.output()
}
fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
copy_dir_recursive(src, dest)
}
fn read_file(&self, path: &Path) -> io::Result<String> {
std::fs::read_to_string(path)
}
fn write_file(&self, path: &Path, content: &str) -> io::Result<()> {
std::fs::write(path, content)
}
fn exists(&self, path: &Path) -> io::Result<bool> {
Ok(path.exists())
}
}
fn copy_dir_recursive(src: &Path, dest: &Path) -> io::Result<()> {
if src.is_dir() {
std::fs::create_dir_all(dest)?;
for entry in std::fs::read_dir(src)? {
let entry = entry?;
let path = entry.path();
let dest_path = dest.join(entry.file_name());
copy_dir_recursive(&path, &dest_path)?;
}
} else {
std::fs::copy(src, dest)?;
}
Ok(())
}


@@ -4,22 +4,40 @@ use std::collections::HashMap;
use std::fs;
use std::io;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::RwLock;
use super::api::Context;
use super::api::{Context, ContextConfig};
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Config {
pub current_context: Option<String>,
pub contexts: HashMap<String, Context>,
pub context: String,
pub contexts: HashMap<String, ContextConfig>,
}
pub struct ContextManager {
config_path: PathBuf,
config: Config,
impl Default for Config {
fn default() -> Self {
let mut contexts = HashMap::new();
contexts.insert("local".to_string(), ContextConfig::Local);
Self {
context: "local".to_string(),
contexts,
}
}
}
/// Helper managing contexts
pub struct ContextManager {
context: RwLock<Arc<Context>>,
config_path: PathBuf,
config: RwLock<Config>,
}
pub static MANAGER: std::sync::LazyLock<ContextManager> =
std::sync::LazyLock::new(|| ContextManager::new().expect("Cannot setup context manager"));
impl ContextManager {
pub fn new() -> io::Result<Self> {
fn new() -> io::Result<Self> {
let proj_dirs = ProjectDirs::from("com", "pkh", "pkh").ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
@@ -31,67 +49,108 @@ impl ContextManager {
let config_path = config_dir.join("contexts.json");
let config = if config_path.exists() {
// Load existing configuration file
let content = fs::read_to_string(&config_path)?;
serde_json::from_str(&content)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?
} else {
let mut cfg = Config::default();
cfg.contexts.insert("local".to_string(), Context::Local);
cfg.current_context = Some("local".to_string());
cfg
// Create a new configuration file
Config::default()
};
Ok(Self {
context: RwLock::new(Arc::new(Self::make_context(
config.context.as_str(),
&config,
))),
config_path,
config,
config: RwLock::new(config),
})
}
/// Obtain current ContextManager configuration
pub fn get_config(&self) -> std::sync::RwLockReadGuard<'_, Config> {
self.config.read().unwrap()
}
/// Make a ContextManager using a specific configuration path
pub fn with_path(path: PathBuf) -> Self {
let config = Config::default();
Self {
context: RwLock::new(Arc::new(Self::make_context("local", &config))),
config_path: path,
config: Config::default(),
config: RwLock::new(config),
}
}
/// Save current context configuration to disk
pub fn save(&self) -> io::Result<()> {
let content = serde_json::to_string_pretty(&self.config)
let config = self.config.read().unwrap();
let content = serde_json::to_string_pretty(&*config)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
fs::write(&self.config_path, content)?;
Ok(())
}
fn make_context(name: &str, config: &Config) -> Context {
let context_config = config
.contexts
.get(name)
.cloned()
.expect("Context not found in config");
Context::new(context_config)
}
/// List contexts from configuration
pub fn list_contexts(&self) -> Vec<String> {
self.config.contexts.keys().cloned().collect()
self.config
.read()
.unwrap()
.contexts
.keys()
.cloned()
.collect()
}
pub fn get_context(&self, name: &str) -> Option<&Context> {
self.config.contexts.get(name)
}
pub fn add_context(&mut self, name: &str, context: Context) -> io::Result<()> {
self.config.contexts.insert(name.to_string(), context);
/// Add a context to configuration
pub fn add_context(&self, name: &str, config: ContextConfig) -> io::Result<()> {
self.config
.write()
.unwrap()
.contexts
.insert(name.to_string(), config);
self.save()
}
pub fn remove_context(&mut self, name: &str) -> io::Result<()> {
if self.config.contexts.remove(name).is_some() {
if self.config.current_context.as_deref() == Some(name) {
self.config.current_context = Some("local".to_string());
if !self.config.contexts.contains_key("local") {
self.config
.contexts
.insert("local".to_string(), Context::Local);
}
/// Remove context from configuration
pub fn remove_context(&self, name: &str) -> io::Result<()> {
let mut config = self.config.write().unwrap();
if name == "local" {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Cannot remove local context",
));
}
if config.contexts.remove(name).is_some() {
// If we are removing the current context, fall back to local
if name == config.context {
config.context = "local".to_string();
self.set_current_ephemeral(Self::make_context("local", &config));
}
drop(config); // Drop write lock before saving
self.save()?;
}
Ok(())
}
pub fn set_current(&mut self, name: &str) -> io::Result<()> {
if self.config.contexts.contains_key(name) {
self.config.current_context = Some(name.to_string());
/// Set current context from name (modifying configuration)
pub fn set_current(&self, name: &str) -> io::Result<()> {
let mut config = self.config.write().unwrap();
if config.contexts.contains_key(name) {
config.context = name.to_string();
self.set_current_ephemeral(Self::make_context(name, &config));
drop(config); // Drop write lock before saving
self.save()?;
Ok(())
} else {
@@ -102,16 +161,19 @@ impl ContextManager {
}
}
pub fn current(&self) -> Context {
self.config
.current_context
.as_deref()
.and_then(|name| self.config.contexts.get(name))
.cloned()
.unwrap_or(Context::Local)
/// Set current context, without modifying configuration
pub fn set_current_ephemeral(&self, context: Context) {
*self.context.write().unwrap() = context.into();
}
pub fn current_name(&self) -> Option<String> {
self.config.current_context.clone()
/// Obtain current context handle
pub fn current(&self) -> Arc<Context> {
self.context.read().unwrap().clone()
}
/// Obtain current context name
/// Reads the saved configuration, so an ephemeral current context is not reflected
pub fn current_name(&self) -> String {
self.config.read().unwrap().context.clone()
}
}
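
For reference, the shape of a saved contexts.json entry follows from the serde attributes on `ContextConfig` (`#[serde(tag = "type")]` plus the per-variant renames). A sketch, with an assumed crate path and made-up values:

```rust
// Sketch only: the crate/module path and the host values are assumptions.
use pkh::context::ContextConfig;

fn main() {
    let cfg = ContextConfig::Ssh {
        host: "10.0.0.1".into(),
        user: Some("admin".into()),
        port: Some(2222),
    };
    // Prints: {"type":"ssh","host":"10.0.0.1","user":"admin","port":2222}
    println!("{}", serde_json::to_string(&cfg).unwrap());
}
```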


@@ -1,16 +1,22 @@
mod api;
mod local;
mod manager;
mod schroot;
mod ssh;
mod unshare;
pub use api::{Context, ContextCommand};
pub use api::{Context, ContextCommand, ContextConfig};
pub use manager::ContextManager;
use std::sync::Arc;
pub fn current_context() -> Context {
match ContextManager::new() {
Ok(mgr) => mgr.current(),
Err(_) => Context::Local,
}
/// Obtain global context manager
pub fn manager() -> &'static ContextManager {
&manager::MANAGER
}
/// Obtain current context
pub fn current() -> Arc<Context> {
manager::MANAGER.current()
}
#[cfg(test)]
@@ -25,11 +31,16 @@ mod tests {
let src_file = temp_dir.path().join("src.txt");
fs::write(&src_file, "local").unwrap();
let ctx = Context::Local;
let ctx = Context::new(ContextConfig::Local);
let dest = ctx.ensure_available(&src_file, "/tmp").unwrap();
// Should return canonical path
assert_eq!(dest, src_file.canonicalize().unwrap());
// Should return a path that exists and has the same content
assert!(dest.exists());
let content = fs::read_to_string(&dest).unwrap();
assert_eq!(content, "local");
// The dest should be in the /tmp directory
assert!(dest.starts_with("/tmp"));
}
#[test]
@@ -37,18 +48,17 @@ mod tests {
let temp_file = NamedTempFile::new().unwrap();
let path = temp_file.path().to_path_buf();
let mut mgr = ContextManager::with_path(path.clone());
let mgr = ContextManager::with_path(path.clone());
// Add
let ssh_ctx = Context::Ssh {
let ssh_cfg = ContextConfig::Ssh {
host: "10.0.0.1".into(),
user: Some("admin".into()),
port: Some(2222),
};
mgr.add_context("myserver", ssh_ctx.clone()).unwrap();
mgr.add_context("myserver", ssh_cfg.clone()).unwrap();
assert!(mgr.get_context("myserver").is_some());
assert_eq!(mgr.get_context("myserver").unwrap(), &ssh_ctx);
assert!(mgr.list_contexts().contains(&"myserver".to_string()));
// List
let list = mgr.list_contexts();
@@ -56,13 +66,11 @@ mod tests {
// Set Current
mgr.set_current("myserver").unwrap();
assert_eq!(mgr.current(), ssh_ctx);
assert_eq!(mgr.current_name(), Some("myserver".to_string()));
assert_eq!(mgr.current_name(), "myserver".to_string());
// Remove
mgr.remove_context("myserver").unwrap();
assert!(mgr.get_context("myserver").is_none());
assert_eq!(mgr.current(), Context::Local);
assert!(!mgr.list_contexts().contains(&"myserver".to_string()));
}
#[test]
@@ -71,18 +79,72 @@ mod tests {
let config_path = temp_dir.path().join("contexts.json");
{
let mut mgr = ContextManager::with_path(config_path.clone());
mgr.add_context("persistent", Context::Local).unwrap();
let mgr = ContextManager::with_path(config_path.clone());
mgr.add_context("persistent", ContextConfig::Local).unwrap();
mgr.set_current("persistent").unwrap();
}
let content = fs::read_to_string(&config_path).unwrap();
let loaded_config: super::manager::Config = serde_json::from_str(&content).unwrap();
assert_eq!(
loaded_config.current_context,
Some("persistent".to_string())
);
assert_eq!(loaded_config.context, "persistent".to_string());
assert!(loaded_config.contexts.contains_key("persistent"));
}
#[test]
fn test_context_fallback_on_removal() {
let temp_file = NamedTempFile::new().unwrap();
let path = temp_file.path().to_path_buf();
let mgr = ContextManager::with_path(path);
// 1. Add and set a context
mgr.add_context("temp", ContextConfig::Local).unwrap();
mgr.set_current("temp").unwrap();
assert_eq!(mgr.current_name(), "temp");
// 2. Remove it
mgr.remove_context("temp").unwrap();
// 3. Should have fallen back to local
assert_eq!(mgr.current_name(), "local");
assert!(mgr.list_contexts().contains(&"local".to_string()));
}
#[test]
fn test_context_file_ops() {
let temp_dir = tempfile::tempdir().unwrap();
let ctx = Context::new(ContextConfig::Local);
let file_path = temp_dir.path().join("test.txt");
let content = "hello world";
// 1. Write file
ctx.write_file(&file_path, content).unwrap();
// 2. Read file
let read_content = ctx.read_file(&file_path).unwrap();
assert_eq!(read_content, content);
// 3. Copy path
let dest_path = temp_dir.path().join("test_copy.txt");
ctx.copy_path(&file_path, &dest_path).unwrap();
let copied_content = ctx.read_file(&dest_path).unwrap();
assert_eq!(copied_content, content);
// 4. Recursive copy
let subdir = temp_dir.path().join("subdir");
std::fs::create_dir_all(&subdir).unwrap();
let subfile = subdir.join("subfile.txt");
ctx.write_file(&subfile, "subcontent").unwrap();
let subdir_copy = temp_dir.path().join("subdir_copy");
ctx.copy_path(&subdir, &subdir_copy).unwrap();
assert!(subdir_copy.exists());
assert!(subdir_copy.join("subfile.txt").exists());
assert_eq!(
ctx.read_file(&subdir_copy.join("subfile.txt")).unwrap(),
"subcontent"
);
}
}

src/context/schroot.rs Normal file (+275)

@@ -0,0 +1,275 @@
/// Schroot context: execute commands in a schroot session
/// Not tested, will need more work!
use super::api::ContextDriver;
use std::io;
use std::path::{Path, PathBuf};
use std::sync::Arc;
pub struct SchrootDriver {
pub name: String,
pub session: std::sync::Mutex<Option<String>>,
pub parent: Option<Arc<super::api::Context>>,
}
use super::api::{Context, ContextConfig};
impl SchrootDriver {
fn parent(&self) -> Arc<Context> {
self.parent
.clone()
.unwrap_or_else(|| Arc::new(Context::new(ContextConfig::Local)))
}
fn ensure_session(&self) -> io::Result<String> {
let mut session_lock = self.session.lock().unwrap();
if let Some(id) = session_lock.as_ref() {
return Ok(id.clone());
}
// Create new session
let output = self
.parent()
.command("schroot")
.arg("-b")
.arg("-c")
.arg(&self.name)
.output()?;
if !output.status.success() {
return Err(io::Error::other(format!(
"Failed to create schroot session: {}",
String::from_utf8_lossy(&output.stderr)
)));
}
let session_id = String::from_utf8(output.stdout)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?
.trim()
.to_string();
*session_lock = Some(session_id.clone());
Ok(session_id)
}
fn get_session_location(&self, session_id: &str) -> io::Result<String> {
let output = self
.parent()
.command("schroot")
.arg("--location")
.arg("-c")
.arg(session_id)
.output()?;
if !output.status.success() {
return Err(io::Error::other(format!(
"Failed to get schroot location: {}",
String::from_utf8_lossy(&output.stderr)
)));
}
Ok(String::from_utf8(output.stdout)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?
.trim()
.to_string())
}
}
impl ContextDriver for SchrootDriver {
fn ensure_available(&self, src: &Path, _dest_root: &str) -> io::Result<PathBuf> {
src.canonicalize()
}
fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
let session_id = self.ensure_session()?;
let location = self.get_session_location(&session_id)?;
let path_in_chroot = src.strip_prefix("/").unwrap_or(src);
let host_src = Path::new(&location).join(path_in_chroot);
self.parent().retrieve_path(&host_src, dest)
}
fn list_files(&self, path: &Path) -> io::Result<Vec<PathBuf>> {
let session_id = self.ensure_session()?;
let location = self.get_session_location(&session_id)?;
let path_in_chroot = path.strip_prefix("/").unwrap_or(path);
let host_path = Path::new(&location).join(path_in_chroot);
let files = self.parent().list_files(&host_path)?;
let mut chroot_files = Vec::new();
// TODO: Check if we *need* to strip the prefix.
// If we don't, we can just return `files`.
for file in files {
if let Ok(rel) = file.strip_prefix(&location) {
chroot_files.push(Path::new("/").join(rel));
} else {
chroot_files.push(file);
}
}
Ok(chroot_files)
}
fn run(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::ExitStatus> {
let session_id = self.ensure_session()?;
// Construct the schroot command
// schroot -p -r -c session_id -- program args...
// If cwd is specified, we wrap in sh -c "cd cwd && ..."
let mut command_args = vec![
"-p".to_string(),
"-r".to_string(),
"-c".to_string(),
session_id,
"--".to_string(),
];
let mut actual_program = program.to_string();
let mut actual_args = args.to_vec();
// Simplest: Wrap everything in `sh -c` if CWD or ENV is needed.
if cwd.is_some() || !env.is_empty() {
let mut shell_cmd = String::new();
if let Some(dir) = cwd {
shell_cmd.push_str(&format!("cd {} && ", dir));
}
if !env.is_empty() {
shell_cmd.push_str("env ");
for (k, v) in env {
shell_cmd.push_str(&format!("{}={} ", k, v));
}
}
shell_cmd.push_str(&format!("{} {}", program, args.join(" ")));
actual_program = "sh".to_string();
actual_args = vec!["-c".to_string(), shell_cmd];
}
command_args.push(actual_program);
command_args.extend(actual_args);
self.parent().command("schroot").args(command_args).status()
}
fn run_output(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::Output> {
let session_id = self.ensure_session()?;
let mut command_args = vec![
"-r".to_string(),
"-c".to_string(),
session_id,
"--".to_string(),
];
let mut actual_program = program.to_string();
let mut actual_args = args.to_vec();
if cwd.is_some() || !env.is_empty() {
let mut shell_cmd = String::new();
if let Some(dir) = cwd {
shell_cmd.push_str(&format!("cd {} && ", dir));
}
if !env.is_empty() {
shell_cmd.push_str("env ");
for (k, v) in env {
shell_cmd.push_str(&format!("{}={} ", k, v));
}
}
shell_cmd.push_str(&format!("{} {}", program, args.join(" ")));
actual_program = "sh".to_string();
actual_args = vec!["-c".to_string(), shell_cmd];
}
command_args.push(actual_program);
command_args.extend(actual_args);
self.parent().command("schroot").args(command_args).output()
}
fn create_temp_dir(&self) -> io::Result<String> {
let output = self.run_output("mktemp", &["-d".to_string()], &[], None)?;
if !output.status.success() {
return Err(io::Error::other("schroot mktemp failed"));
}
Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
let status = self.run(
"cp",
&[
"-a".to_string(),
src.to_string_lossy().to_string(),
dest.to_string_lossy().to_string(),
],
&[],
None,
)?;
if !status.success() {
return Err(io::Error::other("schroot copy failed"));
}
Ok(())
}
fn read_file(&self, path: &Path) -> io::Result<String> {
let output = self.run_output("cat", &[path.to_string_lossy().to_string()], &[], None)?;
if !output.status.success() {
return Err(io::Error::other(format!(
"schroot read failed: {}",
String::from_utf8_lossy(&output.stderr)
)));
}
String::from_utf8(output.stdout).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
fn write_file(&self, path: &Path, content: &str) -> io::Result<()> {
let status = self.run(
"sh",
&[
"-c".to_string(),
format!(
"echo -ne '{}' > '{}'",
content.replace("'", "'\\''"),
path.to_string_lossy()
),
],
&[],
None,
)?;
if !status.success() {
return Err(io::Error::other("schroot write failed"));
}
Ok(())
}
fn exists(&self, path: &Path) -> io::Result<bool> {
let status = self.run(
"test",
&["-e".to_string(), path.to_string_lossy().to_string()],
&[],
None,
)?;
Ok(status.success())
}
}


@@ -2,7 +2,9 @@
/// Context driver: Copies over SFTP with ssh2, executes commands over ssh2 channels
use super::api::ContextDriver;
use log::debug;
use ssh2;
use std::fs;
use std::io::Write;
use std::io::{self, Read};
use std::net::TcpStream;
#[cfg(unix)]
@@ -90,12 +92,30 @@ impl ContextDriver for SshDriver {
Ok(files)
}
fn run(&self, program: &str, args: &[String]) -> io::Result<std::process::ExitStatus> {
fn run(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::ExitStatus> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let mut channel = sess.channel_session().map_err(io::Error::other)?;
// Construct command line
let mut cmd_line = program.to_string();
// Construct command line with env vars
// TODO: No, use ssh2 channel.set_env
let mut cmd_line = String::new();
for (key, value) in env {
cmd_line.push_str(&format!(
"export {}='{}'; ",
key,
value.replace("'", "'\\''")
));
}
if let Some(dir) = cwd {
cmd_line.push_str(&format!("cd {} && ", dir));
}
cmd_line.push_str(program);
for arg in args {
cmd_line.push(' ');
cmd_line.push_str(arg); // TODO: escape
@@ -119,12 +139,29 @@ impl ContextDriver for SshDriver {
Ok(ExitStatus::from_raw(code))
}
fn run_output(&self, program: &str, args: &[String]) -> io::Result<std::process::Output> {
fn run_output(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::Output> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let mut channel = sess.channel_session().map_err(io::Error::other)?;
// Construct command line
let mut cmd_line = program.to_string();
// Construct command line with env vars
let mut cmd_line = String::new();
for (key, value) in env {
cmd_line.push_str(&format!(
"export {}='{}'; ",
key,
value.replace("'", "'\\''")
));
}
if let Some(dir) = cwd {
cmd_line.push_str(&format!("cd {} && ", dir));
}
cmd_line.push_str(program);
for arg in args {
cmd_line.push(' ');
cmd_line.push_str(arg); // TODO: escape
@@ -155,7 +192,7 @@ impl ContextDriver for SshDriver {
})
}
fn prepare_work_dir(&self) -> io::Result<String> {
fn create_temp_dir(&self) -> io::Result<String> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let mut channel = sess.channel_session().map_err(io::Error::other)?;
@@ -173,6 +210,49 @@ impl ContextDriver for SshDriver {
Ok(stdout.trim().to_string())
}
fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let mut channel = sess.channel_session().map_err(io::Error::other)?;
// TODO: use sftp
let cmd = format!("cp -a {:?} {:?}", src, dest);
debug!("Executing remote copy: {}", cmd);
channel.exec(&cmd).map_err(io::Error::other)?;
channel.wait_close().map_err(io::Error::other)?;
if channel.exit_status().unwrap_or(-1) != 0 {
return Err(io::Error::other(format!("Remote copy failed: {}", cmd)));
}
Ok(())
}
fn read_file(&self, path: &Path) -> io::Result<String> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let sftp = sess.sftp().map_err(io::Error::other)?;
let mut remote_file = sftp.open(path).map_err(io::Error::other)?;
let mut content = String::new();
remote_file.read_to_string(&mut content)?;
Ok(content)
}
fn write_file(&self, path: &Path, content: &str) -> io::Result<()> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let sftp = sess.sftp().map_err(io::Error::other)?;
if let Some(parent) = path.parent() {
let _ = sftp.mkdir(parent, 0o755);
}
let mut remote_file = sftp.create(path).map_err(io::Error::other)?;
remote_file.write_all(content.as_bytes())?;
Ok(())
}
fn exists(&self, path: &Path) -> io::Result<bool> {
let sess = connect_ssh(&self.host, self.user.as_deref(), self.port)?;
let sftp = sess.sftp().map_err(io::Error::other)?;
match sftp.stat(path) {
Ok(_) => Ok(true),
Err(_) => Ok(false),
}
}
}
impl SshDriver {

src/context/unshare.rs Normal file (+218)

@@ -0,0 +1,218 @@
use super::api::{Context, ContextCommand, ContextDriver};
use log::debug;
use std::io;
use std::path::{Path, PathBuf};
use std::sync::Arc;
pub struct UnshareDriver {
pub path: String,
pub parent: Option<Arc<super::api::Context>>,
}
/// Recursively copy a directory and all its contents
fn copy_dir_recursive(src: &Path, dest: &Path) -> io::Result<()> {
// Create the destination directory
std::fs::create_dir_all(dest)?;
// Iterate through the source directory
for entry in std::fs::read_dir(src)? {
let entry = entry?;
let src_path = entry.path();
let dest_path = dest.join(entry.file_name());
if src_path.is_dir() {
// Recursively copy subdirectories
copy_dir_recursive(&src_path, &dest_path)?;
} else {
// Copy files
std::fs::copy(&src_path, &dest_path)?;
}
}
Ok(())
}
impl ContextDriver for UnshareDriver {
fn ensure_available(&self, src: &Path, dest_root: &str) -> io::Result<PathBuf> {
// Construct the destination path inside the chroot
let dest_dir = Path::new(&self.path).join(dest_root.trim_start_matches('/'));
debug!(
"unshare/ensure_available: copy '{}' to '{}'",
src.display(),
dest_dir.display()
);
// Ensure the destination directory exists
std::fs::create_dir_all(&dest_dir)?;
// Get the filename from the source path
let filename = src
.file_name()
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "Invalid source path"))?;
// Construct the full destination path
let dest_path = dest_dir.join(filename);
// Copy the file or directory into the chroot
if src.is_dir() {
copy_dir_recursive(src, &dest_path)?;
debug!(
"Copied directory {} to {}",
src.display(),
dest_path.display()
);
} else {
std::fs::copy(src, &dest_path)?;
debug!("Copied file {} to {}", src.display(), dest_path.display());
}
// Return the path as it appears inside the chroot (without the chroot prefix)
Ok(Path::new(dest_root).join(filename))
}
fn retrieve_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
let host_src = Path::new(&self.path).join(src.to_string_lossy().trim_start_matches('/'));
self.parent().retrieve_path(&host_src, dest)
}
fn list_files(&self, path: &Path) -> io::Result<Vec<PathBuf>> {
let host_path = Path::new(&self.path).join(path.to_string_lossy().trim_start_matches('/'));
let host_entries = self.parent().list_files(&host_path)?;
let mut entries = Vec::new();
let prefix = Path::new(&self.path);
for entry in host_entries {
if let Ok(rel_path) = entry.strip_prefix(prefix) {
entries.push(Path::new("/").join(rel_path));
} else {
entries.push(entry);
}
}
Ok(entries)
}
fn run(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::ExitStatus> {
self.command(program, args, env, cwd).status()
}
fn run_output(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> io::Result<std::process::Output> {
self.command(program, args, env, cwd).output()
}
fn create_temp_dir(&self) -> io::Result<String> {
// Create a temporary directory inside the chroot with unique naming
let base_timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs();
let mut attempt = 0;
loop {
let work_dir_name = if attempt == 0 {
format!("pkh-build-{}", base_timestamp)
} else {
format!("pkh-build-{}-{}", base_timestamp, attempt)
};
let work_dir_inside_chroot = format!("/tmp/{}", work_dir_name);
let host_path = Path::new(&self.path).join("tmp").join(&work_dir_name);
// Check if directory already exists
if host_path.exists() {
attempt += 1;
continue;
}
// Create the directory on the host filesystem
std::fs::create_dir_all(&host_path)?;
debug!(
"Created work directory: {} (host: {})",
work_dir_inside_chroot,
host_path.display()
);
// Return the path as it appears inside the chroot
return Ok(work_dir_inside_chroot);
}
}
fn copy_path(&self, src: &Path, dest: &Path) -> io::Result<()> {
let host_src = Path::new(&self.path).join(src.to_string_lossy().trim_start_matches('/'));
let host_dest = Path::new(&self.path).join(dest.to_string_lossy().trim_start_matches('/'));
self.parent().copy_path(&host_src, &host_dest)
}
fn read_file(&self, path: &Path) -> io::Result<String> {
let host_path = Path::new(&self.path).join(path.to_string_lossy().trim_start_matches('/'));
self.parent().read_file(&host_path)
}
fn write_file(&self, path: &Path, content: &str) -> io::Result<()> {
let host_path = Path::new(&self.path).join(path.to_string_lossy().trim_start_matches('/'));
self.parent().write_file(&host_path, content)
}
fn exists(&self, path: &Path) -> io::Result<bool> {
let host_path = Path::new(&self.path).join(path.to_string_lossy().trim_start_matches('/'));
self.parent().exists(&host_path)
}
}
impl UnshareDriver {
fn parent(&self) -> &Context {
self.parent
.as_ref()
.expect("UnshareDriver requires a parent context")
}
fn command(
&self,
program: &str,
args: &[String],
env: &[(String, String)],
cwd: Option<&str>,
) -> ContextCommand<'_> {
let mut cmd = self.parent().command("unshare");
cmd.envs(env.iter().cloned());
cmd.arg("--map-user=65536")
.arg("--map-group=65536")
.arg("--pid")
.arg("--ipc")
.arg("--uts")
.arg("--user")
.arg("--cgroup")
.arg("--map-auto")
.arg("-r")
.arg("--mount")
.arg("--fork")
.arg("-R")
.arg(&self.path);
if let Some(dir) = cwd {
cmd.arg("-w").arg(dir);
}
cmd.arg("--").arg("bash").arg("-c").arg(format!(
"mount -t proc proc /proc; mount -t devpts devpts /dev/pts; mount --bind /dev/pts/ptmx /dev/ptmx; {} {}",
program,
args.join(" ")
));
cmd
}
}
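// Rough meaning of the unshare(1) invocation assembled above (util-linux semantics):
//   --user with -r / --map-auto     new user namespace, caller mapped to root plus an
//                                   auto-allocated subordinate uid/gid range
//   --pid --fork                    new PID namespace; fork so the payload runs as PID 1
//   --mount, --ipc, --uts, --cgroup additional namespace isolation
//   -R <path>                       chroot into the unpacked buildd tree
//   -w <dir>                        working directory inside the chroot
// The trailing `bash -c` mounts /proc and /dev/pts inside the namespaces before
// executing the requested program with its arguments.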


@@ -1,157 +0,0 @@
use crate::context::{Context, current_context};
use std::error::Error;
use std::path::{Path, PathBuf};
pub fn build_binary_package(
arch: Option<&str>,
series: Option<&str>,
cwd: Option<&Path>,
) -> Result<(), Box<dyn Error>> {
let cwd = cwd.unwrap_or_else(|| Path::new("."));
// Parse changelog to get package name and version
let changelog_path = cwd.join("debian/changelog");
let (package, version, _series) = crate::changelog::parse_changelog_header(&changelog_path)?;
// Find .dsc file
let dsc_path = find_dsc_file(cwd, &package, &version)?;
println!("Building {} using sbuild...", dsc_path.display());
// Identify all related files from .dsc
let mut files_to_ensure = get_dsc_related_files(&dsc_path)?;
// Ensure dsc itself is included (usually first)
if !files_to_ensure.contains(&dsc_path) {
files_to_ensure.insert(0, dsc_path.clone());
}
// Prepare Environment
let ctx = current_context();
let build_root = ctx.prepare_work_dir()?;
// Ensure availability of all needed files for the build
let remote_dsc_path = upload_package_files(&ctx, &files_to_ensure, &build_root, &dsc_path)?;
println!(
"Building {} on {}...",
remote_dsc_path.display(),
build_root
);
// Run sbuild
run_sbuild(&ctx, &remote_dsc_path, arch, series, &build_root)?;
// Retrieve artifacts
// Always retrieve to the directory containing the .dsc file
let local_output_dir = dsc_path
.parent()
.ok_or("Could not determine parent directory of dsc file")?;
println!("Retrieving artifacts to {}...", local_output_dir.display());
// Only retrieve .deb files
let remote_files = ctx.list_files(Path::new(&build_root))?;
for remote_file in remote_files {
if remote_file.extension().is_some_and(|ext| ext == "deb") {
let file_name = remote_file.file_name().ok_or("Invalid remote filename")?;
let local_dest = local_output_dir.join(file_name);
ctx.retrieve_path(&remote_file, &local_dest)?;
}
}
Ok(())
}
fn find_dsc_file(cwd: &Path, package: &str, version: &str) -> Result<PathBuf, Box<dyn Error>> {
let parent = cwd.parent().ok_or("Cannot find parent directory")?;
let dsc_name = format!("{}_{}.dsc", package, version);
let dsc_path = parent.join(&dsc_name);
if !dsc_path.exists() {
return Err(format!("Could not find .dsc file at {}", dsc_path.display()).into());
}
Ok(dsc_path)
}
fn get_dsc_related_files(dsc_path: &Path) -> Result<Vec<PathBuf>, Box<dyn Error>> {
let content = std::fs::read_to_string(dsc_path)?;
let parent = dsc_path.parent().unwrap(); // dsc_path exists so parent exists
let mut files = Vec::new();
let mut in_files = false;
for line in content.lines() {
if line.starts_with("Files:") {
in_files = true;
continue;
}
if in_files {
if line.starts_with(' ') {
let parts: Vec<&str> = line.split_whitespace().collect();
if parts.len() >= 3 {
let filename = parts[2];
let filepath = parent.join(filename);
if filepath.exists() {
files.push(filepath);
} else {
return Err(
format!("Referenced file {} not found", filepath.display()).into()
);
}
}
} else {
in_files = false;
}
}
}
Ok(files)
}
fn upload_package_files(
ctx: &Context,
files: &[PathBuf],
dest_root: &str,
local_dsc_path: &Path,
) -> Result<PathBuf, Box<dyn Error>> {
let mut remote_dsc_path = PathBuf::new();
for file in files {
let remote_path = ctx.ensure_available(file, dest_root)?;
// Check if this is the dsc file by comparing file names
if let (Some(f_name), Some(dsc_name)) = (file.file_name(), local_dsc_path.file_name())
&& f_name == dsc_name
{
remote_dsc_path = remote_path;
}
}
if remote_dsc_path.as_os_str().is_empty() {
return Err("Failed to determine remote path for .dsc file".into());
}
Ok(remote_dsc_path)
}
fn run_sbuild(
ctx: &Context,
dsc_path: &Path,
arch: Option<&str>,
series: Option<&str>,
output_dir: &str,
) -> Result<(), Box<dyn Error>> {
let mut cmd = ctx.command("sbuild");
cmd.arg("--chroot-mode=unshare");
if let Some(a) = arch {
cmd.arg(format!("--arch={}", a));
}
if let Some(s) = series {
cmd.arg(format!("--dist={}", s));
}
// Add output directory argument
cmd.arg(format!("--build-dir={}", output_dir));
let status = cmd.arg(dsc_path).status()?;
if !status.success() {
return Err(format!("sbuild failed with status: {}", status).into());
}
Ok(())
}

src/deb/cross.rs (new file)

@@ -0,0 +1,149 @@
use crate::context;
use std::collections::HashMap;
use std::error::Error;
/// Set environment variables for cross-compilation
pub fn setup_environment(
env: &mut HashMap<String, String>,
arch: &str,
) -> Result<(), Box<dyn Error>> {
let dpkg_architecture = String::from_utf8(
context::current()
.command("dpkg-architecture")
.arg("-a")
.arg(arch)
.output()?
.stdout,
)?;
let env_var_regex = regex::Regex::new(r"(?<key>.*)=(?<value>.*)").unwrap();
for l in dpkg_architecture.lines() {
let capture = env_var_regex.captures(l).unwrap();
let key = capture.name("key").unwrap().as_str().to_string();
let value = capture.name("value").unwrap().as_str().to_string();
env.insert(key.clone(), value.clone());
if key == "DEB_HOST_GNU_TYPE" {
env.insert("CROSS_COMPILE".to_string(), format!("{value}-"));
}
}
env.insert("DEB_BUILD_PROFILES".to_string(), "cross".to_string());
env.insert("DEB_BUILD_OPTIONS".to_string(), "nocheck".to_string());
Ok(())
}
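// For illustration, `dpkg-architecture -a riscv64` prints KEY=VALUE pairs such as
//   DEB_HOST_ARCH=riscv64
//   DEB_HOST_GNU_TYPE=riscv64-linux-gnu
// so the loop above would export those variables plus
//   CROSS_COMPILE=riscv64-linux-gnu-
// alongside DEB_BUILD_PROFILES=cross and DEB_BUILD_OPTIONS=nocheck.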
/// Ensure that repositories for target architecture are available
/// This also handles the 'ports.ubuntu.com' vs 'archive.ubuntu.com' split on Ubuntu
pub fn ensure_repositories(arch: &str, series: &str) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
let local_arch = crate::get_current_arch();
// Add target ('host') architecture
ctx.command("dpkg")
.arg("--add-architecture")
.arg(arch)
.status()?;
// Check if we are on Ubuntu
let os_release = String::from_utf8(ctx.command("cat").arg("/etc/os-release").output()?.stdout)?;
if !os_release.contains("ID=ubuntu") {
return Ok(());
}
// Load existing sources
let mut sources = crate::apt::sources::load(Some(ctx.clone()))?;
// Ensure all components are enabled for the primary architecture
for source in &mut sources {
if source.uri.contains("archive.ubuntu.com") || source.uri.contains("security.ubuntu.com") {
// Scope to local_arch if not already scoped
if source.architectures.is_empty() {
source.architectures.push(local_arch.clone());
}
// Ensure all components are present
let required_components = ["main", "restricted", "universe", "multiverse"];
for &comp in &required_components {
if !source.components.contains(&comp.to_string()) {
source.components.push(comp.to_string());
}
}
// Ensure all suites (pockets) are enabled, excluding 'proposed'
let required_suites = [
series.to_string(),
format!("{}-updates", series),
format!("{}-backports", series),
format!("{}-security", series),
];
for suite in required_suites {
if !source.suite.contains(&suite) {
source.suite.push(suite);
}
}
}
}
// Check if ports repository already exists for the target architecture
let has_ports = sources
.iter()
.any(|s| s.uri.contains("ports.ubuntu.com") && s.architectures.contains(&arch.to_string()));
if !has_ports {
// Add ports repository for the target architecture
let ports_entry = crate::apt::sources::SourceEntry {
enabled: true,
components: vec![
"main".to_string(),
"restricted".to_string(),
"universe".to_string(),
"multiverse".to_string(),
],
architectures: vec![arch.to_string()],
uri: "http://ports.ubuntu.com/ubuntu-ports".to_string(),
suite: vec![
format!("{series}"),
format!("{series}-updates"),
format!("{series}-backports"),
format!("{series}-security"),
],
};
sources.push(ports_entry);
}
// Save the updated sources
// Try to save in DEB822 format first, fall back to legacy format
let deb822_path = "/etc/apt/sources.list.d/ubuntu.sources";
if ctx
.command("test")
.arg("-f")
.arg(deb822_path)
.status()?
.success()
{
// For DEB822 format, we need to reconstruct the file content
let mut content = String::new();
for source in &sources {
if !source.enabled {
continue;
}
content.push_str("Types: deb\n");
content.push_str(&format!("URIs: {}\n", source.uri));
content.push_str(&format!("Suites: {}\n", source.suite.join(" ")));
content.push_str(&format!("Components: {}\n", source.components.join(" ")));
content.push_str("Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg\n");
content.push_str(&format!(
"Architectures: {}\n",
source.architectures.join(" ")
));
content.push('\n');
}
ctx.write_file(std::path::Path::new(deb822_path), &content)?;
} else {
// Fall back to legacy format
crate::apt::sources::save_legacy(Some(ctx.clone()), sources, "/etc/apt/sources.list")?;
}
Ok(())
}
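// For illustration, with arch "riscv64" and series "noble" the ports entry added
// above is serialised (in the DEB822 branch) roughly as:
//   Types: deb
//   URIs: http://ports.ubuntu.com/ubuntu-ports
//   Suites: noble noble-updates noble-backports noble-security
//   Components: main restricted universe multiverse
//   Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg
//   Architectures: riscv64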

src/deb/ephemeral.rs (new file)

@@ -0,0 +1,207 @@
use crate::context;
use crate::context::{Context, ContextConfig};
use directories::ProjectDirs;
use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};
use tar::Archive;
use xz2::read::XzDecoder;
/// An ephemeral unshare context guard that creates and manages a temporary chroot environment
/// for building packages with unshare permissions.
pub struct EphemeralContextGuard {
previous_context: String,
chroot_path: PathBuf,
}
impl EphemeralContextGuard {
/// Create a new ephemeral unshare context for the specified series
pub fn new(series: &str) -> Result<Self, Box<dyn Error>> {
let current_context_name = context::manager().current_name();
// Create a temporary directory for the chroot
let chroot_path_str = context::current().create_temp_dir()?;
let chroot_path = PathBuf::from(chroot_path_str);
log::debug!(
"Creating new chroot for {} at {}...",
series,
chroot_path.display()
);
// Download and extract the chroot tarball
Self::download_and_extract_chroot(series, &chroot_path)?;
// Switch to an ephemeral context to build the package in the chroot
context::manager().set_current_ephemeral(Context::new(ContextConfig::Unshare {
path: chroot_path.to_string_lossy().to_string(),
parent: Some(current_context_name.clone()),
}));
Ok(Self {
previous_context: current_context_name,
chroot_path,
})
}
fn download_and_extract_chroot(
series: &str,
chroot_path: &PathBuf,
) -> Result<(), Box<dyn Error>> {
// Get project directories for caching
let proj_dirs = ProjectDirs::from("com", "pkh", "pkh")
.ok_or("Could not determine project directories")?;
let cache_dir = proj_dirs.cache_dir();
fs::create_dir_all(cache_dir)?;
// Create tarball filename based on series
let tarball_filename = format!("{}-buildd.tar.xz", series);
let tarball_path = cache_dir.join(&tarball_filename);
// Check for existing lockfile, and wait for a timeout if it exists
// After timeout, warn the user
let lockfile_path = tarball_path.with_extension("lock");
let ctx = context::current();
// Check if lockfile exists and wait for it to be removed
let mut wait_time = 0;
let timeout = 300; // 5 minutes timeout
let poll_interval = 5; // Check every 5 seconds
while ctx.exists(&lockfile_path)? {
if wait_time >= timeout {
log::warn!(
"Lockfile {} exists and has been present for more than {} seconds. \
Another process may be downloading the chroot tarball. Continuing anyway...",
lockfile_path.display(),
timeout
);
break;
}
log::info!(
"Lockfile {} exists, waiting for download to complete... ({}s/{})",
lockfile_path.display(),
wait_time,
timeout
);
std::thread::sleep(std::time::Duration::from_secs(poll_interval));
wait_time += poll_interval;
}
// Download tarball if it doesn't exist
if !tarball_path.exists() {
log::debug!("Downloading chroot tarball for {}...", series);
Self::download_chroot_tarball(series, &tarball_path)?;
} else {
log::debug!("Using cached chroot tarball for {}", series);
}
// Extract tarball to chroot directory
log::debug!("Extracting chroot tarball to {}...", chroot_path.display());
Self::extract_tarball(&tarball_path, chroot_path)?;
Ok(())
}
fn download_chroot_tarball(series: &str, tarball_path: &Path) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
// Create a lock file to make sure that no one tries to use the file while it's not fully downloaded
let lockfile_path = tarball_path.with_extension("lock");
ctx.command("touch")
.arg(lockfile_path.to_string_lossy().to_string())
.status()?;
// Use mmdebstrap to download the tarball to the cache directory
let status = ctx
.command("mmdebstrap")
.arg("--variant=buildd")
.arg("--mode=unshare")
.arg("--include=mount")
.arg("--format=tar")
.arg(series)
.arg(tarball_path.to_string_lossy().to_string())
.status()?;
if !status.success() {
// Remove file on error
let _ = ctx
.command("rm")
.arg("-f")
.arg(tarball_path.to_string_lossy().to_string())
.status();
let _ = ctx
.command("rm")
.arg("-f")
.arg(lockfile_path.to_string_lossy().to_string())
.status();
return Err(format!("Failed to download chroot tarball for series {}", series).into());
}
// Remove lockfile: tarball is fully downloaded
let _ = ctx
.command("rm")
.arg("-f")
.arg(lockfile_path.to_string_lossy().to_string())
.status();
Ok(())
}
fn extract_tarball(
tarball_path: &PathBuf,
chroot_path: &PathBuf,
) -> Result<(), Box<dyn Error>> {
// Create the chroot directory
fs::create_dir_all(chroot_path)?;
// Open the tarball file
let tarball_file = std::fs::File::open(tarball_path)?;
let xz_decoder = XzDecoder::new(tarball_file);
let mut archive = Archive::new(xz_decoder);
// Extract all files to the chroot directory
archive.unpack(chroot_path)?;
Ok(())
}
}
impl Drop for EphemeralContextGuard {
fn drop(&mut self) {
log::debug!("Cleaning up ephemeral context ({:?})...", &self.chroot_path);
// Reset to normal context
if let Err(e) = context::manager().set_current(&self.previous_context) {
log::error!("Failed to restore context {}: {}", self.previous_context, e);
}
// Remove chroot directory
// We use the restored context to execute the cleanup command
let result = context::current()
.command("sudo")
.arg("rm")
.arg("-rf")
.arg(&self.chroot_path)
.status();
match result {
Ok(status) => {
if !status.success() {
log::error!(
"Failed to remove chroot directory {}",
self.chroot_path.display()
);
}
}
Err(e) => {
log::error!(
"Failed to execute cleanup command for {}: {}",
self.chroot_path.display(),
e
);
}
}
}
}

src/deb/local.rs (new file)

@@ -0,0 +1,191 @@
/// Local binary package building
/// Directly calling 'debian/rules' in current context
use crate::context;
use crate::deb::find_dsc_file;
use std::collections::HashMap;
use std::error::Error;
use std::path::Path;
use crate::apt;
use crate::deb::cross;
pub fn build(
package: &str,
version: &str,
arch: &str,
series: &str,
build_root: &str,
cross: bool,
) -> Result<(), Box<dyn Error>> {
// Environment
let mut env = HashMap::<String, String>::new();
env.insert("LANG".to_string(), "C".to_string());
env.insert("DEBIAN_FRONTEND".to_string(), "noninteractive".to_string());
let ctx = context::current();
if cross {
log::debug!("Setting up environment for local cross build...");
cross::setup_environment(&mut env, arch)?;
cross::ensure_repositories(arch, series)?;
}
// UBUNTU: Ensure 'universe' repository is enabled
let mut sources = apt::sources::load(None)?;
let mut modified = false;
for source in &mut sources {
if source.uri.contains("ubuntu") && !source.components.contains(&"universe".to_string()) {
source.components.push("universe".to_string());
modified = true;
}
}
if modified {
apt::sources::save_legacy(None, sources, "/etc/apt/sources.list")?;
}
// Update package lists
log::debug!("Updating package lists for local build...");
let status = ctx
.command("apt-get")
.envs(env.clone())
.arg("update")
.status()?;
if !status.success() {
return Err(
"Could not execute apt-get update. If this is a local build, try executing with sudo."
.into(),
);
}
// Install essential packages
log::debug!("Installing essential packages for local build...");
let mut cmd = ctx.command("apt-get");
cmd.envs(env.clone())
.arg("-y")
.arg("install")
.arg("build-essential")
.arg("dose-builddebcheck")
.arg("fakeroot");
if cross {
cmd.arg(format!("crossbuild-essential-{arch}"));
cmd.arg(format!("libc6-{arch}-cross"));
cmd.arg(format!("libc6-dev-{arch}-cross"));
cmd.arg("dpkg-cross");
cmd.arg(format!("libc6:{arch}"));
cmd.arg(format!("libc6-dev:{arch}"));
}
let status = cmd.status()?;
if !status.success() {
return Err("Could not install essential packages for the build".into());
}
// Find the actual package directory
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?;
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
// Install build dependencies
log::debug!("Installing build dependencies...");
let mut cmd = ctx.command("apt-get");
cmd.current_dir(package_dir_str)
.envs(env.clone())
.arg("-y")
.arg("build-dep");
if cross {
cmd.arg(format!("--host-architecture={arch}"));
}
let status = cmd.arg("./").status()?;
// If build-dep fails, we try to explain the failure using dose-builddebcheck
if !status.success() {
dose3_explain_dependencies(package, version, arch, build_root, cross)?;
return Err("Could not install build-dependencies for the build".into());
}
// Run the build step
log::debug!("Building (debian/rules build) package...");
let status = ctx
.command("debian/rules")
.current_dir(package_dir_str)
.envs(env.clone())
.arg("build")
.status()?;
if !status.success() {
return Err("Error while building the package".into());
}
// Run the 'binary' step to produce deb
let status = ctx
.command("fakeroot")
.current_dir(package_dir_str)
.envs(env.clone())
.arg("debian/rules")
.arg("binary")
.status()?;
if !status.success() {
return Err(
"Error while building the binary artifacts (.deb) from the built package".into(),
);
}
Ok(())
}
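// Roughly, a native build performed by the function above corresponds to running,
// inside the current context with LANG=C and DEBIAN_FRONTEND=noninteractive:
//   apt-get update
//   apt-get -y install build-essential dose-builddebcheck fakeroot
//   (cd <package_dir> && apt-get -y build-dep ./)
//   (cd <package_dir> && debian/rules build)
//   (cd <package_dir> && fakeroot debian/rules binary)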
fn dose3_explain_dependencies(
package: &str,
version: &str,
arch: &str,
build_root: &str,
cross: bool,
) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
// Construct the list of Packages files
let mut bg_args = Vec::new();
let mut cmd = ctx.command("apt-get");
cmd.arg("indextargets")
.arg("--format")
.arg("$(FILENAME)")
.arg("Created-By: Packages");
let output = cmd.output()?;
if output.status.success() {
let filenames = String::from_utf8_lossy(&output.stdout);
for file in filenames.lines() {
let file = file.trim();
if !file.is_empty() {
bg_args.push(file.to_string());
}
}
}
// Transform the dsc file into a Packages-style stanza (replacing 'Source' with 'Package')
let dsc_path = find_dsc_file(build_root, package, version)?;
let mut dsc_content = ctx.read_file(&dsc_path)?;
dsc_content = dsc_content.replace("Source", "Package");
ctx.write_file(
Path::new(&format!("{build_root}/dsc-processed")),
&dsc_content,
)?;
// Call dose-builddebcheck
let local_arch = crate::get_current_arch();
let mut cmd = ctx.command("dose-builddebcheck");
cmd.arg("--verbose")
.arg("--failures")
.arg("--explain")
.arg("--summary")
.arg(format!("--deb-native-arch={}", local_arch));
if cross {
cmd.arg(format!("--deb-host-arch={}", arch))
.arg("--deb-profiles=cross")
.arg(format!("--deb-foreign-archs={}", arch));
}
cmd.args(bg_args).arg(format!("{build_root}/dsc-processed"));
cmd.status()?;
Ok(())
}

src/deb/mod.rs (new file)

@@ -0,0 +1,269 @@
mod cross;
mod ephemeral;
mod local;
mod sbuild;
use crate::context;
use std::error::Error;
use std::path::{Path, PathBuf};
/// Build mode for the binary build
#[derive(PartialEq)]
pub enum BuildMode {
/// Use `sbuild` for the build, configured in unshare mode
Sbuild,
/// Local build, directly on the context
Local,
}
/// Build package in 'cwd' to a .deb
pub fn build_binary_package(
arch: Option<&str>,
series: Option<&str>,
cwd: Option<&Path>,
cross: bool,
mode: Option<BuildMode>,
) -> Result<(), Box<dyn Error>> {
let cwd = cwd.unwrap_or_else(|| Path::new("."));
// Parse changelog to get package name, version and series
let changelog_path = cwd.join("debian/changelog");
let (package, version, package_series) =
crate::changelog::parse_changelog_header(&changelog_path)?;
let series = if let Some(s) = series {
s
} else {
&package_series
};
let current_arch = crate::get_current_arch();
let arch = arch.unwrap_or(&current_arch);
// Make sure we select a specific mode, either using user-requested
// or by using default for user-supplied parameters
let mode = if let Some(m) = mode {
m
} else {
// By default, we use local build
BuildMode::Local
};
// Create an ephemeral unshare context for all Local builds
let _guard = if mode == BuildMode::Local {
Some(ephemeral::EphemeralContextGuard::new(series)?)
} else {
None
};
// Prepare build directory
let ctx = context::current();
let build_root = ctx.create_temp_dir()?;
// Ensure availability of all needed files for the build
let parent_dir = cwd.parent().ok_or("Cannot find parent directory")?;
ctx.ensure_available(parent_dir, &build_root)?;
let parent_dir_name = parent_dir
.file_name()
.ok_or("Cannot find parent directory name")?;
let build_root = format!("{}/{}", build_root, parent_dir_name.to_str().unwrap());
// Run the build using target build mode
match mode {
BuildMode::Local => local::build(&package, &version, arch, series, &build_root, cross)?,
BuildMode::Sbuild => sbuild::build(&package, &version, arch, series, &build_root, cross)?,
};
// Retrieve produced .deb files
let remote_files = ctx.list_files(Path::new(&build_root))?;
for remote_file in remote_files {
if remote_file.extension().is_some_and(|ext| ext == "deb") {
let file_name = remote_file.file_name().ok_or("Invalid remote filename")?;
let local_dest = parent_dir.join(file_name);
ctx.retrieve_path(&remote_file, &local_dest)?;
}
}
Ok(())
}
/// Find the current package directory by trying several patterns:
/// - package/package
/// - package/package-origversion
/// - package
/// - package-origversion
pub(crate) fn find_package_directory(
parent_dir: &Path,
package: &str,
version: &str,
) -> Result<PathBuf, Box<dyn Error>> {
let ctx = context::current();
// Try package/package pattern first
let package_dir = parent_dir.join(package).join(package);
if ctx.exists(&package_dir)? {
return Ok(package_dir);
}
// Compute origversion from version: remove everything after first '-', after stripping epoch
let version_without_epoch = version.split_once(':').map(|(_, v)| v).unwrap_or(version);
let origversion = version_without_epoch
.split_once('-')
.map(|(v, _)| v)
.unwrap_or(version_without_epoch);
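// e.g. version "1:2.10-3ubuntu1" yields origversion "2.10", so for package "hello"
// the pattern below becomes "hello/hello-2.10"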
// Try package/package-origversion pattern
let package_dir = parent_dir
.join(package)
.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? {
return Ok(package_dir);
}
// Try 'package' only
let package_dir = parent_dir.join(package);
if ctx.exists(&package_dir)? {
return Ok(package_dir);
}
// Try package-origversion only
let package_dir = parent_dir.join(format!("{}-{}", package, origversion));
if ctx.exists(&package_dir)? {
return Ok(package_dir);
}
// List all directories under 'package/' and log them
let package_parent = parent_dir;
if ctx.exists(package_parent)? {
log::debug!(
"Listing all directories under '{}':",
package_parent.display()
);
let entries = ctx.list_files(package_parent)?;
let mut found_dirs = Vec::new();
for entry in entries {
if entry.is_dir() {
if let Some(file_name) = entry.file_name() {
found_dirs.push(file_name.to_string_lossy().into_owned());
}
log::debug!(" - {}", entry.display());
}
}
// If we found directories but none matched our patterns, provide helpful error
if !found_dirs.is_empty() {
return Err(format!(
"Could not find package directory for {} in {}. Found directories: {}",
package,
parent_dir.display(),
found_dirs.join(", ")
)
.into());
}
}
Err(format!(
"Could not find package directory for {} in {}",
package,
parent_dir.display()
)
.into())
}
fn find_dsc_file(
build_root: &str,
package: &str,
version: &str,
) -> Result<PathBuf, Box<dyn Error>> {
// Strip epoch from version (e.g., "1:2.3.4-5" -> "2.3.4-5")
let version_without_epoch = version.split_once(':').map(|(_, v)| v).unwrap_or(version);
let dsc_name = format!("{}_{}.dsc", package, version_without_epoch);
let dsc_path = PathBuf::from(build_root).join(&dsc_name);
// Check if the .dsc file exists in current context
let ctx = context::current();
if !ctx.exists(&dsc_path)? {
return Err(format!("Could not find .dsc file at {}", dsc_path.display()).into());
}
Ok(dsc_path)
}
#[cfg(test)]
mod tests {
use serial_test::serial;
async fn test_build_end_to_end(
package: &str,
series: &str,
dist: Option<&str>,
arch: Option<&str>,
cross: bool,
) {
log::info!(
"Starting end-to-end test for package: {} (series: {}, arch: {:?}, cross: {})",
package,
series,
arch,
cross
);
let temp_dir = tempfile::tempdir().unwrap();
let cwd = temp_dir.path();
log::debug!("Created temporary directory: {}", cwd.display());
log::info!("Pulling package {} from {}...", package, series);
let package_info = crate::package_info::lookup(package, None, Some(series), "", dist, None)
.await
.expect("Cannot lookup package information");
crate::pull::pull(&package_info, Some(cwd), None, true)
.await
.expect("Cannot pull package");
log::info!("Successfully pulled package {}", package);
// Change directory to the package directory
let cwd = crate::deb::find_package_directory(cwd, package, &package_info.stanza.version)
.expect("Cannot find package directory");
log::debug!("Package directory: {}", cwd.display());
log::info!("Starting binary package build...");
crate::deb::build_binary_package(arch, Some(series), Some(&cwd), cross, None)
.expect("Cannot build binary package (deb)");
log::info!("Successfully built binary package");
// Check that the .deb files are present
let parent_dir = cwd.parent().expect("Cannot find parent directory");
let deb_files = std::fs::read_dir(parent_dir)
.expect("Cannot read build directory")
.filter_map(|entry| entry.ok())
.filter(|entry| entry.path().extension().is_some_and(|ext| ext == "deb"))
.collect::<Vec<_>>();
log::info!("Found {} .deb files after build", deb_files.len());
for file in &deb_files {
log::debug!(" - {}", file.path().display());
}
assert!(!deb_files.is_empty(), "No .deb files found after build");
log::info!(
"End-to-end test completed successfully for package: {}",
package
);
}
// Tests below will be marked 'serial'
// As builds are using ephemeral contexts, tests running on the same
// process could use the ephemeral context of another thread and
// interfere with each other.
// FIXME: This is not ideal. In the future, we might want to
// either explicitly pass context (instead of shared state) or
// fork for building?
#[tokio::test]
#[test_log::test]
#[serial]
async fn test_deb_hello_ubuntu_end_to_end() {
test_build_end_to_end("hello", "noble", None, None, false).await;
}
#[tokio::test]
#[test_log::test]
#[cfg(target_arch = "x86_64")]
#[serial]
async fn test_deb_hello_ubuntu_cross_end_to_end() {
test_build_end_to_end("hello", "noble", None, Some("riscv64"), true).await;
}
}

src/deb/sbuild.rs (new file)

@@ -0,0 +1,43 @@
/// Sbuild binary package building
/// Call 'sbuild' with the dsc file to build the package with unshare
use crate::context;
use std::error::Error;
use std::path::Path;
pub fn build(
package: &str,
version: &str,
arch: &str,
series: &str,
build_root: &str,
cross: bool,
) -> Result<(), Box<dyn Error>> {
let ctx = context::current();
// Find the actual package directory
let package_dir = crate::deb::find_package_directory(Path::new(build_root), package, version)?;
let package_dir_str = package_dir
.to_str()
.ok_or("Invalid package directory path")?;
let mut cmd = ctx.command("sbuild");
cmd.current_dir(package_dir_str);
cmd.arg("--chroot-mode=unshare");
cmd.arg("--no-clean-source");
if cross {
cmd.arg(format!("--host={}", arch));
} else {
cmd.arg(format!("--arch={}", arch));
}
cmd.arg(format!("--dist={}", series));
// Add output directory argument
cmd.arg(format!("--build-dir={}", build_root));
let status = cmd.status()?;
if !status.success() {
return Err(format!("sbuild failed with status: {}", status).into());
}
Ok(())
}
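// For illustration, a cross build assembled above amounts to roughly
//   sbuild --chroot-mode=unshare --no-clean-source --host=riscv64 --dist=noble --build-dir=<build_root>
// run from the unpacked source directory, while a native build passes
// --arch=<arch> instead of --host=<arch>.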


@@ -1,8 +1,42 @@
//! pkh: Debian packaging helper
//!
//! pkh allows working with Debian packages, with multiple actions/submodules
#![deny(missing_docs)]
/// Handle apt data (apt sources)
pub mod apt;
/// Build a Debian source package (into a .dsc)
pub mod build;
/// Parse or edit a Debian changelog of a source package
pub mod changelog;
pub mod context;
/// Build a Debian package into a binary (.deb)
pub mod deb;
/// Obtain information about one or multiple packages
pub mod package_info;
/// Download a source package locally
pub mod pull;
/// Handle context for .deb building: locally, over ssh, in a chroot...
pub mod context;
/// Utility functions
pub(crate) mod utils;
/// Optional callback function (taking 4 arguments)
/// - Name of the current main operation (e.g. pulling package)
/// - Name of the current nested operation (e.g. cloning git repo)
/// - Progress, position, index of current operation (e.g. amount of data downloaded)
/// - Total amount for current operation (e.g. size of the file to download)
pub type ProgressCallback<'a> = Option<&'a dyn Fn(&str, &str, usize, usize)>;
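// For illustration, a caller could pass a closure as the progress callback, e.g.:
//   let cb = |op: &str, sub: &str, pos: usize, total: usize| {
//       eprintln!("{op} {sub}: {pos}/{total}");
//   };
//   package_info::lookup("hello", None, Some("noble"), "", None, Some(&cb)).await?;
// The closure reference coerces to the `&dyn Fn` expected by ProgressCallback.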
/// Returns the architecture of current CPU, debian-compatible
pub fn get_current_arch() -> String {
match std::env::consts::ARCH {
"x86" => "i386".to_string(),
"x86_64" => "amd64".to_string(),
"arm" => "armhf".to_string(),
"aarch64" => "arm64".to_string(),
"powerpc64" => "ppc64el".to_string(),
x => x.to_string(),
}
}


@@ -3,11 +3,10 @@ use std::io::Write;
extern crate clap;
use clap::{Command, arg, command};
use pkh::context::ContextConfig;
extern crate flate2;
use pkh::pull::pull;
use pkh::changelog::generate_entry;
use indicatif_log_bridge::LogWrapper;
@@ -48,12 +47,16 @@ fn main() {
.arg(arg!(--backport "This changelog is for a backport entry").required(false))
.arg(arg!(-v --version <version> "Target version").required(false)),
)
.subcommand(Command::new("build").about("Build the source package"))
.subcommand(Command::new("build").about("Build the source package (into a .dsc)"))
.subcommand(
Command::new("deb")
.about("Build the binary package")
.about("Build the source package into binary package (.deb)")
.arg(arg!(-s --series <series> "Target distribution series").required(false))
.arg(arg!(-a --arch <arch> "Target architecture").required(false)),
.arg(arg!(-a --arch <arch> "Target architecture").required(false))
.arg(arg!(--cross "Cross-compile for target architecture (instead of qemu-binfmt)")
.long_help("Cross-compile for target architecture (instead of using qemu-binfmt)\nNote that most packages cannot be cross-compiled").required(false))
.arg(arg!(--mode <mode> "Change build mode [sbuild, local]").required(false)
.long_help("Change build mode [sbuild, local]\nDefault will chose depending on other parameters, don't provide if unsure")),
)
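// Example invocations (illustrative):
//   pkh deb                               (local build for the host architecture)
//   pkh deb -s noble -a riscv64 --cross   (cross-build for riscv64)
//   pkh deb --mode sbuild                 (force an sbuild-based build)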
.subcommand(
Command::new("context")
@@ -71,7 +74,10 @@ fn main() {
.about("Remove a context")
.arg(arg!(<name> "Context name"))
)
.subcommand(Command::new("ls").about("List contexts"))
.subcommand(
Command::new("ls")
.about("List contexts")
)
.subcommand(Command::new("show").about("Show current context"))
.subcommand(
Command::new("use")
@@ -86,28 +92,27 @@ fn main() {
let package = sub_matches.get_one::<String>("package").expect("required");
let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str());
let version = sub_matches
.get_one::<String>("version")
.map(|s| s.as_str())
.unwrap_or("");
let ppa = sub_matches
let version = sub_matches.get_one::<String>("version").map(|s| s.as_str());
let _ppa = sub_matches
.get_one::<String>("ppa")
.map(|s| s.as_str())
.unwrap_or("");
// Since pull is async, we need to block on it
let (pb, progress_callback) = ui::create_progress_bar(&multi);
if let Err(e) = rt.block_on(pull(
package,
version,
series,
"",
ppa,
dist,
None,
Some(&progress_callback),
)) {
// Since pull is async, we need to block on it
if let Err(e) = rt.block_on(async {
let package_info = pkh::package_info::lookup(
package,
version,
series,
"",
dist,
Some(&progress_callback),
)
.await?;
pkh::pull::pull(&package_info, None, Some(&progress_callback), false).await
}) {
pb.finish_and_clear();
error!("{}", e);
std::process::exit(1);
@@ -142,22 +147,23 @@ fn main() {
let cwd = std::env::current_dir().unwrap();
let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
let arch = sub_matches.get_one::<String>("arch").map(|s| s.as_str());
let cross = sub_matches.get_one::<bool>("cross").unwrap_or(&false);
let mode: Option<&str> = sub_matches.get_one::<String>("mode").map(|s| s.as_str());
let mode: Option<pkh::deb::BuildMode> = match mode {
Some("sbuild") => Some(pkh::deb::BuildMode::Sbuild),
Some("local") => Some(pkh::deb::BuildMode::Local),
_ => None,
};
if let Err(e) = pkh::deb::build_binary_package(arch, series, Some(cwd.as_path())) {
if let Err(e) =
pkh::deb::build_binary_package(arch, series, Some(cwd.as_path()), *cross, mode)
{
error!("{}", e);
std::process::exit(1);
}
}
Some(("context", sub_matches)) => {
use pkh::context::{Context, ContextManager};
let mut mgr = match ContextManager::new() {
Ok(mgr) => mgr,
Err(e) => {
error!("Failed to initialize context manager: {}", e);
std::process::exit(1);
}
};
let mgr = pkh::context::manager();
match sub_matches.subcommand() {
Some(("create", args)) => {
@@ -168,7 +174,7 @@ fn main() {
.unwrap_or("local");
let context = match type_str {
"local" => Context::Local,
"local" => ContextConfig::Local,
"ssh" => {
let endpoint = args
.get_one::<String>("endpoint")
@@ -191,7 +197,7 @@ fn main() {
})
});
Context::Ssh { host, user, port }
ContextConfig::Ssh { host, user, port }
}
_ => {
error!("Unknown context type: {}", type_str);
@@ -217,20 +223,14 @@ fn main() {
let contexts = mgr.list_contexts();
let current = mgr.current_name();
for ctx in contexts {
if Some(&ctx) == current.as_ref() {
if ctx == current {
println!("* {}", ctx);
} else {
println!(" {}", ctx);
}
}
}
Some(("show", _)) => {
if let Some(name) = mgr.current_name() {
println!("{}", name);
} else {
println!("No context set (defaulting to local)");
}
}
Some(("show", _)) => {}
Some(("use", args)) => {
let name = args.get_one::<String>("name").unwrap();
if let Err(e) = mgr.set_current(name) {


@@ -56,7 +56,8 @@ fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
Ok(entries.into_iter().map(|(s, _)| s).collect())
}
async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
/// Get time-ordered list of series for a distribution, development series first
pub async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
} else {
@@ -71,9 +72,8 @@ async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
let mut series = parse_series_csv(&content)?;
// For Debian, ensure 'sid' is first if it's not (it usually doesn't have a date or is very old/new depending on file)
// Actually in the file sid has 1993 date.
// But we want to try 'sid' (unstable) first for Debian.
// For Debian, ensure 'sid' is first if it's not
// We want to try 'sid' (unstable) first for Debian.
if dist == "debian" {
series.retain(|s| s != "sid");
series.insert(0, "sid".to_string());
@@ -93,6 +93,7 @@ fn get_series_from_file(path: &str) -> Result<Vec<String>, Box<dyn Error>> {
parse_series_csv(&content)
}
/// Obtain a list of series from a distribution
pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
get_series_from_file(format!("/usr/share/distro-info/{dist}.csv").as_str())
@@ -105,7 +106,8 @@ pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>>
}
}
async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
/// Obtain the distribution (eg. debian, ubuntu) from a distribution series (eg. noble, bookworm)
pub async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
let debian_series = get_dist_series("debian").await?;
if debian_series.contains(&series.to_string()) {
return Ok("debian".to_string());
@@ -117,34 +119,55 @@ async fn get_dist_from_series(series: &str) -> Result<String, Box<dyn Error>> {
Err(format!("Unknown series: {}", series).into())
}
/// A File used in a source package
#[derive(Debug, Clone)]
pub struct FileEntry {
/// Name of the file
pub name: String,
/// Size of the file
pub size: u64,
/// SHA256 hash for the file
pub sha256: String,
}
/// A package 'stanza' as found in 'Sources.gz' files, containing basic information about a source package
#[derive(Debug)]
pub struct PackageStanza {
/// Name of the package
pub package: String,
/// Version number for the package
pub version: String,
/// Directory field in the stanza
pub directory: String,
/// Source package format (e.g. '3.0 (quilt)')
pub format: String,
/// Vcs-Git field in the stanza
pub vcs_git: Option<String>,
/// Vcs-Browser field in the stanza
pub vcs_browser: Option<String>,
/// Files present in the source package
pub files: Vec<FileEntry>,
}
/// Source package information
#[derive(Debug)]
pub struct PackageInfo {
pub dist: String,
pub series: String,
/// Source 'stanza' for the package, containing basic information
pub stanza: PackageStanza,
/// Distribution for the package
pub dist: String,
/// Distribution series for the package
pub series: String,
/// Preferred VCS for the source package
///
/// Should be Launchpad on Ubuntu, and Salsa on Debian
pub preferred_vcs: Option<String>,
/// URL for the files of the source package
pub archive_url: String,
}
impl PackageInfo {
/// Returns true if the package is a Debian native package (no orig)
pub fn is_native(&self) -> bool {
self.stanza.format.contains("(native)")
}
@@ -175,9 +198,7 @@ fn get_base_url(dist: &str) -> &str {
}
}
/*
* Obtain the URL for the 'Release' file of a distribution series
*/
/// Obtain the URL for the 'Release' file of a distribution series
fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
let pocket_full = if pocket.is_empty() {
String::new()
@@ -187,9 +208,7 @@ fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
format!("{base_url}/dists/{series}{pocket_full}/Release")
}
/*
* Obtain the components of a distribution series by parsing the 'Release' file
*/
/// Obtain the components of a distribution series by parsing the 'Release' file
async fn get_components(
base_url: &str,
series: &str,
@@ -214,20 +233,32 @@ async fn get_components(
Err("Components not found.".into())
}
/*
* Parse a 'Sources.gz' debian package file data, to look for a target package and
* return the data for that package stanza
*/
fn parse_sources(
data: &[u8],
target_package: &str,
target_version: Option<&str>,
) -> Result<Option<PackageStanza>, Box<dyn Error>> {
let mut d = GzDecoder::new(data);
let mut s = String::new();
d.read_to_string(&mut s)?;
struct DebianSources {
splitted_sources: std::str::Split<'static, &'static str>,
}
impl DebianSources {
fn new(data: &[u8]) -> Result<DebianSources, Box<dyn Error>> {
// Gz-decode 'Sources.gz' file into a string, and split it on stanzas
let mut d = GzDecoder::new(data);
let mut s = String::new();
d.read_to_string(&mut s)?;
for stanza in s.split("\n\n") {
// Convert the string to a static lifetime by leaking it
let static_str = Box::leak(s.into_boxed_str());
let splitted = static_str.split("\n\n");
Ok(DebianSources {
splitted_sources: splitted,
})
}
}
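// Note: Box::leak gives the decoded Sources text a 'static lifetime so the Split
// iterator can be stored in the struct. The buffer is never freed, which is fine
// for a one-shot lookup but would accumulate memory if parsing ran in a loop.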
impl Iterator for DebianSources {
type Item = PackageStanza;
fn next(&mut self) -> Option<Self::Item> {
let stanza = self.splitted_sources.next()?;
// Parse stanza into a hashmap of strings, the fields
let mut fields: HashMap<String, String> = HashMap::new();
let mut current_key = String::new();
@@ -248,53 +279,60 @@ fn parse_sources(
}
}
if let Some(pkg) = fields.get("Package")
&& pkg == target_package
{
// Check version if requested
if let Some(ver) = target_version {
if let Some(pkg_ver) = fields.get("Version") {
if pkg_ver != ver {
continue;
}
} else {
continue;
}
}
let mut files = Vec::new();
if let Some(checksums) = fields.get("Checksums-Sha256") {
for line in checksums.lines() {
let parts: Vec<&str> = line.split_whitespace().collect();
if parts.len() >= 3 {
files.push(FileEntry {
sha256: parts[0].to_string(),
size: parts[1].parse().unwrap_or(0),
name: parts[2].to_string(),
});
}
}
}
return Ok(Some(PackageStanza {
package: pkg.clone(),
version: fields.get("Version").cloned().unwrap_or_default(),
directory: fields.get("Directory").cloned().unwrap_or_default(),
format: fields
.get("Format")
.cloned()
.unwrap_or_else(|| "1.0".to_string()),
vcs_git: fields.get("Vcs-Git").cloned(),
vcs_browser: fields.get("Vcs-Browser").cloned(),
files,
}));
let pkg = fields.get("Package");
if pkg.is_none() {
// Skip empty stanza
return self.next();
}
}
Ok(None)
// Parse package files
let mut files = Vec::new();
if let Some(checksums) = fields.get("Checksums-Sha256") {
for line in checksums.lines() {
let parts: Vec<&str> = line.split_whitespace().collect();
if parts.len() >= 3 {
files.push(FileEntry {
sha256: parts[0].to_string(),
size: parts[1].parse().unwrap_or(0),
name: parts[2].to_string(),
});
}
}
}
Some(PackageStanza {
package: fields.get("Package").unwrap().to_string(),
version: fields.get("Version").unwrap().to_string(),
directory: fields.get("Directory").cloned().unwrap_or_default(),
format: fields
.get("Format")
.cloned()
.unwrap_or_else(|| "1.0".to_string()),
vcs_git: fields.get("Vcs-Git").cloned(),
vcs_browser: fields.get("Vcs-Browser").cloned(),
files,
})
}
}
pub async fn get(
/// Parse a 'Sources.gz' debian package file data, to look for a target package and
/// return the data for that package stanza
fn parse_sources(
data: &[u8],
target_package: &str,
target_version: Option<&str>,
) -> Result<Option<PackageStanza>, Box<dyn Error>> {
let mut sources = DebianSources::new(data)?;
// Find the right package, with the right version if requested
Ok(sources.find(|s| {
s.package == target_package
&& (target_version.is_none() || s.version == target_version.unwrap())
}))
}
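// For illustration, a minimal stanza this parser understands looks like:
//   Package: hello
//   Version: 2.10-3ubuntu1
//   Directory: pool/main/h/hello
//   Format: 3.0 (quilt)
//   Checksums-Sha256:
//    <sha256> <size> hello_2.10.orig.tar.gz
//    <sha256> <size> hello_2.10-3ubuntu1.dsc
// Continuation lines start with a space and are folded into the preceding field.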
/// Get package information from a package, distribution series, and pocket
async fn get(
package_name: &str,
series: &str,
pocket: &str,
@@ -367,7 +405,8 @@ pub async fn get(
.into())
}
pub async fn find_package(
/// Try to find package information in a distribution, trying all series and pockets
async fn find_package(
package_name: &str,
dist: &str,
pocket: &str,
@@ -413,6 +452,58 @@ pub async fn find_package(
Err(format!("Package '{}' not found.", package_name).into())
}
/// Lookup package information for a source package
///
/// This function obtains package information either directly from a specific series
/// or by searching across all series in a distribution.
pub async fn lookup(
package: &str,
version: Option<&str>,
series: Option<&str>,
pocket: &str,
dist: Option<&str>,
progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> {
// Obtain the package information, either directly in a series or with a search in all series
let package_info = if let Some(s) = series {
if let Some(cb) = progress {
cb(
&format!("Resolving package info for {}...", package),
"",
0,
0,
);
}
// Get the package information from that series and pocket
get(package, s, pocket, version).await?
} else {
let dist = dist.unwrap_or_else(||
// Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
if std::process::Command::new("lsb_release").arg("-i").arg("-s").output()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_lowercase()).unwrap_or_default() == "ubuntu" {
"ubuntu"
} else {
"debian"
}
);
if let Some(cb) = progress {
cb(
&format!("Searching for package {} in {}...", package, dist),
"",
0,
0,
);
}
// Try to find the package in all series from that dist
find_package(package, dist, pocket, version, progress).await?
};
Ok(package_info)
}
#[cfg(test)]
mod tests {
use super::*;


@@ -2,7 +2,6 @@ use std::cmp::min;
use std::error::Error;
use std::path::Path;
use crate::package_info;
use crate::package_info::PackageInfo;
use std::process::Command;
@@ -262,22 +261,51 @@ async fn fetch_orig_tarball(
Ok(())
}
async fn fetch_dsc_file(
info: &PackageInfo,
cwd: Option<&Path>,
progress: ProgressCallback<'_>,
) -> Result<(), Box<dyn Error>> {
let target_dir = cwd.unwrap_or_else(|| Path::new("."));
// Find the dsc file in the file list
let dsc_file = info
.stanza
.files
.iter()
.find(|f| f.name.ends_with(".dsc"))
.ok_or("Could not find .dsc file in package info")?;
let filename = &dsc_file.name;
debug!("Fetching dsc file: {}", filename);
download_file_checksum(
format!("{}/{}", &info.archive_url, filename).as_str(),
&dsc_file.sha256,
target_dir,
progress,
)
.await?;
Ok(())
}
async fn fetch_archive_sources(
info: &PackageInfo,
cwd: Option<&Path>,
progress: ProgressCallback<'_>,
) -> Result<(), Box<dyn Error>> {
let package_dir = if let Some(path) = cwd {
path.join(&info.stanza.package)
path
} else {
Path::new(&info.stanza.package).to_path_buf()
&Path::new(".").to_path_buf()
};
std::fs::create_dir_all(&package_dir)?;
std::fs::create_dir_all(package_dir)?;
for file in &info.stanza.files {
let url = format!("{}/{}", info.archive_url, file.name);
download_file_checksum(&url, &file.sha256, &package_dir, progress).await?;
download_file_checksum(&url, &file.sha256, package_dir, progress).await?;
}
// Extract the debian tarball or diff
@@ -304,59 +332,19 @@ async fn fetch_archive_sources(
Ok(())
}
/// Pull a source package locally using pre-retrieved package information
///
/// This function takes a PackageInfo struct and downloads the package using the preferred method
/// (either git or direct archive download), as well as orig tarball, inside 'package' directory.
/// The source will be extracted under 'package/package'.
pub async fn pull(
package: &str,
_version: &str,
series: Option<&str>,
pocket: &str,
_ppa: &str,
dist: Option<&str>,
package_info: &PackageInfo,
cwd: Option<&Path>,
progress: ProgressCallback<'_>,
) -> Result<PackageInfo, Box<dyn Error>> {
let version_opt = if _version.is_empty() {
None
} else {
Some(_version)
};
/* Obtain the package information, either directly in a series or with a search in all series */
let package_info = if let Some(s) = series {
if let Some(cb) = progress {
cb(
&format!("Resolving package info for {}...", package),
"",
0,
0,
);
}
// Get the package information from that series and pocket
package_info::get(package, s, pocket, version_opt).await?
} else {
let dist = dist.unwrap_or_else(||
// Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
if std::process::Command::new("lsb_release").arg("-i").arg("-s").output()
.map(|o| String::from_utf8_lossy(&o.stdout).trim().to_lowercase()).unwrap_or_default() == "ubuntu" {
"ubuntu"
} else {
"debian"
}
);
if let Some(cb) = progress {
cb(
&format!("Searching for package {} in {}...", package, dist),
"",
0,
0,
);
}
// Try to find the package in all series from that dist
package_info::find_package(package, dist, pocket, version_opt, progress).await?
};
force_archive: bool,
) -> Result<(), Box<dyn Error>> {
let package = &package_info.stanza.package;
let series = &package_info.series;
let package_dir = if let Some(path) = cwd {
path.join(package)
} else {
@@ -364,15 +352,20 @@ pub async fn pull(
};
/* Fetch the package: either via git (preferred VCS) or the archive */
if let Some(ref url) = package_info.preferred_vcs {
if let Some(ref url) = package_info.preferred_vcs
&& !force_archive
{
// We have found a preferred VCS (git repository) for the package, so
// we fetch the package from that repo.
// Depending on target series, we pick target branch; if no series is specified,
// Depending on target series, we pick target branch; if latest series is specified,
// we target the development branch, i.e. the default branch
let branch_name = if let Some(s) = series {
let branch_name = if crate::package_info::get_ordered_series(package_info.dist.as_str())
.await?[0]
!= *series
{
if package_info.dist == "ubuntu" {
Some(format!("{}/{}", package_info.dist, s))
Some(format!("{}/{}", package_info.dist, series))
} else {
// Debian does not have reliable branch naming...
// For now, we skip that part and clone default
@@ -399,6 +392,7 @@ pub async fn pull(
0,
);
}
clone_repo(
url.as_str(),
package,
@@ -406,23 +400,29 @@ pub async fn pull(
Some(&package_dir),
progress,
)?;
if !package_info.is_native() {
if let Some(cb) = progress {
cb("Fetching orig tarball...", "", 0, 0);
}
fetch_orig_tarball(&package_info, Some(&package_dir), progress).await?;
fetch_orig_tarball(package_info, Some(&package_dir), progress).await?;
} else {
debug!("Native package, skipping orig tarball fetch.");
}
if let Some(cb) = progress {
cb("Fetching dsc file...", "", 0, 0);
}
fetch_dsc_file(package_info, Some(&package_dir), progress).await?;
} else {
// Fallback to archive fetching
if let Some(cb) = progress {
cb("Downloading from archive...", "", 0, 0);
}
fetch_archive_sources(&package_info, Some(cwd.unwrap_or(Path::new("."))), progress).await?;
fetch_archive_sources(package_info, Some(&package_dir), progress).await?;
}
Ok(package_info)
Ok(())
}
#[cfg(test)]
@@ -434,16 +434,17 @@ mod tests {
// For determinism, we require for tests that either a distro or series is specified,
// as no distribution would mean fallback to system distro
assert!(dist != None || series != None);
assert!(dist.is_some() || series.is_some());
// Use a temp directory as working directory
let temp_dir = tempfile::tempdir().unwrap();
let cwd = temp_dir.path();
// Main 'pull' command: the one we want to test
let info = pull(package, "", series, "", "", dist, Some(cwd), None)
let info = crate::package_info::lookup(package, None, series, "", dist, None)
.await
.unwrap();
pull(&info, Some(cwd), None, false).await.unwrap();
let package_dir = cwd.join(package);
assert!(package_dir.exists());
@@ -480,18 +481,25 @@ mod tests {
}
}
// Check for orig tarball in package dir
// Check for orig tarball in package dir (only for non-native packages)
let mut found_tarball = false;
let mut found_dsc = false;
for entry in std::fs::read_dir(package_dir).unwrap() {
let entry = entry.unwrap();
let name = entry.file_name().to_string_lossy().to_string();
if name.contains(".orig.tar.") {
found_tarball = true;
break;
}
if name.ends_with(".dsc") {
found_dsc = true;
}
}
assert!(found_tarball, "Orig tarball not found in package dir");
// Only check for orig tarball if the package is not native
if !info.is_native() {
assert!(found_tarball, "Orig tarball not found in package dir");
}
assert!(found_dsc, "DSC file not found in package dir");
}
#[tokio::test]

src/utils/gpg.rs (new file)

@@ -0,0 +1,32 @@
use gpgme::{Context, Protocol};
/// Check if a GPG key matching 'email' exists
/// Returns the key ID if found, None otherwise
pub fn find_signing_key_for_email(
email: &str,
) -> Result<Option<String>, Box<dyn std::error::Error>> {
// Create a new GPG context
let mut ctx = Context::from_protocol(Protocol::OpenPgp)?;
// List all secret keys
let keys = ctx.secret_keys()?;
// Find a key that matches the email and can sign
for key_result in keys {
let key = key_result?;
// Check if the key has signing capability
if key.can_sign() {
// Check user IDs for email match
for user_id in key.user_ids() {
if let Ok(userid_email) = user_id.email()
&& userid_email.eq_ignore_ascii_case(email)
&& let Ok(fingerprint) = key.fingerprint()
{
return Ok(Some(fingerprint.to_string()));
}
}
}
}
Ok(None)
}
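// Hypothetical usage: only sign when a matching secret key exists, e.g.
//   if let Some(fingerprint) = find_signing_key_for_email("maintainer@example.com")? {
//       // pass `fingerprint` to the signing step
//   } else {
//       // skip signing
//   }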

src/utils/mod.rs (new file)

@@ -0,0 +1 @@
pub mod gpg;