format: run cargo fmt
CI / build (push): successful in 1m44s

2025-11-30 12:51:52 +01:00
parent c813823a1a
commit 2cfbb69fe7
5 changed files with 491 additions and 232 deletions


@@ -1,14 +1,18 @@
+use chrono::Local;
+use git2::{Oid, Repository, Sort};
+use regex::Regex;
 use std::fs::File;
 use std::io::{self, BufRead, Read, Write};
 use std::path::Path;
-use regex::Regex;
-use chrono::Local;
-use git2::{Repository, Sort, Oid};
 
 /*
  * Automatically generate a changelog entry from a commit history and previous changelog
  */
-pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Option<&str>) -> Result<(), Box<dyn std::error::Error>> {
+pub fn generate_entry(
+    changelog_file: &str,
+    cwd: Option<&Path>,
+    user_version: Option<&str>,
+) -> Result<(), Box<dyn std::error::Error>> {
     let changelog_path = if let Some(path) = cwd {
         path.join(changelog_file)
     } else {
@@ -28,18 +32,6 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
     let repo = Repository::open(&repo_path)?;
     // 3. Find commits since the tag corresponding to the version
-    // We assume the tag format is "debian/<version>" or just "<version>" or "v<version>"
-    // But usually for Debian packages it might be "debian/<version>"
-    // Let's try to find a tag that matches.
-    // For now, let's assume the tag is simply the version string, or debian/version
-    // If we can't find a tag, we might have to error out or take all commits?
-    // Let's try to find the tag.
-    // Actually, usually we want to generate an entry for a NEW version based on changes since the OLD version.
-    // The `changelog_file` passed here is the EXISTING changelog.
-    // So `version` is the PREVIOUS version.
-    // We want to find commits since `version`.
     let commits = get_commits_since_version(&repo, &old_version)?;
@@ -48,20 +40,6 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
         // return Ok(());
     }
-    // 4. Format the new entry
-    // We don't know the NEW version yet, so we might use "UNRELEASED" or increment the version.
-    // For now, let's use "UNRELEASED" and let the user edit it, or maybe we can try to increment it.
-    // The requirement says "Automatically generate a changelog entry".
-    // Usually tools like `dch` add a new entry.
-    // Let's create a new entry with "UNRELEASED" distribution and incremented version?
-    // Or just append to the top.
-    // Let's assume we want to output the new entry to stdout or prepend to file?
-    // The function signature returns (), so maybe it modifies the file.
-    // Let's prepend to the file.
     // Compute new version if needed, or use user-supplied one
     let new_version = if let Some(version) = user_version {
         version.to_string()
@@ -71,7 +49,14 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
     };
     let (maintainer_name, maintainer_email) = get_maintainer_info()?;
-    let new_entry = format_entry(&package, &new_version, &series, &commits, &maintainer_name, &maintainer_email);
+    let new_entry = format_entry(
+        &package,
+        &new_version,
+        &series,
+        &commits,
+        &maintainer_name,
+        &maintainer_email,
+    );
 
     prepend_to_file(&changelog_path, &new_entry)?;
@@ -84,7 +69,12 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
  * Compute the next (most probable) version number of a package, from old version and
  * conditions on changes (is ubuntu upload, is a no change rebuild, is a non-maintainer upload)
  */
-fn compute_new_version(old_version: &str, is_ubuntu: bool, is_rebuild: bool, is_nmu: bool) -> String {
+fn compute_new_version(
+    old_version: &str,
+    is_ubuntu: bool,
+    is_rebuild: bool,
+    is_nmu: bool,
+) -> String {
     if is_ubuntu {
         return increment_suffix(old_version, "ubuntu");
     }
@@ -137,7 +127,9 @@ fn increment_suffix(version: &str, suffix: &str) -> String {
 /*
  * Parse a changelog file first entry header, to obtain (package, version, series)
  */
-fn parse_changelog_header(path: &Path) -> Result<(String, String, String), Box<dyn std::error::Error>> {
+fn parse_changelog_header(
+    path: &Path,
+) -> Result<(String, String, String), Box<dyn std::error::Error>> {
     let file = File::open(path)?;
     let mut reader = io::BufReader::new(file);
     let mut first_line = String::new();
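For reference, the first line this parser reads is the standard Debian changelog header; with hypothetical values it looks like:

    hello (2.10-2) unstable; urgency=medium

from which the function extracts ("hello", "2.10-2", "unstable") as (package, version, series).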
@@ -158,7 +150,10 @@ fn parse_changelog_header(path: &Path) -> Result<(String, String, String), Box<d
 /*
  * Obtain all commit messages as a list since a tagged version in a git repository
  */
-fn get_commits_since_version(repo: &Repository, version: &str) -> Result<Vec<String>, Box<dyn std::error::Error>> {
+fn get_commits_since_version(
+    repo: &Repository,
+    version: &str,
+) -> Result<Vec<String>, Box<dyn std::error::Error>> {
     let mut revwalk = repo.revwalk()?;
     revwalk.set_sorting(Sort::TIME)?;
@@ -216,11 +211,21 @@ fn get_commits_since_version(repo: &Repository, version: &str) -> Result<Vec<Str
  * Create a changelog entry from information, i.e. format that information
  * into a changelog entry
  */
-fn format_entry(package: &str, version: &str, series: &str, changes: &[String], maintainer_name: &str, maintainer_email: &str) -> String {
+fn format_entry(
+    package: &str,
+    version: &str,
+    series: &str,
+    changes: &[String],
+    maintainer_name: &str,
+    maintainer_email: &str,
+) -> String {
     let mut entry = String::new();
     // Header: package, version and distribution series
-    entry.push_str(&format!("{} ({}) {}; urgency=medium\n\n", package, version, series));
+    entry.push_str(&format!(
+        "{} ({}) {}; urgency=medium\n\n",
+        package, version, series
+    ));
 
     // Changes
     for change in changes {
@@ -232,7 +237,10 @@ fn format_entry(package: &str, version: &str, series: &str, changes: &[String],
     // Footer: date, maintainer
     let date = Local::now().format("%a, %d %b %Y %H:%M:%S %z").to_string();
-    entry.push_str(&format!("\n -- {} <{}> {}\n\n", maintainer_name, maintainer_email, date));
+    entry.push_str(&format!(
+        "\n -- {} <{}> {}\n\n",
+        maintainer_name, maintainer_email, date
+    ));
 
     return entry;
 }
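Assembled, the output of format_entry is a regular Debian changelog stanza. With hypothetical values (the per-change bullet formatting lives in the loop elided from this hunk) an entry reads:

    hello (2.10-3) unstable; urgency=medium

      * Fix crash on empty input

     -- Your Name <you@example.com> Sun, 30 Nov 2025 12:51:52 +0100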
@@ -272,17 +280,45 @@ mod tests {
     use tempfile::TempDir;
 
     fn setup_repo(dir: &Path) {
-        Command::new("git").arg("init").current_dir(dir).output().unwrap();
-        Command::new("git").arg("config").arg("user.email").arg("you@example.com").current_dir(dir).output().unwrap();
-        Command::new("git").arg("config").arg("user.name").arg("Your Name").current_dir(dir).output().unwrap();
+        Command::new("git")
+            .arg("init")
+            .current_dir(dir)
+            .output()
+            .unwrap();
+        Command::new("git")
+            .arg("config")
+            .arg("user.email")
+            .arg("you@example.com")
+            .current_dir(dir)
+            .output()
+            .unwrap();
+        Command::new("git")
+            .arg("config")
+            .arg("user.name")
+            .arg("Your Name")
+            .current_dir(dir)
+            .output()
+            .unwrap();
     }
 
     fn commit(dir: &Path, message: &str) {
-        Command::new("git").arg("commit").arg("--allow-empty").arg("-m").arg(message).current_dir(dir).output().unwrap();
+        Command::new("git")
+            .arg("commit")
+            .arg("--allow-empty")
+            .arg("-m")
+            .arg(message)
+            .current_dir(dir)
+            .output()
+            .unwrap();
     }
 
     fn tag(dir: &Path, name: &str) {
-        Command::new("git").arg("tag").arg(name).current_dir(dir).output().unwrap();
+        Command::new("git")
+            .arg("tag")
+            .arg(name)
+            .current_dir(dir)
+            .output()
+            .unwrap();
     }
 
     #[test]
@@ -298,7 +334,12 @@ mod tests {
         std::fs::write(&changelog_path, initial_content).unwrap();
 
         // Commit and tag
-        Command::new("git").arg("add").arg(".").current_dir(repo_dir).output().unwrap();
+        Command::new("git")
+            .arg("add")
+            .arg(".")
+            .current_dir(repo_dir)
+            .output()
+            .unwrap();
         commit(repo_dir, "Initial commit");
         tag(repo_dir, "debian/0.1.0-1");
@@ -332,36 +373,69 @@ mod tests {
     #[test]
     fn test_compute_new_version() {
         // Debian upload
-        assert_eq!(compute_new_version("15.2.0-8", false, false, false), "15.2.0-9");
-        assert_eq!(compute_new_version("15.2.0-9", false, false, false), "15.2.0-10");
+        assert_eq!(
+            compute_new_version("15.2.0-8", false, false, false),
+            "15.2.0-9"
+        );
+        assert_eq!(
+            compute_new_version("15.2.0-9", false, false, false),
+            "15.2.0-10"
+        );
         // Ubuntu upload
-        assert_eq!(compute_new_version("15.2.0-9", true, false, false), "15.2.0-9ubuntu1");
-        assert_eq!(compute_new_version("15.2.0-9ubuntu1", true, false, false), "15.2.0-9ubuntu2");
+        assert_eq!(
+            compute_new_version("15.2.0-9", true, false, false),
+            "15.2.0-9ubuntu1"
+        );
+        assert_eq!(
+            compute_new_version("15.2.0-9ubuntu1", true, false, false),
+            "15.2.0-9ubuntu2"
+        );
         // No change rebuild
-        assert_eq!(compute_new_version("15.2.0-9", false, true, false), "15.2.0-9build1");
-        assert_eq!(compute_new_version("15.2.0-9build1", false, true, false), "15.2.0-9build2");
+        assert_eq!(
+            compute_new_version("15.2.0-9", false, true, false),
+            "15.2.0-9build1"
+        );
+        assert_eq!(
+            compute_new_version("15.2.0-9build1", false, true, false),
+            "15.2.0-9build2"
+        );
         // Rebuild of Ubuntu version
-        assert_eq!(compute_new_version("15.2.0-9ubuntu1", false, true, false), "15.2.0-9ubuntu1build1");
+        assert_eq!(
+            compute_new_version("15.2.0-9ubuntu1", false, true, false),
+            "15.2.0-9ubuntu1build1"
+        );
         // NMU
         // Native
         assert_eq!(compute_new_version("1.0", false, false, true), "1.0+nmu1");
-        assert_eq!(compute_new_version("1.0+nmu1", false, false, true), "1.0+nmu2");
+        assert_eq!(
+            compute_new_version("1.0+nmu1", false, false, true),
+            "1.0+nmu2"
+        );
         // Non-native
         assert_eq!(compute_new_version("1.0-1", false, false, true), "1.0-1.1");
-        assert_eq!(compute_new_version("1.0-1.1", false, false, true), "1.0-1.2");
+        assert_eq!(
+            compute_new_version("1.0-1.1", false, false, true),
+            "1.0-1.2"
+        );
         // NMU of NMU?
-        assert_eq!(compute_new_version("1.0-1.2", false, false, true), "1.0-1.3");
+        assert_eq!(
+            compute_new_version("1.0-1.2", false, false, true),
+            "1.0-1.3"
+        );
         // Native package uploads
         assert_eq!(compute_new_version("1.0", false, false, false), "1.1");
         assert_eq!(compute_new_version("1.0.5", false, false, false), "1.0.6");
-        assert_eq!(compute_new_version("20241126", false, false, false), "20241127");
+        assert_eq!(
+            compute_new_version("20241126", false, false, false),
+            "20241127"
+        );
     }
 
     #[test]


@@ -1,19 +1,25 @@
+use std::cmp::min;
 use std::error::Error;
 use std::path::Path;
-use std::cmp::min;
 
 use pkh::package_info;
 use pkh::package_info::PackageInfo;
 use std::process::Command;
 
-use log::{debug};
+use log::debug;
 use regex::Regex;
 
 use pkh::ProgressCallback;
 
-fn clone_repo(url: &str, package: &str, branch: Option<&str>, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+fn clone_repo(
+    url: &str,
+    package: &str,
+    branch: Option<&str>,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     let target_path = if let Some(path) = cwd {
         path.join(package)
     } else {
@@ -23,7 +29,12 @@ fn clone_repo(url: &str, package: &str, branch: Option<&str>, cwd: Option<&Path>
     let mut callbacks = git2::RemoteCallbacks::new();
     if let Some(ref progress_cb) = progress {
         callbacks.transfer_progress(move |stats| {
-            (progress_cb)("", "Receiving objects...", stats.received_objects(), stats.total_objects());
+            (progress_cb)(
+                "",
+                "Receiving objects...",
+                stats.received_objects(),
+                stats.total_objects(),
+            );
             true
         });
         callbacks.sideband_progress(move |data| {
@@ -31,8 +42,18 @@ fn clone_repo(url: &str, package: &str, branch: Option<&str>, cwd: Option<&Path>
             let re = Regex::new(r"(.*):[ ]*([0-9]*)% \(([0-9]*)/([0-9]*)\)").unwrap();
             if let Some(caps) = re.captures(msg.trim()) {
                 let msg = caps.get(1).map_or("", |m| m.as_str()).to_string();
-                let objects = caps.get(3).map_or("", |m| m.as_str()).to_string().parse::<usize>().unwrap_or(0);
-                let total = caps.get(4).map_or("", |m| m.as_str()).to_string().parse::<usize>().unwrap_or(0);
+                let objects = caps
+                    .get(3)
+                    .map_or("", |m| m.as_str())
+                    .to_string()
+                    .parse::<usize>()
+                    .unwrap_or(0);
+                let total = caps
+                    .get(4)
+                    .map_or("", |m| m.as_str())
+                    .to_string()
+                    .parse::<usize>()
+                    .unwrap_or(0);
                 (progress_cb)("", msg.as_str(), objects, total);
             }
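The sideband data this regex targets is git's human-readable progress output. A minimal check with the regex crate, on an illustrative input string:

    let re = regex::Regex::new(r"(.*):[ ]*([0-9]*)% \(([0-9]*)/([0-9]*)\)").unwrap();
    let caps = re.captures("Resolving deltas:  42% (123/292)").unwrap();
    assert_eq!((&caps[1], &caps[3], &caps[4]), ("Resolving deltas", "123", "292"));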
@@ -52,23 +73,19 @@ fn clone_repo(url: &str, package: &str, branch: Option<&str>, cwd: Option<&Path>
     }
 
     return match builder.clone(url, &target_path) {
-        Ok(_repo) => {
-            Ok(())
-        }
-        Err(e) => {
-            Err(format!("Failed to clone: {}", e).into())
-        }
+        Ok(_repo) => Ok(()),
+        Err(e) => Err(format!("Failed to clone: {}", e).into()),
     };
 }
 
-use sha2::{Sha256, Digest};
+use sha2::{Digest, Sha256};
 use std::fs::File;
 use std::io::Write;
 
-use futures_util::StreamExt;
 use flate2::read::GzDecoder;
-use xz2::read::XzDecoder;
+use futures_util::StreamExt;
 use tar::Archive;
+use xz2::read::XzDecoder;
 
 fn extract_archive(path: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
     let file = File::open(path)?;
@@ -97,19 +114,30 @@ fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<d
         .expect("pristine-tar checkout failed");
     if !output.status.success() {
-        return Err(format!("pristine-tar checkout failed with status: {}", output.status).into());
+        return Err(format!(
+            "pristine-tar checkout failed with status: {}",
+            output.status
+        )
+        .into());
     }
 
     Ok(())
 }
 
-async fn download_file_checksum(url: &str, checksum: &str, target_dir: &Path, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+async fn download_file_checksum(
+    url: &str,
+    checksum: &str,
+    target_dir: &Path,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     // Download with reqwest
     let response = reqwest::get(url).await?;
     if !response.status().is_success() {
         return Err(format!("Failed to download '{}' : {}", &url, response.status()).into());
     }
 
-    let total_size = response.content_length().ok_or(format!("Failed to get content length from '{}'", &url))?;
+    let total_size = response
+        .content_length()
+        .ok_or(format!("Failed to get content length from '{}'", &url))?;
     let mut index = 0;
 
     // Target file: extract file name from URL
@@ -135,7 +163,11 @@ async fn download_file_checksum(url: &str, checksum: &str, target_dir: &Path, pr
     let result = hasher.finalize();
     let calculated_checksum = hex::encode(result);
     if calculated_checksum != checksum {
-        return Err(format!("Checksum mismatch! Expected {}, got {}", checksum, calculated_checksum).into());
+        return Err(format!(
+            "Checksum mismatch! Expected {}, got {}",
+            checksum, calculated_checksum
+        )
+        .into());
     }
 
     Ok(())
@@ -145,7 +177,10 @@ fn setup_pristine_tar_branch(package_dir: &Path, dist: &str) -> Result<(), Box<d
     let repo = git2::Repository::open(package_dir)?;
 
     // Check if local branch already exists
-    if repo.find_branch("pristine-tar", git2::BranchType::Local).is_ok() {
+    if repo
+        .find_branch("pristine-tar", git2::BranchType::Local)
+        .is_ok()
+    {
         return Ok(());
     }
@@ -175,7 +210,11 @@ fn setup_pristine_tar_branch(package_dir: &Path, dist: &str) -> Result<(), Box<d
     Ok(())
 }
 
-async fn fetch_orig_tarball(info: &PackageInfo, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+async fn fetch_orig_tarball(
+    info: &PackageInfo,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     let package_dir = if let Some(path) = cwd {
         path.join(&info.stanza.package)
     } else {
@@ -184,9 +223,12 @@ async fn fetch_orig_tarball(info: &PackageInfo, cwd: Option<&Path>, progress: Pr
     // Find the orig tarball in the file list
     // Usually ends with .orig.tar.gz or .orig.tar.xz
-    let orig_file = info.stanza.files.iter().find(|f| {
-        f.name.contains(".orig.tar.")
-    }).unwrap();
+    let orig_file = info
+        .stanza
+        .files
+        .iter()
+        .find(|f| f.name.contains(".orig.tar."))
+        .unwrap();
     let filename = &orig_file.name;
 
     // 1. Try executing pristine-tar
@@ -195,19 +237,32 @@ async fn fetch_orig_tarball(info: &PackageInfo, cwd: Option<&Path>, progress: Pr
     let _ = setup_pristine_tar_branch(&package_dir, info.dist.as_str());
     if let Err(e) = checkout_pristine_tar(&package_dir, filename.as_str()) {
-        debug!("pristine-tar failed: {}. Falling back to archive download.", e);
+        debug!(
+            "pristine-tar failed: {}. Falling back to archive download.",
+            e
+        );
 
         // 2. Fallback to archive download
         // We download to the parent directory of the package repo (which is standard for build tools)
        // or the current directory if cwd is None (which effectively is the parent of the package dir)
         let target_dir = cwd.unwrap_or_else(|| Path::new("."));
-        download_file_checksum(format!("{}/{}", &info.archive_url, filename).as_str(), &orig_file.sha256, target_dir, progress).await?;
+        download_file_checksum(
+            format!("{}/{}", &info.archive_url, filename).as_str(),
+            &orig_file.sha256,
+            target_dir,
+            progress,
+        )
+        .await?;
     }
 
     Ok(())
 }
 
-async fn fetch_archive_sources(info: &PackageInfo, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+async fn fetch_archive_sources(
+    info: &PackageInfo,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     let package_dir = if let Some(path) = cwd {
         path.join(&info.stanza.package)
     } else {
@@ -222,9 +277,11 @@ async fn fetch_archive_sources(info: &PackageInfo, cwd: Option<&Path>, progress:
     }
 
     // Extract the debian tarball or diff
-    let debian_file = info.stanza.files.iter().find(|f| {
-        f.name.contains(".debian.tar.") || f.name.contains(".diff.gz")
-    });
+    let debian_file = info
+        .stanza
+        .files
+        .iter()
+        .find(|f| f.name.contains(".debian.tar.") || f.name.contains(".diff.gz"));
 
     if let Some(file) = debian_file {
         let path = package_dir.join(&file.name);
@@ -243,13 +300,31 @@ async fn fetch_archive_sources(info: &PackageInfo, cwd: Option<&Path>, progress:
     Ok(())
 }
 
-pub async fn get(package: &str, _version: &str, series: Option<&str>, pocket: &str, _ppa: &str, dist: Option<&str>, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<PackageInfo, Box<dyn Error>> {
-    let version_opt = if _version.is_empty() { None } else { Some(_version) };
+pub async fn get(
+    package: &str,
+    _version: &str,
+    series: Option<&str>,
+    pocket: &str,
+    _ppa: &str,
+    dist: Option<&str>,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<PackageInfo, Box<dyn Error>> {
+    let version_opt = if _version.is_empty() {
+        None
+    } else {
+        Some(_version)
+    };
 
     /* Obtain the package information, either directly in a series or with a search in all series */
     let package_info = if let Some(s) = series {
         if let Some(cb) = progress {
-            cb(&format!("Resolving package info for {}...", package), "", 0, 0);
+            cb(
+                &format!("Resolving package info for {}...", package),
+                "",
+                0,
+                0,
+            );
         }
 
         // Get the package information from that series and pocket
@@ -266,7 +341,12 @@ pub async fn get(package: &str, _version: &str, series: Option<&str>, pocket: &s
         );
 
         if let Some(cb) = progress {
-            cb(&format!("Searching for package {} in {}...", package, dist), "", 0, 0);
+            cb(
+                &format!("Searching for package {} in {}...", package, dist),
+                "",
+                0,
+                0,
+            );
         }
 
         // Try to find the package in all series from that dist
@@ -300,9 +380,28 @@ pub async fn get(package: &str, _version: &str, series: Option<&str>, pocket: &s
     };
 
     if let Some(cb) = progress {
-        cb(&format!("Cloning {}{}...", url, if let Some(b) = &branch_name { format!(" (branch {})", b) } else { String::new() }), "", 0, 0);
+        cb(
+            &format!(
+                "Cloning {}{}...",
+                url,
+                if let Some(b) = &branch_name {
+                    format!(" (branch {})", b)
+                } else {
+                    String::new()
+                }
+            ),
+            "",
+            0,
+            0,
+        );
     }
-    clone_repo(url.as_str(), package, branch_name.as_deref(), Some(&package_dir), progress)?;
+    clone_repo(
+        url.as_str(),
+        package,
+        branch_name.as_deref(),
+        Some(&package_dir),
+        progress,
+    )?;
 
     if let Some(cb) = progress {
         cb("Fetching orig tarball...", "", 0, 0);
     }
@@ -334,13 +433,21 @@ mod tests {
         let cwd = temp_dir.path();
 
         // Main 'get' command: the one we want to test
-        let info = get(package, "", series, "", "", dist, Some(cwd), None).await.unwrap();
+        let info = get(package, "", series, "", "", dist, Some(cwd), None)
+            .await
+            .unwrap();
 
         let package_dir = cwd.join(package);
         assert!(package_dir.exists());
         let package_source_dir = package_dir.join(package);
-        assert!(package_source_dir.exists(), "Package git repo directory not created");
-        assert!(package_source_dir.join("debian").exists(), "debian directory not present");
+        assert!(
+            package_source_dir.exists(),
+            "Package git repo directory not created"
+        );
+        assert!(
+            package_source_dir.join("debian").exists(),
+            "debian directory not present"
+        );
 
         if package_source_dir.join(".git").exists() {
             // Verify we are on the correct branch


@@ -2,7 +2,7 @@ use std::env;
 use std::io::Write;
 
 extern crate clap;
-use clap::{arg, command, Command};
+use clap::{Command, arg, command};
 
 extern crate flate2;
@@ -12,8 +12,8 @@ use get::get;
 mod changelog;
 use changelog::generate_entry;
 
-use log::{info, error};
 use indicatif_log_bridge::LogWrapper;
+use log::{error, info};
 
 mod ui;
@@ -22,14 +22,10 @@ fn main() {
     let logger =
         env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
             .format_timestamp(None)
-            .format(|buf, record| {
-                writeln!(buf, "{}", record.args())
-            })
+            .format(|buf, record| writeln!(buf, "{}", record.args()))
             .build();
 
     let multi = indicatif::MultiProgress::new();
-    LogWrapper::new(multi.clone(), logger)
-        .try_init()
-        .unwrap();
+    LogWrapper::new(multi.clone(), logger).try_init().unwrap();
 
     let matches = command!()
         .subcommand_required(true)
         .disable_version_flag(true)
@@ -37,30 +33,22 @@ fn main() {
             Command::new("get")
                 .about("Get a source package from the archive or git")
                 .arg(
-                    arg!(-s --series <series> "Target package distribution series")
-                        .required(false)
+                    arg!(-s --series <series> "Target package distribution series").required(false),
                 )
                 .arg(
                     arg!(-d --dist <dist> "Target package distribution (debian, ubuntu)")
-                        .required(false)
+                        .required(false),
                 )
-                .arg(
-                    arg!(-v --version <version> "Target package version")
-                        .required(false)
-                )
-                .arg(
-                    arg!(--ppa <ppa> "Download the package from a specific PPA")
-                        .required(false)
-                )
-                .arg(arg!(<package> "Target package"))
+                .arg(arg!(-v --version <version> "Target package version").required(false))
+                .arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false))
+                .arg(arg!(<package> "Target package")),
         )
         .subcommand(
             Command::new("chlog")
                 .about("Auto-generate changelog entry, editing it, committing it afterwards")
                 .arg(arg!(-s --series <series> "Target distribution series").required(false))
                 .arg(arg!(--backport "This changelog is for a backport entry").required(false))
-                .arg(arg!(-v --version <version> "Target version").required(false))
+                .arg(arg!(-v --version <version> "Target version").required(false)),
         )
         .get_matches();
@@ -69,13 +57,28 @@ fn main() {
             let package = sub_matches.get_one::<String>("package").expect("required");
             let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
             let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str());
-            let version = sub_matches.get_one::<String>("version").map(|s| s.as_str()).unwrap_or("");
-            let ppa = sub_matches.get_one::<String>("ppa").map(|s| s.as_str()).unwrap_or("");
+            let version = sub_matches
+                .get_one::<String>("version")
+                .map(|s| s.as_str())
+                .unwrap_or("");
+            let ppa = sub_matches
+                .get_one::<String>("ppa")
+                .map(|s| s.as_str())
+                .unwrap_or("");
 
             // Since get is async, we need to block on it
             let (pb, mut progress_callback) = ui::create_progress_bar(&multi);
-            if let Err(e) = rt.block_on(get(package, version, series, "", ppa, dist, None, Some(&mut progress_callback))) {
+            if let Err(e) = rt.block_on(get(
+                package,
+                version,
+                series,
+                "",
+                ppa,
+                dist,
+                None,
+                Some(&mut progress_callback),
+            )) {
                 pb.finish_and_clear();
                 error!("{}", e);
                 std::process::exit(1);
@@ -83,7 +86,7 @@ fn main() {
             pb.finish_and_clear();
             multi.remove(&pb);
             info!("Done.");
-        },
+        }
         Some(("chlog", sub_matches)) => {
             let cwd = std::env::current_dir().unwrap();
             let version = sub_matches.get_one::<String>("version").map(|s| s.as_str());
@@ -98,7 +101,7 @@ fn main() {
                 .current_dir(&cwd)
                 .args(&["debian/changelog"])
                 .status();
-        },
+        }
         _ => unreachable!("Exhausted list of subcommands and subcommand_required prevents `None`"),
     }
 }
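Taken together, the two subcommands defined above are driven roughly like this (invocations are illustrative; the binary name is assumed from the pkh crate and noble is only an example series):

    pkh get -d ubuntu -s noble hello
    pkh chlog -v 2.10-3

The first resolves, clones and fetches sources for the hello package; the second generates a changelog entry for the given version and hands it to an editor before committing.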


@@ -1,12 +1,12 @@
+use chrono::NaiveDate;
 use flate2::read::GzDecoder;
-use std::io::Read;
 use std::collections::HashMap;
 use std::error::Error;
+use std::io::Read;
 use std::path::Path;
-use chrono::NaiveDate;
-use log::{debug, warn};
 use crate::ProgressCallback;
+use log::{debug, warn};
 
 const BASE_URL_UBUNTU: &str = "http://archive.ubuntu.com/ubuntu";
 const BASE_URL_DEBIAN: &str = "http://deb.debian.org/debian";
@@ -31,8 +31,14 @@ fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
         .from_reader(content.as_bytes());
 
     let headers = rdr.headers()?.clone();
-    let series_idx = headers.iter().position(|h| h == "series").ok_or("Column 'series' not found")?;
-    let created_idx = headers.iter().position(|h| h == "created").ok_or("Column 'created' not found")?;
+    let series_idx = headers
+        .iter()
+        .position(|h| h == "series")
+        .ok_or("Column 'series' not found")?;
+    let created_idx = headers
+        .iter()
+        .position(|h| h == "created")
+        .ok_or("Column 'created' not found")?;
 
     let mut entries = Vec::new();
     for result in rdr.records() {
@@ -54,7 +60,13 @@ async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
     let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
         std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
     } else {
-        reqwest::get(format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv").as_str()).await?.text().await?
+        reqwest::get(
+            format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
+                .as_str(),
+        )
+        .await?
+        .text()
+        .await?
     };
 
     let mut series = parse_series_csv(&content)?;
@@ -85,7 +97,11 @@ pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>>
     if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
         get_series_from_file(format!("/usr/share/distro-info/{dist}.csv").as_str())
     } else {
-        get_series_from_url(format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv").as_str()).await
+        get_series_from_url(
+            format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
+                .as_str(),
+        )
+        .await
     }
 }
@@ -118,18 +134,21 @@ pub struct PackageStanza {
     pub files: Vec<FileEntry>,
 }
 
 #[derive(Debug)]
 pub struct PackageInfo {
     pub dist: String,
     pub series: String,
     pub stanza: PackageStanza,
     pub preferred_vcs: Option<String>,
-    pub archive_url: String
+    pub archive_url: String,
 }
 
 fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
-    let pocket_full = if pocket.is_empty() { String::new() } else { format!("-{}", pocket) };
+    let pocket_full = if pocket.is_empty() {
+        String::new()
+    } else {
+        format!("-{}", pocket)
+    };
     format!("{base_url}/dists/{series}{pocket_full}/{component}/source/Sources.gz")
 }
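As a concrete illustration, with the Ubuntu base URL defined above, a hypothetical series noble, pocket updates and component main, this yields:

    http://archive.ubuntu.com/ubuntu/dists/noble-updates/main/source/Sources.gz

and an empty pocket simply omits the -updates suffix.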
@@ -145,14 +164,22 @@ fn get_base_url(dist: &str) -> &str {
  * Obtain the URL for the 'Release' file of a distribution series
  */
 fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
-    let pocket_full = if pocket.is_empty() { String::new() } else { format!("-{}", pocket) };
+    let pocket_full = if pocket.is_empty() {
+        String::new()
+    } else {
+        format!("-{}", pocket)
+    };
     format!("{base_url}/dists/{series}{pocket_full}/Release")
 }
 
 /*
  * Obtain the components of a distribution series by parsing the 'Release' file
  */
-async fn get_components(base_url: &str, series: &str, pocket: &str) -> Result<Vec<String>, Box<dyn Error>> {
+async fn get_components(
+    base_url: &str,
+    series: &str,
+    pocket: &str,
+) -> Result<Vec<String>, Box<dyn Error>> {
     let url = get_release_url(base_url, series, pocket);
     debug!("Fetching Release file from: {}", url);
@@ -161,7 +188,10 @@ async fn get_components(base_url: &str, series: &str, pocket: &str) -> Result<Ve
     for line in content.lines() {
         if line.starts_with("Components:") {
             if let Some((_, components)) = line.split_once(':') {
-                return Ok(components.split_whitespace().map(|s| s.to_string()).collect());
+                return Ok(components
+                    .split_whitespace()
+                    .map(|s| s.to_string())
+                    .collect());
             }
         }
     }
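The Components: line scanned here looks like this in an Ubuntu Release file (Debian lists main, contrib, non-free and non-free-firmware instead):

    Components: main restricted universe multiverse

so for that input the function returns the four component names.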
@@ -173,7 +203,11 @@ async fn get_components(base_url: &str, series: &str, pocket: &str) -> Result<Ve
  * Parse a 'Sources.gz' debian package file data, to look for a target package and
  * return the data for that package stanza
  */
-fn parse_sources(data: &[u8], target_package: &str, target_version: Option<&str>) -> Result<Option<PackageStanza>, Box<dyn Error>> {
+fn parse_sources(
+    data: &[u8],
+    target_package: &str,
+    target_version: Option<&str>,
+) -> Result<Option<PackageStanza>, Box<dyn Error>> {
     let mut d = GzDecoder::new(data);
     let mut s = String::new();
     d.read_to_string(&mut s)?;
@@ -183,7 +217,9 @@ fn parse_sources(data: &[u8], target_package: &str, target_version: Option<&str>
     let mut current_key = String::new();
 
     for line in stanza.lines() {
-        if line.is_empty() { continue; }
+        if line.is_empty() {
+            continue;
+        }
 
         if line.starts_with(' ') || line.starts_with('\t') {
             // Continuation line
@@ -239,7 +275,12 @@ fn parse_sources(data: &[u8], target_package: &str, target_version: Option<&str>
     Ok(None)
 }
 
-pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option<&str>) -> Result<PackageInfo, Box<dyn Error>> {
+pub async fn get(
+    package_name: &str,
+    series: &str,
+    pocket: &str,
+    version: Option<&str>,
+) -> Result<PackageInfo, Box<dyn Error>> {
     let dist = get_dist_from_series(series).await?;
 
     // Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa
@@ -277,7 +318,10 @@ pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option
     let compressed_data = response.bytes().await?;
 
-    debug!("Downloaded Sources.gz for {}/{}/{}", dist, series, component);
+    debug!(
+        "Downloaded Sources.gz for {}/{}/{}",
+        dist, series, component
+    );
 
     if let Some(stanza) = parse_sources(&compressed_data, package_name, version)? {
         if let Some(vcs) = &stanza.vcs_git {
@@ -297,10 +341,20 @@ pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option
         }
     }
 
-    Err(format!("Package '{}' not found in {}/{}", package_name, dist, series).into())
+    Err(format!(
+        "Package '{}' not found in {}/{}",
+        package_name, dist, series
+    )
+    .into())
 }
 
-pub async fn find_package(package_name: &str, dist: &str, pocket: &str, version: Option<&str>, progress: ProgressCallback<'_>) -> Result<PackageInfo, Box<dyn Error>> {
+pub async fn find_package(
+    package_name: &str,
+    dist: &str,
+    pocket: &str,
+    version: Option<&str>,
+    progress: ProgressCallback<'_>,
+) -> Result<PackageInfo, Box<dyn Error>> {
     let series_list = get_ordered_series(dist).await?;
 
     for (i, series) in series_list.iter().enumerate() {
@@ -311,12 +365,15 @@ pub async fn find_package(package_name: &str, dist: &str, pocket: &str, version:
         match get(package_name, series, pocket, version).await {
             Ok(info) => {
                 if i > 0 {
-                    warn!("Package '{}' not found in development release. Found in {}/{}.", package_name, dist, series);
+                    warn!(
+                        "Package '{}' not found in development release. Found in {}/{}.",
+                        package_name, dist, series
+                    );
                 } else {
                     debug!("Found package '{}' in {}/{}", package_name, dist, series);
                 }
                 return Ok(info);
-            },
+            }
             Err(_e) => {
                 continue;
             }
@@ -335,10 +392,15 @@ mod tests {
         // "hello" should exist on Launchpad for Ubuntu
         let url = check_launchpad_repo("hello").await.unwrap();
         assert!(url.is_some());
-        assert_eq!(url.unwrap(), "https://git.launchpad.net/ubuntu/+source/hello");
+        assert_eq!(
+            url.unwrap(),
+            "https://git.launchpad.net/ubuntu/+source/hello"
+        );
 
         // "this-package-should-not-exist-12345" should not exist
-        let url = check_launchpad_repo("this-package-should-not-exist-12345").await.unwrap();
+        let url = check_launchpad_repo("this-package-should-not-exist-12345")
+            .await
+            .unwrap();
         assert!(url.is_none());
     }
@@ -364,8 +426,8 @@ mod tests {
     #[test]
     fn test_parse_sources() {
-        use flate2::write::GzEncoder;
         use flate2::Compression;
+        use flate2::write::GzEncoder;
         use std::io::Write;
 
         let data = "Package: hello\nVersion: 2.10-2\nDirectory: pool/main/h/hello\nVcs-Git: https://salsa.debian.org/debian/hello.git\n\nPackage: other\nVersion: 1.0\n";
@@ -378,7 +440,10 @@ mod tests {
         assert_eq!(info.package, "hello");
         assert_eq!(info.version, "2.10-2");
         assert_eq!(info.directory, "pool/main/h/hello");
-        assert_eq!(info.vcs_git.unwrap(), "https://salsa.debian.org/debian/hello.git");
+        assert_eq!(
+            info.vcs_git.unwrap(),
+            "https://salsa.debian.org/debian/hello.git"
+        );
 
         let none = parse_sources(&compressed, "missing", None).unwrap();
         assert!(none.is_none());
@@ -387,7 +452,9 @@ mod tests {
     #[tokio::test]
     async fn test_find_package_fallback() {
         // python2.7 is in bullseye but not above
-        let info = find_package("python2.7", "debian", "", None, None).await.unwrap();
+        let info = find_package("python2.7", "debian", "", None, None)
+            .await
+            .unwrap();
         assert_eq!(info.stanza.package, "python2.7");
         assert_eq!(info.series, "bullseye")
     }
@@ -395,7 +462,9 @@ mod tests {
     #[tokio::test]
     async fn test_find_package_devel() {
         // hello is in sid
-        let info = find_package("hello", "debian", "", None, None).await.unwrap();
+        let info = find_package("hello", "debian", "", None, None)
+            .await
+            .unwrap();
         assert_eq!(info.stanza.package, "hello");
         assert_eq!(info.series, "sid")
     }


@@ -1,12 +1,16 @@
-use std::time::Duration;
 use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
+use std::time::Duration;
 
-pub fn create_progress_bar(multi: &MultiProgress) -> (ProgressBar, impl Fn(&str, &str, usize, usize) + '_) {
+pub fn create_progress_bar(
+    multi: &MultiProgress,
+) -> (ProgressBar, impl Fn(&str, &str, usize, usize) + '_) {
     let pb = multi.add(ProgressBar::new(0));
     pb.enable_steady_tick(Duration::from_millis(50));
-    pb.set_style(ProgressStyle::default_bar()
-        .template("> {spinner:.blue} {prefix}")
-        .unwrap());
+    pb.set_style(
+        ProgressStyle::default_bar()
+            .template("> {spinner:.blue} {prefix}")
+            .unwrap(),
+    );
 
     let pb_clone = pb.clone();
     let callback = move |prefix: &str, msg: &str, progress: usize, total: usize| {
@@ -17,9 +21,11 @@ pub fn create_progress_bar(multi: &MultiProgress) -> (ProgressBar, impl Fn(&str,
                 .unwrap()
                 .progress_chars("=> "));
         } else {
-            pb.set_style(ProgressStyle::default_bar()
-                .template("> {spinner:.blue} {prefix}")
-                .unwrap());
+            pb.set_style(
+                ProgressStyle::default_bar()
+                    .template("> {spinner:.blue} {prefix}")
+                    .unwrap(),
+            );
         }
 
         if !prefix.is_empty() {
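For context, a minimal sketch of how this helper is driven from a caller such as main.rs (hypothetical call site; the callback signature matches the one returned above):

    let multi = indicatif::MultiProgress::new();
    let (pb, progress) = ui::create_progress_bar(&multi);
    // prefix, message, units done, units total
    progress("Cloning hello...", "Receiving objects...", 10, 100);
    pb.finish_and_clear();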