src/changelog.rs (230)
@@ -1,14 +1,18 @@
+use chrono::Local;
+use git2::{Oid, Repository, Sort};
+use regex::Regex;
 use std::fs::File;
 use std::io::{self, BufRead, Read, Write};
 use std::path::Path;
-use regex::Regex;
-use chrono::Local;
-use git2::{Repository, Sort, Oid};
 
 /*
  * Automatically generate a changelog entry from a commit history and previous changelog
  */
-pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Option<&str>) -> Result<(), Box<dyn std::error::Error>> {
+pub fn generate_entry(
+    changelog_file: &str,
+    cwd: Option<&Path>,
+    user_version: Option<&str>,
+) -> Result<(), Box<dyn std::error::Error>> {
     let changelog_path = if let Some(path) = cwd {
         path.join(changelog_file)
     } else {
@@ -28,19 +32,7 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
     let repo = Repository::open(&repo_path)?;
 
     // 3. Find commits since the tag corresponding to the version
-    // We assume the tag format is "debian/<version>" or just "<version>" or "v<version>"
-    // But usually for Debian packages it might be "debian/<version>"
-    // Let's try to find a tag that matches.
-
-    // For now, let's assume the tag is simply the version string, or debian/version
-    // If we can't find a tag, we might have to error out or take all commits?
-    // Let's try to find the tag.
-
-    // Actually, usually we want to generate an entry for a NEW version based on changes since the OLD version.
-    // The `changelog_file` passed here is the EXISTING changelog.
-    // So `version` is the PREVIOUS version.
-    // We want to find commits since `version`.
-
     let commits = get_commits_since_version(&repo, &old_version)?;
 
     if commits.is_empty() {
@@ -48,20 +40,6 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
         // return Ok(());
     }
-
-    // 4. Format the new entry
-    // We don't know the NEW version yet, so we might use "UNRELEASED" or increment the version.
-    // For now, let's use "UNRELEASED" and let the user edit it, or maybe we can try to increment it.
-    // The requirement says "Automatically generate a changelog entry".
-    // Usually tools like `dch` add a new entry.
-
-    // Let's create a new entry with "UNRELEASED" distribution and incremented version?
-    // Or just append to the top.
-
-    // Let's assume we want to output the new entry to stdout or prepend to file?
-    // The function signature returns (), so maybe it modifies the file.
-
-    // Let's prepend to the file.
 
     // Compute new version if needed, or use user-supplied one
     let new_version = if let Some(version) = user_version {
         version.to_string()
@@ -69,12 +47,19 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
         // TODO: Pass these flags from CLI
         compute_new_version(&old_version, false, false, false)
     };
 
     let (maintainer_name, maintainer_email) = get_maintainer_info()?;
-    let new_entry = format_entry(&package, &new_version, &series, &commits, &maintainer_name, &maintainer_email);
+    let new_entry = format_entry(
+        &package,
+        &new_version,
+        &series,
+        &commits,
+        &maintainer_name,
+        &maintainer_email,
+    );
 
     prepend_to_file(&changelog_path, &new_entry)?;
 
     println!("Added new changelog entry to {}", changelog_path.display());
 
     Ok(())
@@ -84,7 +69,12 @@ pub fn generate_entry(changelog_file: &str, cwd: Option<&Path>, user_version: Op
  * Compute the next (most probable) version number of a package, from old version and
  * conditions on changes (is ubuntu upload, is a no change rebuild, is a non-maintainer upload)
  */
-fn compute_new_version(old_version: &str, is_ubuntu: bool, is_rebuild: bool, is_nmu: bool) -> String {
+fn compute_new_version(
+    old_version: &str,
+    is_ubuntu: bool,
+    is_rebuild: bool,
+    is_nmu: bool,
+) -> String {
     if is_ubuntu {
         return increment_suffix(old_version, "ubuntu");
     }
@@ -107,15 +97,15 @@ fn compute_new_version(old_version: &str, is_ubuntu: bool, is_rebuild: bool, is_
 fn increment_suffix(version: &str, suffix: &str) -> String {
     // If suffix is empty, we just look for trailing digits
     // If suffix is not empty, we look for suffix followed by digits
 
     let pattern = if suffix.is_empty() {
         r"(\d+)$".to_string()
     } else {
         format!(r"{}(\d+)$", regex::escape(suffix))
     };
 
     let re = Regex::new(&pattern).unwrap();
 
     if let Some(caps) = re.captures(version) {
         let num_str = caps.get(1).unwrap().as_str();
         let num: u32 = num_str.parse().unwrap();
@@ -124,7 +114,7 @@ fn increment_suffix(version: &str, suffix: &str) -> String {
         new_ver.replace_range(range, &(num + 1).to_string());
         return new_ver;
     }
 
     // If pattern not found, append suffix + "1"
     // But if suffix is empty, we default to appending "-1" (standard Debian revision start)
     if suffix.is_empty() {
@@ -137,7 +127,9 @@ fn increment_suffix(version: &str, suffix: &str) -> String {
 /*
  * Parse a changelog file first entry header, to obtain (package, version, series)
  */
-fn parse_changelog_header(path: &Path) -> Result<(String, String, String), Box<dyn std::error::Error>> {
+fn parse_changelog_header(
+    path: &Path,
+) -> Result<(String, String, String), Box<dyn std::error::Error>> {
     let file = File::open(path)?;
     let mut reader = io::BufReader::new(file);
     let mut first_line = String::new();
@@ -158,7 +150,10 @@ fn parse_changelog_header(path: &Path) -> Result<(String, String, String), Box<d
 /*
  * Obtain all commit messages as a list since a tagged version in a git repository
  */
-fn get_commits_since_version(repo: &Repository, version: &str) -> Result<Vec<String>, Box<dyn std::error::Error>> {
+fn get_commits_since_version(
+    repo: &Repository,
+    version: &str,
+) -> Result<Vec<String>, Box<dyn std::error::Error>> {
     let mut revwalk = repo.revwalk()?;
     revwalk.set_sorting(Sort::TIME)?;
 
@@ -205,7 +200,7 @@ fn get_commits_since_version(repo: &Repository, version: &str) -> Result<Vec<Str
         let message = commit.message().unwrap_or("").trim();
         let summary = message.lines().next().unwrap_or("").to_string();
         if !summary.is_empty() {
             commits.push(summary);
         }
     }
 
@@ -216,12 +211,22 @@ fn get_commits_since_version(repo: &Repository, version: &str) -> Result<Vec<Str
  * Create a changelog entry from information, i.e. format that information
  * into a changelog entry
  */
-fn format_entry(package: &str, version: &str, series: &str, changes: &[String], maintainer_name: &str, maintainer_email: &str) -> String {
+fn format_entry(
+    package: &str,
+    version: &str,
+    series: &str,
+    changes: &[String],
+    maintainer_name: &str,
+    maintainer_email: &str,
+) -> String {
     let mut entry = String::new();
 
     // Header: package, version and distribution series
-    entry.push_str(&format!("{} ({}) {}; urgency=medium\n\n", package, version, series));
+    entry.push_str(&format!(
+        "{} ({}) {}; urgency=medium\n\n",
+        package, version, series
+    ));
 
     // Changes
     for change in changes {
         entry.push_str(&format!(" * {}\n", change));
@@ -229,11 +234,14 @@ fn format_entry(package: &str, version: &str, series: &str, changes: &[String],
     if changes.is_empty() {
         entry.push_str(" * \n");
     }
 
     // Footer: date, maintainer
     let date = Local::now().format("%a, %d %b %Y %H:%M:%S %z").to_string();
-    entry.push_str(&format!("\n -- {} <{}> {}\n\n", maintainer_name, maintainer_email, date));
+    entry.push_str(&format!(
+        "\n -- {} <{}> {}\n\n",
+        maintainer_name, maintainer_email, date
+    ));
 
     return entry;
 }
 
@@ -244,11 +252,11 @@ fn prepend_to_file(path: &Path, content: &str) -> Result<(), Box<dyn std::error:
     let mut file = File::open(path)?;
     let mut existing_content = String::new();
     file.read_to_string(&mut existing_content)?;
 
     let mut file = File::create(path)?;
     file.write_all(content.as_bytes())?;
     file.write_all(existing_content.as_bytes())?;
 
     Ok(())
 }
 
@@ -272,17 +280,45 @@ mod tests {
     use tempfile::TempDir;
 
     fn setup_repo(dir: &Path) {
-        Command::new("git").arg("init").current_dir(dir).output().unwrap();
-        Command::new("git").arg("config").arg("user.email").arg("you@example.com").current_dir(dir).output().unwrap();
-        Command::new("git").arg("config").arg("user.name").arg("Your Name").current_dir(dir).output().unwrap();
+        Command::new("git")
+            .arg("init")
+            .current_dir(dir)
+            .output()
+            .unwrap();
+        Command::new("git")
+            .arg("config")
+            .arg("user.email")
+            .arg("you@example.com")
+            .current_dir(dir)
+            .output()
+            .unwrap();
+        Command::new("git")
+            .arg("config")
+            .arg("user.name")
+            .arg("Your Name")
+            .current_dir(dir)
+            .output()
+            .unwrap();
     }
 
     fn commit(dir: &Path, message: &str) {
-        Command::new("git").arg("commit").arg("--allow-empty").arg("-m").arg(message).current_dir(dir).output().unwrap();
+        Command::new("git")
+            .arg("commit")
+            .arg("--allow-empty")
+            .arg("-m")
+            .arg(message)
+            .current_dir(dir)
+            .output()
+            .unwrap();
     }
 
     fn tag(dir: &Path, name: &str) {
-        Command::new("git").arg("tag").arg(name).current_dir(dir).output().unwrap();
+        Command::new("git")
+            .arg("tag")
+            .arg(name)
+            .current_dir(dir)
+            .output()
+            .unwrap();
     }
 
     #[test]
@@ -298,7 +334,12 @@ mod tests {
         std::fs::write(&changelog_path, initial_content).unwrap();
 
         // Commit and tag
-        Command::new("git").arg("add").arg(".").current_dir(repo_dir).output().unwrap();
+        Command::new("git")
+            .arg("add")
+            .arg(".")
+            .current_dir(repo_dir)
+            .output()
+            .unwrap();
         commit(repo_dir, "Initial commit");
         tag(repo_dir, "debian/0.1.0-1");
 
@@ -320,7 +361,7 @@ mod tests {
         // Verify content
         let content = std::fs::read_to_string(&changelog_path).unwrap();
         println!("{}", content);
 
         assert!(content.contains("mypackage (0.1.0-2) unstable; urgency=medium"));
         assert!(content.contains("* Fix bug A"));
         assert!(content.contains("* Add feature B"));
@@ -332,36 +373,69 @@ mod tests {
     #[test]
     fn test_compute_new_version() {
         // Debian upload
-        assert_eq!(compute_new_version("15.2.0-8", false, false, false), "15.2.0-9");
-        assert_eq!(compute_new_version("15.2.0-9", false, false, false), "15.2.0-10");
+        assert_eq!(
+            compute_new_version("15.2.0-8", false, false, false),
+            "15.2.0-9"
+        );
+        assert_eq!(
+            compute_new_version("15.2.0-9", false, false, false),
+            "15.2.0-10"
+        );
 
         // Ubuntu upload
-        assert_eq!(compute_new_version("15.2.0-9", true, false, false), "15.2.0-9ubuntu1");
-        assert_eq!(compute_new_version("15.2.0-9ubuntu1", true, false, false), "15.2.0-9ubuntu2");
+        assert_eq!(
+            compute_new_version("15.2.0-9", true, false, false),
+            "15.2.0-9ubuntu1"
+        );
+        assert_eq!(
+            compute_new_version("15.2.0-9ubuntu1", true, false, false),
+            "15.2.0-9ubuntu2"
+        );
 
         // No change rebuild
-        assert_eq!(compute_new_version("15.2.0-9", false, true, false), "15.2.0-9build1");
-        assert_eq!(compute_new_version("15.2.0-9build1", false, true, false), "15.2.0-9build2");
+        assert_eq!(
+            compute_new_version("15.2.0-9", false, true, false),
+            "15.2.0-9build1"
+        );
+        assert_eq!(
+            compute_new_version("15.2.0-9build1", false, true, false),
+            "15.2.0-9build2"
+        );
 
         // Rebuild of Ubuntu version
-        assert_eq!(compute_new_version("15.2.0-9ubuntu1", false, true, false), "15.2.0-9ubuntu1build1");
+        assert_eq!(
+            compute_new_version("15.2.0-9ubuntu1", false, true, false),
+            "15.2.0-9ubuntu1build1"
+        );
 
         // NMU
         // Native
         assert_eq!(compute_new_version("1.0", false, false, true), "1.0+nmu1");
-        assert_eq!(compute_new_version("1.0+nmu1", false, false, true), "1.0+nmu2");
+        assert_eq!(
+            compute_new_version("1.0+nmu1", false, false, true),
+            "1.0+nmu2"
+        );
 
         // Non-native
         assert_eq!(compute_new_version("1.0-1", false, false, true), "1.0-1.1");
-        assert_eq!(compute_new_version("1.0-1.1", false, false, true), "1.0-1.2");
+        assert_eq!(
+            compute_new_version("1.0-1.1", false, false, true),
+            "1.0-1.2"
+        );
 
         // NMU of NMU?
-        assert_eq!(compute_new_version("1.0-1.2", false, false, true), "1.0-1.3");
+        assert_eq!(
+            compute_new_version("1.0-1.2", false, false, true),
+            "1.0-1.3"
+        );
 
         // Native package uploads
         assert_eq!(compute_new_version("1.0", false, false, false), "1.1");
         assert_eq!(compute_new_version("1.0.5", false, false, false), "1.0.6");
-        assert_eq!(compute_new_version("20241126", false, false, false), "20241127");
+        assert_eq!(
+            compute_new_version("20241126", false, false, false),
+            "20241127"
+        );
     }
 
     #[test]
@@ -375,7 +449,7 @@ mod tests {
         let (name, email) = get_maintainer_info().unwrap();
         assert_eq!(name, "Env Name");
         assert_eq!(email, "env@example.com");
 
         unsafe {
             std::env::remove_var("DEBFULLNAME");
             std::env::remove_var("DEBEMAIL");

src/get.rs (237)
@@ -1,29 +1,40 @@
+use std::cmp::min;
 use std::error::Error;
 use std::path::Path;
-use std::cmp::min;
 
 use pkh::package_info;
 use pkh::package_info::PackageInfo;
 
 use std::process::Command;
 
-use log::{debug};
+use log::debug;
 
 use regex::Regex;
 
 use pkh::ProgressCallback;
 
-fn clone_repo(url: &str, package: &str, branch: Option<&str>, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+fn clone_repo(
+    url: &str,
+    package: &str,
+    branch: Option<&str>,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     let target_path = if let Some(path) = cwd {
         path.join(package)
     } else {
         Path::new(package).to_path_buf()
     };
 
     let mut callbacks = git2::RemoteCallbacks::new();
     if let Some(ref progress_cb) = progress {
         callbacks.transfer_progress(move |stats| {
-            (progress_cb)("", "Receiving objects...", stats.received_objects(), stats.total_objects());
+            (progress_cb)(
+                "",
+                "Receiving objects...",
+                stats.received_objects(),
+                stats.total_objects(),
+            );
             true
         });
         callbacks.sideband_progress(move |data| {
@@ -31,8 +42,18 @@ fn clone_repo(url: &str, package: &str, branch: Option<&str>, cwd: Option<&Path>
             let re = Regex::new(r"(.*):[ ]*([0-9]*)% \(([0-9]*)/([0-9]*)\)").unwrap();
             if let Some(caps) = re.captures(msg.trim()) {
                 let msg = caps.get(1).map_or("", |m| m.as_str()).to_string();
-                let objects = caps.get(3).map_or("", |m| m.as_str()).to_string().parse::<usize>().unwrap_or(0);
-                let total = caps.get(4).map_or("", |m| m.as_str()).to_string().parse::<usize>().unwrap_or(0);
+                let objects = caps
+                    .get(3)
+                    .map_or("", |m| m.as_str())
+                    .to_string()
+                    .parse::<usize>()
+                    .unwrap_or(0);
+                let total = caps
+                    .get(4)
+                    .map_or("", |m| m.as_str())
+                    .to_string()
+                    .parse::<usize>()
+                    .unwrap_or(0);
 
                 (progress_cb)("", msg.as_str(), objects, total);
             }
@@ -46,29 +67,25 @@ fn clone_repo(url: &str, package: &str, branch: Option<&str>, cwd: Option<&Path>
 
     let mut builder = git2::build::RepoBuilder::new();
     builder.fetch_options(fetch_options);
 
     if let Some(b) = branch {
         builder.branch(b);
     }
 
     return match builder.clone(url, &target_path) {
-        Ok(_repo) => {
-            Ok(())
-        }
-        Err(e) => {
-            Err(format!("Failed to clone: {}", e).into())
-        }
+        Ok(_repo) => Ok(()),
+        Err(e) => Err(format!("Failed to clone: {}", e).into()),
     };
 }
 
-use sha2::{Sha256, Digest};
+use sha2::{Digest, Sha256};
 use std::fs::File;
 use std::io::Write;
 
-use futures_util::StreamExt;
 use flate2::read::GzDecoder;
-use xz2::read::XzDecoder;
+use futures_util::StreamExt;
 use tar::Archive;
+use xz2::read::XzDecoder;
 
 fn extract_archive(path: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
     let file = File::open(path)?;
@@ -85,7 +102,7 @@ fn extract_archive(path: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
     } else {
         return Err(format!("Unsupported archive format: {}", filename).into());
     }
 
     Ok(())
 }
 
@@ -97,26 +114,37 @@ fn checkout_pristine_tar(package_dir: &Path, filename: &str) -> Result<(), Box<d
         .expect("pristine-tar checkout failed");
 
     if !output.status.success() {
-        return Err(format!("pristine-tar checkout failed with status: {}", output.status).into());
+        return Err(format!(
+            "pristine-tar checkout failed with status: {}",
+            output.status
+        )
+        .into());
     }
     Ok(())
 }
 
-async fn download_file_checksum(url: &str, checksum: &str, target_dir: &Path, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+async fn download_file_checksum(
+    url: &str,
+    checksum: &str,
+    target_dir: &Path,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     // Download with reqwest
     let response = reqwest::get(url).await?;
    if !response.status().is_success() {
         return Err(format!("Failed to download '{}' : {}", &url, response.status()).into());
     }
 
-    let total_size = response.content_length().ok_or(format!("Failed to get content length from '{}'", &url))?;
+    let total_size = response
+        .content_length()
+        .ok_or(format!("Failed to get content length from '{}'", &url))?;
     let mut index = 0;
 
     // Target file: extract file name from URL
     let filename = Path::new(url).file_name().unwrap().to_str().unwrap();
     let path = target_dir.join(filename);
     let mut file = File::create(path)?;
 
     // Download chunk by chunk to disk, while updating hasher for checksum
     let mut stream = response.bytes_stream();
     let mut hasher = Sha256::new();
@@ -135,7 +163,11 @@ async fn download_file_checksum(url: &str, checksum: &str, target_dir: &Path, pr
     let result = hasher.finalize();
     let calculated_checksum = hex::encode(result);
     if calculated_checksum != checksum {
-        return Err(format!("Checksum mismatch! Expected {}, got {}", checksum, calculated_checksum).into());
+        return Err(format!(
+            "Checksum mismatch! Expected {}, got {}",
+            checksum, calculated_checksum
+        )
+        .into());
     }
 
     Ok(())
@@ -143,9 +175,12 @@
 
 fn setup_pristine_tar_branch(package_dir: &Path, dist: &str) -> Result<(), Box<dyn Error>> {
     let repo = git2::Repository::open(package_dir)?;
 
     // Check if local branch already exists
-    if repo.find_branch("pristine-tar", git2::BranchType::Local).is_ok() {
+    if repo
+        .find_branch("pristine-tar", git2::BranchType::Local)
+        .is_ok()
+    {
         return Ok(());
     }
 
@@ -156,26 +191,30 @@ fn setup_pristine_tar_branch(package_dir: &Path, dist: &str) -> Result<(), Box<d
         if let Some(name) = branch.name()? {
             if name.ends_with(&format!("/{dist}/pristine-tar")) {
                 debug!("Found remote pristine-tar branch: {}", name);
 
                 let commit = branch.get().peel_to_commit()?;
 
                 // Create local branch
                 let mut local_branch = repo.branch("pristine-tar", &commit, false)?;
 
                 // Set upstream
                 local_branch.set_upstream(Some(name))?;
 
                 debug!("Created local pristine-tar branch tracking {}", name);
                 return Ok(());
             }
         }
     }
 
     debug!("No remote pristine-tar branch found.");
     Ok(())
 }
 
-async fn fetch_orig_tarball(info: &PackageInfo, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+async fn fetch_orig_tarball(
+    info: &PackageInfo,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     let package_dir = if let Some(path) = cwd {
         path.join(&info.stanza.package)
     } else {
@@ -184,36 +223,52 @@ async fn fetch_orig_tarball(info: &PackageInfo, cwd: Option<&Path>, progress: Pr
 
     // Find the orig tarball in the file list
     // Usually ends with .orig.tar.gz or .orig.tar.xz
-    let orig_file = info.stanza.files.iter().find(|f| {
-        f.name.contains(".orig.tar.")
-    }).unwrap();
+    let orig_file = info
+        .stanza
+        .files
+        .iter()
+        .find(|f| f.name.contains(".orig.tar."))
+        .unwrap();
     let filename = &orig_file.name;
 
     // 1. Try executing pristine-tar
 
     // Setup pristine-tar branch if needed (by tracking remote branch)
     let _ = setup_pristine_tar_branch(&package_dir, info.dist.as_str());
 
     if let Err(e) = checkout_pristine_tar(&package_dir, filename.as_str()) {
-        debug!("pristine-tar failed: {}. Falling back to archive download.", e);
+        debug!(
+            "pristine-tar failed: {}. Falling back to archive download.",
+            e
+        );
 
         // 2. Fallback to archive download
         // We download to the parent directory of the package repo (which is standard for build tools)
         // or the current directory if cwd is None (which effectively is the parent of the package dir)
         let target_dir = cwd.unwrap_or_else(|| Path::new("."));
-        download_file_checksum(format!("{}/{}", &info.archive_url, filename).as_str(), &orig_file.sha256, target_dir, progress).await?;
+        download_file_checksum(
+            format!("{}/{}", &info.archive_url, filename).as_str(),
+            &orig_file.sha256,
+            target_dir,
+            progress,
+        )
+        .await?;
     }
 
     Ok(())
 }
 
-async fn fetch_archive_sources(info: &PackageInfo, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+async fn fetch_archive_sources(
+    info: &PackageInfo,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<(), Box<dyn Error>> {
     let package_dir = if let Some(path) = cwd {
         path.join(&info.stanza.package)
     } else {
         Path::new(&info.stanza.package).to_path_buf()
     };
 
     std::fs::create_dir_all(&package_dir)?;
 
     for file in &info.stanza.files {
@@ -222,18 +277,20 @@ async fn fetch_archive_sources(info: &PackageInfo, cwd: Option<&Path>, progress:
     }
 
     // Extract the debian tarball or diff
-    let debian_file = info.stanza.files.iter().find(|f| {
-        f.name.contains(".debian.tar.") || f.name.contains(".diff.gz")
-    });
+    let debian_file = info
+        .stanza
+        .files
+        .iter()
+        .find(|f| f.name.contains(".debian.tar.") || f.name.contains(".diff.gz"));
 
     if let Some(file) = debian_file {
         let path = package_dir.join(&file.name);
         let extract_dir = package_dir.join(&info.stanza.package);
 
         if file.name.ends_with(".tar.xz") || file.name.ends_with(".tar.gz") {
             if let Err(e) = extract_archive(&path, &extract_dir) {
                 return Err(format!("Failed to extract {}: {}", file.name, e).into());
             }
         }
 
         // Remove archive after extraction
@@ -243,13 +300,31 @@ async fn fetch_archive_sources(info: &PackageInfo, cwd: Option<&Path>, progress:
     Ok(())
 }
 
-pub async fn get(package: &str, _version: &str, series: Option<&str>, pocket: &str, _ppa: &str, dist: Option<&str>, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<PackageInfo, Box<dyn Error>> {
-    let version_opt = if _version.is_empty() { None } else { Some(_version) };
+pub async fn get(
+    package: &str,
+    _version: &str,
+    series: Option<&str>,
+    pocket: &str,
+    _ppa: &str,
+    dist: Option<&str>,
+    cwd: Option<&Path>,
+    progress: ProgressCallback<'_>,
+) -> Result<PackageInfo, Box<dyn Error>> {
+    let version_opt = if _version.is_empty() {
+        None
+    } else {
+        Some(_version)
+    };
 
     /* Obtain the package information, either directly in a series or with a search in all series */
     let package_info = if let Some(s) = series {
         if let Some(cb) = progress {
-            cb(&format!("Resolving package info for {}...", package), "", 0, 0);
+            cb(
+                &format!("Resolving package info for {}...", package),
+                "",
+                0,
+                0,
+            );
         }
 
         // Get the package information from that series and pocket
@@ -264,15 +339,20 @@ pub async fn get(package: &str, _version: &str, series: Option<&str>, pocket: &s
                 "debian"
             }
         );
 
         if let Some(cb) = progress {
-            cb(&format!("Searching for package {} in {}...", package, dist), "", 0, 0);
+            cb(
+                &format!("Searching for package {} in {}...", package, dist),
+                "",
+                0,
+                0,
+            );
         }
 
         // Try to find the package in all series from that dist
         package_info::find_package(package, dist, pocket, version_opt, progress).await?
     };
 
     let package_dir = if let Some(path) = cwd {
         path.join(package)
     } else {
@@ -298,11 +378,30 @@ pub async fn get(package: &str, _version: &str, series: Option<&str>, pocket: &s
     } else {
         None
     };
 
     if let Some(cb) = progress {
-        cb(&format!("Cloning {}{}...", url, if let Some(b) = &branch_name { format!(" (branch {})", b) } else { String::new() }), "", 0, 0);
+        cb(
+            &format!(
+                "Cloning {}{}...",
+                url,
+                if let Some(b) = &branch_name {
+                    format!(" (branch {})", b)
+                } else {
+                    String::new()
+                }
+            ),
+            "",
+            0,
+            0,
+        );
     }
-    clone_repo(url.as_str(), package, branch_name.as_deref(), Some(&package_dir), progress)?;
+    clone_repo(
+        url.as_str(),
+        package,
+        branch_name.as_deref(),
+        Some(&package_dir),
+        progress,
+    )?;
     if let Some(cb) = progress {
         cb("Fetching orig tarball...", "", 0, 0);
     }
@@ -328,19 +427,27 @@ mod tests {
         // For determinism, we require for tests that either a distro or series is specified,
         // as no distribution would mean fallback to system distro
         assert!(dist != None || series != None);
 
         // Use a temp directory as working directory
         let temp_dir = tempfile::tempdir().unwrap();
         let cwd = temp_dir.path();
 
         // Main 'get' command: the one we want to test
-        let info = get(package, "", series, "", "", dist, Some(cwd), None).await.unwrap();
+        let info = get(package, "", series, "", "", dist, Some(cwd), None)
+            .await
+            .unwrap();
 
         let package_dir = cwd.join(package);
         assert!(package_dir.exists());
         let package_source_dir = package_dir.join(package);
-        assert!(package_source_dir.exists(), "Package git repo directory not created");
-        assert!(package_source_dir.join("debian").exists(), "debian directory not present");
+        assert!(
+            package_source_dir.exists(),
+            "Package git repo directory not created"
+        );
+        assert!(
+            package_source_dir.join("debian").exists(),
+            "debian directory not present"
+        );
 
         if package_source_dir.join(".git").exists() {
             // Verify we are on the correct branch
@@ -364,7 +471,7 @@ mod tests {
                 }
             }
         }
 
         // Check for orig tarball in package dir
         let mut found_tarball = false;
         for entry in std::fs::read_dir(package_dir).unwrap() {

src/main.rs (65)
@@ -2,7 +2,7 @@ use std::env;
 use std::io::Write;
 
 extern crate clap;
-use clap::{arg, command, Command};
+use clap::{Command, arg, command};
 
 extern crate flate2;
 
@@ -12,8 +12,8 @@ use get::get;
 mod changelog;
 use changelog::generate_entry;
 
-use log::{info, error};
 use indicatif_log_bridge::LogWrapper;
+use log::{error, info};
 
 mod ui;
 
@@ -22,60 +22,63 @@ fn main() {
     let logger =
         env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
             .format_timestamp(None)
-            .format(|buf, record| {
-                writeln!(buf, "{}", record.args())
-            })
+            .format(|buf, record| writeln!(buf, "{}", record.args()))
             .build();
     let multi = indicatif::MultiProgress::new();
-    LogWrapper::new(multi.clone(), logger)
-        .try_init()
-        .unwrap();
-    let matches = command!()
+    LogWrapper::new(multi.clone(), logger).try_init().unwrap();
+    let matches = command!()
         .subcommand_required(true)
         .disable_version_flag(true)
         .subcommand(
             Command::new("get")
                 .about("Get a source package from the archive or git")
                 .arg(
-                    arg!(-s --series <series> "Target package distribution series")
-                        .required(false)
+                    arg!(-s --series <series> "Target package distribution series").required(false),
                 )
                 .arg(
                     arg!(-d --dist <dist> "Target package distribution (debian, ubuntu)")
-                        .required(false)
+                        .required(false),
                 )
-                .arg(
-                    arg!(-v --version <version> "Target package version")
-                        .required(false)
-                )
-                .arg(
-                    arg!(--ppa <ppa> "Download the package from a specific PPA")
-                        .required(false)
-                )
-                .arg(arg!(<package> "Target package"))
+                .arg(arg!(-v --version <version> "Target package version").required(false))
+                .arg(arg!(--ppa <ppa> "Download the package from a specific PPA").required(false))
+                .arg(arg!(<package> "Target package")),
         )
         .subcommand(
             Command::new("chlog")
                 .about("Auto-generate changelog entry, editing it, committing it afterwards")
                 .arg(arg!(-s --series <series> "Target distribution series").required(false))
                 .arg(arg!(--backport "This changelog is for a backport entry").required(false))
-                .arg(arg!(-v --version <version> "Target version").required(false))
-
+                .arg(arg!(-v --version <version> "Target version").required(false)),
         )
         .get_matches();
 
     match matches.subcommand() {
         Some(("get", sub_matches)) => {
             let package = sub_matches.get_one::<String>("package").expect("required");
             let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
             let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str());
-            let version = sub_matches.get_one::<String>("version").map(|s| s.as_str()).unwrap_or("");
-            let ppa = sub_matches.get_one::<String>("ppa").map(|s| s.as_str()).unwrap_or("");
+            let version = sub_matches
+                .get_one::<String>("version")
+                .map(|s| s.as_str())
+                .unwrap_or("");
+            let ppa = sub_matches
+                .get_one::<String>("ppa")
+                .map(|s| s.as_str())
+                .unwrap_or("");
 
             // Since get is async, we need to block on it
             let (pb, mut progress_callback) = ui::create_progress_bar(&multi);
 
-            if let Err(e) = rt.block_on(get(package, version, series, "", ppa, dist, None, Some(&mut progress_callback))) {
+            if let Err(e) = rt.block_on(get(
+                package,
+                version,
+                series,
+                "",
+                ppa,
+                dist,
+                None,
+                Some(&mut progress_callback),
+            )) {
                 pb.finish_and_clear();
                 error!("{}", e);
                 std::process::exit(1);
@@ -83,7 +86,7 @@ fn main() {
             pb.finish_and_clear();
             multi.remove(&pb);
             info!("Done.");
-        },
+        }
         Some(("chlog", sub_matches)) => {
             let cwd = std::env::current_dir().unwrap();
             let version = sub_matches.get_one::<String>("version").map(|s| s.as_str());
@@ -98,7 +101,7 @@ fn main() {
                 .current_dir(&cwd)
                 .args(&["debian/changelog"])
                 .status();
-        },
+        }
         _ => unreachable!("Exhausted list of subcommands and subcommand_required prevents `None`"),
     }
 }
|||||||
@@ -1,12 +1,12 @@
|
|||||||
|
use chrono::NaiveDate;
|
||||||
use flate2::read::GzDecoder;
|
use flate2::read::GzDecoder;
|
||||||
use std::io::Read;
|
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::error::Error;
|
use std::error::Error;
|
||||||
|
use std::io::Read;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use chrono::NaiveDate;
|
|
||||||
|
|
||||||
use log::{debug, warn};
|
|
||||||
use crate::ProgressCallback;
|
use crate::ProgressCallback;
|
||||||
|
use log::{debug, warn};
|
||||||
|
|
||||||
const BASE_URL_UBUNTU: &str = "http://archive.ubuntu.com/ubuntu";
|
const BASE_URL_UBUNTU: &str = "http://archive.ubuntu.com/ubuntu";
|
||||||
const BASE_URL_DEBIAN: &str = "http://deb.debian.org/debian";
|
const BASE_URL_DEBIAN: &str = "http://deb.debian.org/debian";
|
||||||
@@ -17,7 +17,7 @@ async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn E
|
|||||||
.redirect(reqwest::redirect::Policy::none())
|
.redirect(reqwest::redirect::Policy::none())
|
||||||
.build()?;
|
.build()?;
|
||||||
let response = client.head(&url).send().await?;
|
let response = client.head(&url).send().await?;
|
||||||
|
|
||||||
if response.status().is_success() {
|
if response.status().is_success() {
|
||||||
Ok(Some(url))
|
Ok(Some(url))
|
||||||
} else {
|
} else {
|
||||||
@@ -29,11 +29,17 @@ fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
|
|||||||
let mut rdr = csv::ReaderBuilder::new()
|
let mut rdr = csv::ReaderBuilder::new()
|
||||||
.flexible(true)
|
.flexible(true)
|
||||||
.from_reader(content.as_bytes());
|
.from_reader(content.as_bytes());
|
||||||
|
|
||||||
let headers = rdr.headers()?.clone();
|
let headers = rdr.headers()?.clone();
|
||||||
let series_idx = headers.iter().position(|h| h == "series").ok_or("Column 'series' not found")?;
|
let series_idx = headers
|
||||||
let created_idx = headers.iter().position(|h| h == "created").ok_or("Column 'created' not found")?;
|
.iter()
|
||||||
|
.position(|h| h == "series")
|
||||||
|
.ok_or("Column 'series' not found")?;
|
||||||
|
let created_idx = headers
|
||||||
|
.iter()
|
||||||
|
.position(|h| h == "created")
|
||||||
|
.ok_or("Column 'created' not found")?;
|
||||||
|
|
||||||
let mut entries = Vec::new();
|
let mut entries = Vec::new();
|
||||||
for result in rdr.records() {
|
for result in rdr.records() {
|
||||||
let record = result?;
|
let record = result?;
|
||||||
@@ -43,10 +49,10 @@ fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sort by date descending (newest first)
|
// Sort by date descending (newest first)
|
||||||
entries.sort_by(|a, b| b.1.cmp(&a.1));
|
entries.sort_by(|a, b| b.1.cmp(&a.1));
|
||||||
|
|
||||||
Ok(entries.into_iter().map(|(s, _)| s).collect())
|
Ok(entries.into_iter().map(|(s, _)| s).collect())
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -54,11 +60,17 @@ async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
|
|||||||
let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
|
let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
|
||||||
std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
|
std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
|
||||||
} else {
|
} else {
|
||||||
reqwest::get(format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv").as_str()).await?.text().await?
|
reqwest::get(
|
||||||
|
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
|
||||||
|
.as_str(),
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
.text()
|
||||||
|
.await?
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut series = parse_series_csv(&content)?;
|
let mut series = parse_series_csv(&content)?;
|
||||||
|
|
||||||
// For Debian, ensure 'sid' is first if it's not (it usually doesn't have a date or is very old/new depending on file)
|
// For Debian, ensure 'sid' is first if it's not (it usually doesn't have a date or is very old/new depending on file)
|
||||||
// Actually in the file sid has 1993 date.
|
// Actually in the file sid has 1993 date.
|
||||||
// But we want to try 'sid' (unstable) first for Debian.
|
// But we want to try 'sid' (unstable) first for Debian.
|
||||||
@@ -66,7 +78,7 @@ async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
|
|||||||
series.retain(|s| s != "sid");
|
series.retain(|s| s != "sid");
|
||||||
series.insert(0, "sid".to_string());
|
series.insert(0, "sid".to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(series)
|
Ok(series)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -85,7 +97,11 @@ pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>>
|
|||||||
if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
|
if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
|
||||||
get_series_from_file(format!("/usr/share/distro-info/{dist}.csv").as_str())
|
get_series_from_file(format!("/usr/share/distro-info/{dist}.csv").as_str())
|
||||||
} else {
|
} else {
|
||||||
get_series_from_url(format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv").as_str()).await
|
get_series_from_url(
|
||||||
|
format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv")
|
||||||
|
.as_str(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -118,18 +134,21 @@ pub struct PackageStanza {
|
|||||||
pub files: Vec<FileEntry>,
|
pub files: Vec<FileEntry>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct PackageInfo {
|
pub struct PackageInfo {
|
||||||
pub dist: String,
|
pub dist: String,
|
||||||
pub series: String,
|
pub series: String,
|
||||||
pub stanza: PackageStanza,
|
pub stanza: PackageStanza,
|
||||||
pub preferred_vcs: Option<String>,
|
pub preferred_vcs: Option<String>,
|
||||||
pub archive_url: String
|
pub archive_url: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
|
fn get_sources_url(base_url: &str, series: &str, pocket: &str, component: &str) -> String {
|
||||||
let pocket_full = if pocket.is_empty() { String::new() } else { format!("-{}", pocket) };
|
let pocket_full = if pocket.is_empty() {
|
||||||
|
String::new()
|
||||||
|
} else {
|
||||||
|
format!("-{}", pocket)
|
||||||
|
};
|
||||||
format!("{base_url}/dists/{series}{pocket_full}/{component}/source/Sources.gz")
|
format!("{base_url}/dists/{series}{pocket_full}/{component}/source/Sources.gz")
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -145,27 +164,38 @@ fn get_base_url(dist: &str) -> &str {
  * Obtain the URL for the 'Release' file of a distribution series
  */
 fn get_release_url(base_url: &str, series: &str, pocket: &str) -> String {
-    let pocket_full = if pocket.is_empty() { String::new() } else { format!("-{}", pocket) };
+    let pocket_full = if pocket.is_empty() {
+        String::new()
+    } else {
+        format!("-{}", pocket)
+    };
     format!("{base_url}/dists/{series}{pocket_full}/Release")
 }

 /*
  * Obtain the components of a distribution series by parsing the 'Release' file
  */
-async fn get_components(base_url: &str, series: &str, pocket: &str) -> Result<Vec<String>, Box<dyn Error>> {
+async fn get_components(
+    base_url: &str,
+    series: &str,
+    pocket: &str,
+) -> Result<Vec<String>, Box<dyn Error>> {
     let url = get_release_url(base_url, series, pocket);
     debug!("Fetching Release file from: {}", url);

     let content = reqwest::get(&url).await?.text().await?;

     for line in content.lines() {
         if line.starts_with("Components:") {
             if let Some((_, components)) = line.split_once(':') {
-                return Ok(components.split_whitespace().map(|s| s.to_string()).collect());
+                return Ok(components
+                    .split_whitespace()
+                    .map(|s| s.to_string())
+                    .collect());
             }
         }
     }

     Err("Components not found.".into())
 }

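For context, the Components: line of a Release file is a single whitespace-separated list, so the extraction above reduces to a line scan plus a split. A self-contained sketch of the same logic on a canned Release snippet (the snippet and component names are only illustrative, not from this commit):

fn components_from_release(content: &str) -> Option<Vec<String>> {
    for line in content.lines() {
        if line.starts_with("Components:") {
            if let Some((_, components)) = line.split_once(':') {
                // Whitespace-split the remainder of the line into component names.
                return Some(components.split_whitespace().map(|s| s.to_string()).collect());
            }
        }
    }
    None
}

fn main() {
    let release = "Origin: Debian\nSuite: unstable\nComponents: main contrib non-free\n";
    assert_eq!(
        components_from_release(release),
        Some(vec![
            "main".to_string(),
            "contrib".to_string(),
            "non-free".to_string()
        ])
    );
}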
@@ -173,7 +203,11 @@ async fn get_components(base_url: &str, series: &str, pocket: &str) -> Result<Ve
  * Parse a 'Sources.gz' debian package file data, to look for a target package and
  * return the data for that package stanza
  */
-fn parse_sources(data: &[u8], target_package: &str, target_version: Option<&str>) -> Result<Option<PackageStanza>, Box<dyn Error>> {
+fn parse_sources(
+    data: &[u8],
+    target_package: &str,
+    target_version: Option<&str>,
+) -> Result<Option<PackageStanza>, Box<dyn Error>> {
     let mut d = GzDecoder::new(data);
     let mut s = String::new();
     d.read_to_string(&mut s)?;
@@ -181,10 +215,12 @@ fn parse_sources(data: &[u8], target_package: &str, target_version: Option<&str>
     for stanza in s.split("\n\n") {
         let mut fields: HashMap<String, String> = HashMap::new();
         let mut current_key = String::new();

         for line in stanza.lines() {
-            if line.is_empty() { continue; }
+            if line.is_empty() {
+                continue;
+            }

             if line.starts_with(' ') || line.starts_with('\t') {
                 // Continuation line
                 if let Some(val) = fields.get_mut(&current_key) {
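Sources stanzas follow the usual Debian control format: blank-line-separated paragraphs of "Key: value" fields, where a line starting with a space or tab continues the previous field. A minimal standalone sketch of that parsing strategy, not part of the commit; the key/value split shown here is an assumption, since this hunk only shows the continuation-line branch:

use std::collections::HashMap;

// Parse a single control-format stanza into a field map.
fn parse_stanza(stanza: &str) -> HashMap<String, String> {
    let mut fields: HashMap<String, String> = HashMap::new();
    let mut current_key = String::new();
    for line in stanza.lines() {
        if line.is_empty() {
            continue;
        }
        if line.starts_with(' ') || line.starts_with('\t') {
            // Continuation line: append to the most recently opened field.
            if let Some(val) = fields.get_mut(&current_key) {
                val.push('\n');
                val.push_str(line.trim());
            }
        } else if let Some((key, value)) = line.split_once(':') {
            current_key = key.to_string();
            fields.insert(current_key.clone(), value.trim().to_string());
        }
    }
    fields
}

fn main() {
    let stanza = "Package: hello\nVersion: 2.10-2\nFiles:\n abc123 1024 hello_2.10-2.dsc";
    let fields = parse_stanza(stanza);
    assert_eq!(fields["Version"], "2.10-2");
    assert!(fields["Files"].contains("hello_2.10-2.dsc"));
}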
@@ -235,11 +271,16 @@ fn parse_sources(data: &[u8], target_package: &str, target_version: Option<&str>
             }
         }
     }

     Ok(None)
 }

-pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option<&str>) -> Result<PackageInfo, Box<dyn Error>> {
+pub async fn get(
+    package_name: &str,
+    series: &str,
+    pocket: &str,
+    version: Option<&str>,
+) -> Result<PackageInfo, Box<dyn Error>> {
     let dist = get_dist_from_series(series).await?;

     // Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa
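The Launchpad preference mentioned in that comment is exercised later by check_launchpad_repo, whose implementation is not shown in this diff. A hypothetical probe in the same spirit (the URL pattern is grounded in the test below, everything else is an assumed sketch) could simply ask Launchpad's git host whether the source repository exists:

// Hypothetical sketch, not the crate's actual check_launchpad_repo.
async fn probe_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn std::error::Error>> {
    let url = format!("https://git.launchpad.net/ubuntu/+source/{package}");
    let resp = reqwest::get(&url).await?;
    // Treat any successful status as "the repository exists".
    Ok(if resp.status().is_success() { Some(url) } else { None })
}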
@@ -261,7 +302,7 @@ pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option
         let url = get_sources_url(base_url, series, pocket, &component);

         debug!("Fetching sources from: {}", url);

         let response = match reqwest::get(&url).await {
             Ok(resp) => resp,
             Err(e) => {
@@ -269,16 +310,19 @@ pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option
                 continue;
             }
         };

         if !response.status().is_success() {
             debug!("Failed to fetch {}: status {}", url, response.status());
             continue;
         }

         let compressed_data = response.bytes().await?;

-        debug!("Downloaded Sources.gz for {}/{}/{}", dist, series, component);
+        debug!(
+            "Downloaded Sources.gz for {}/{}/{}",
+            dist, series, component
+        );

         if let Some(stanza) = parse_sources(&compressed_data, package_name, version)? {
             if let Some(vcs) = &stanza.vcs_git {
                 if preferred_vcs.is_none() {
@@ -296,13 +340,23 @@ pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option
             });
         }
     }

-    Err(format!("Package '{}' not found in {}/{}", package_name, dist, series).into())
+    Err(format!(
+        "Package '{}' not found in {}/{}",
+        package_name, dist, series
+    )
+    .into())
 }

-pub async fn find_package(package_name: &str, dist: &str, pocket: &str, version: Option<&str>, progress: ProgressCallback<'_>) -> Result<PackageInfo, Box<dyn Error>> {
+pub async fn find_package(
+    package_name: &str,
+    dist: &str,
+    pocket: &str,
+    version: Option<&str>,
+    progress: ProgressCallback<'_>,
+) -> Result<PackageInfo, Box<dyn Error>> {
     let series_list = get_ordered_series(dist).await?;

     for (i, series) in series_list.iter().enumerate() {
         if let Some(cb) = progress {
             cb("", &format!("Checking {}...", series), i, series_list.len());
@@ -311,18 +365,21 @@ pub async fn find_package(package_name: &str, dist: &str, pocket: &str, version:
         match get(package_name, series, pocket, version).await {
             Ok(info) => {
                 if i > 0 {
-                    warn!("Package '{}' not found in development release. Found in {}/{}.", package_name, dist, series);
+                    warn!(
+                        "Package '{}' not found in development release. Found in {}/{}.",
+                        package_name, dist, series
+                    );
                 } else {
                     debug!("Found package '{}' in {}/{}", package_name, dist, series);
                 }
                 return Ok(info);
-            },
+            }
             Err(_e) => {
                 continue;
             }
         }
     }

     Err(format!("Package '{}' not found.", package_name).into())
 }

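Taken together, find_package walks the ordered series list starting from the development release and falls back to older series, optionally reporting progress through the callback. A hypothetical caller, assuming this module is in scope and a tokio runtime is available (the argument values mirror the tests below):

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // No progress callback; pocket empty and version unspecified.
    let info = find_package("hello", "debian", "", None, None).await?;
    println!(
        "{} {} found in {}/{} ({})",
        info.stanza.package, info.stanza.version, info.dist, info.series, info.archive_url
    );
    Ok(())
}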
@@ -335,10 +392,15 @@ mod tests {
         // "hello" should exist on Launchpad for Ubuntu
         let url = check_launchpad_repo("hello").await.unwrap();
         assert!(url.is_some());
-        assert_eq!(url.unwrap(), "https://git.launchpad.net/ubuntu/+source/hello");
+        assert_eq!(
+            url.unwrap(),
+            "https://git.launchpad.net/ubuntu/+source/hello"
+        );

         // "this-package-should-not-exist-12345" should not exist
-        let url = check_launchpad_repo("this-package-should-not-exist-12345").await.unwrap();
+        let url = check_launchpad_repo("this-package-should-not-exist-12345")
+            .await
+            .unwrap();
         assert!(url.is_none());
     }

@@ -355,7 +417,7 @@ mod tests {
         assert!(series.contains(&"noble".to_string()));
         assert!(series.contains(&"jammy".to_string()));
     }

     #[tokio::test]
     async fn test_get_dist_from_series() {
         assert_eq!(get_dist_from_series("sid").await.unwrap(), "debian");
@@ -364,12 +426,12 @@ mod tests {

     #[test]
     fn test_parse_sources() {
-        use flate2::write::GzEncoder;
         use flate2::Compression;
+        use flate2::write::GzEncoder;
         use std::io::Write;

         let data = "Package: hello\nVersion: 2.10-2\nDirectory: pool/main/h/hello\nVcs-Git: https://salsa.debian.org/debian/hello.git\n\nPackage: other\nVersion: 1.0\n";

         let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
         encoder.write_all(data.as_bytes()).unwrap();
         let compressed = encoder.finish().unwrap();
@@ -378,7 +440,10 @@ mod tests {
         assert_eq!(info.package, "hello");
         assert_eq!(info.version, "2.10-2");
         assert_eq!(info.directory, "pool/main/h/hello");
-        assert_eq!(info.vcs_git.unwrap(), "https://salsa.debian.org/debian/hello.git");
+        assert_eq!(
+            info.vcs_git.unwrap(),
+            "https://salsa.debian.org/debian/hello.git"
+        );

         let none = parse_sources(&compressed, "missing", None).unwrap();
         assert!(none.is_none());
@@ -387,7 +452,9 @@ mod tests {
     #[tokio::test]
     async fn test_find_package_fallback() {
         // python2.7 is in bullseye but not above
-        let info = find_package("python2.7", "debian", "", None, None).await.unwrap();
+        let info = find_package("python2.7", "debian", "", None, None)
+            .await
+            .unwrap();
         assert_eq!(info.stanza.package, "python2.7");
         assert_eq!(info.series, "bullseye")
     }
@@ -395,7 +462,9 @@ mod tests {
     #[tokio::test]
     async fn test_find_package_devel() {
         // hello is in sid
-        let info = find_package("hello", "debian", "", None, None)
+        let info = find_package("hello", "debian", "", None, None)
+            .await
+            .unwrap();
         assert_eq!(info.stanza.package, "hello");
         assert_eq!(info.series, "sid")
     }
22  src/ui.rs
@@ -1,12 +1,16 @@
-use std::time::Duration;
 use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
+use std::time::Duration;

-pub fn create_progress_bar(multi: &MultiProgress) -> (ProgressBar, impl Fn(&str, &str, usize, usize) + '_) {
+pub fn create_progress_bar(
+    multi: &MultiProgress,
+) -> (ProgressBar, impl Fn(&str, &str, usize, usize) + '_) {
     let pb = multi.add(ProgressBar::new(0));
     pb.enable_steady_tick(Duration::from_millis(50));
-    pb.set_style(ProgressStyle::default_bar()
-        .template("> {spinner:.blue} {prefix}")
-        .unwrap());
+    pb.set_style(
+        ProgressStyle::default_bar()
+            .template("> {spinner:.blue} {prefix}")
+            .unwrap(),
+    );

     let pb_clone = pb.clone();
     let callback = move |prefix: &str, msg: &str, progress: usize, total: usize| {
@@ -17,9 +21,11 @@ pub fn create_progress_bar(multi: &MultiProgress) -> (ProgressBar, impl Fn(&str,
                 .unwrap()
                 .progress_chars("=> "));
         } else {
-            pb.set_style(ProgressStyle::default_bar()
-                .template("> {spinner:.blue} {prefix}")
-                .unwrap());
+            pb.set_style(
+                ProgressStyle::default_bar()
+                    .template("> {spinner:.blue} {prefix}")
+                    .unwrap(),
+            );
         }

         if !prefix.is_empty() {
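For orientation, create_progress_bar returns the bar itself plus a callback taking (prefix, message, progress, total); the callback apparently switches between the bar style and the plain spinner style, with the exact condition falling outside this hunk. A hypothetical driver, not part of the commit, assuming the function is in scope:

use indicatif::MultiProgress;

fn main() {
    let multi = MultiProgress::new();
    let (pb, progress) = create_progress_bar(&multi);

    // Drive the callback the way find_package does: (prefix, message, step, total).
    progress("hello", "Checking sid...", 0, 3);
    progress("hello", "Checking trixie...", 1, 3);

    pb.finish_and_clear();
}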