fetch from devel/sid by default, or fetch with a specific dist
src/get.rs
@@ -239,20 +239,46 @@ async fn fetch_archive_sources(info: &PackageInfo, cwd: Option<&Path>, progress:
     Ok(())
 }
 
-pub async fn get(package: &str, _version: &str, series: &str, pocket: &str, _ppa: &str, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
-    if let Some(cb) = progress {
-        cb(&format!("Resolving package info for {}...", package), "", 0, 0);
-    }
-    let package_info = package_info::get(package, series, pocket).await?;
+pub async fn get(package: &str, _version: &str, series: Option<&str>, pocket: &str, _ppa: &str, dist: Option<&str>, cwd: Option<&Path>, progress: ProgressCallback<'_>) -> Result<(), Box<dyn Error>> {
+    let version_opt = if _version.is_empty() { None } else { Some(_version) };
+
+    /* Obtain the package information, either directly in a series or with a search in all series */
+    let package_info = if let Some(s) = series {
+        if let Some(cb) = progress {
+            cb(&format!("Resolving package info for {}...", package), "", 0, 0);
+        }
+
+        // Get the package information from that series and pocket
+        package_info::get(package, s, pocket, version_opt).await?
+    } else {
+        let dist = dist.unwrap_or_else(||
+            // Use auto-detection to see if current distro is ubuntu, or fallback to debian by default
+            if std::process::Command::new("lsb_release").arg("-i").arg("-s").output()
+                .map(|o| String::from_utf8_lossy(&o.stdout).trim().to_lowercase()).unwrap_or_default() == "ubuntu" {
+                "ubuntu"
+            } else {
+                "debian"
+            }
+        );
+
+        if let Some(cb) = progress {
+            cb(&format!("Searching for package {} in {}...", package, dist), "", 0, 0);
+        }
+
+        // Try to find the package in all series from that dist
+        package_info::find_package(package, dist, pocket, version_opt, progress).await?
+    };
 
     let package_dir = if let Some(path) = cwd {
         path.join(package)
     } else {
         Path::new(package).to_path_buf()
     };
 
-    let info = package_info.unwrap();
-    if let Some(ref url) = info.preferred_vcs {
+    /* Fetch the package: either via git (preferred VCS) or the archive */
+    if let Some(ref url) = package_info.preferred_vcs {
+        // We have found a preferred VCS (git repository) for the package, so
+        // we fetch the package from that repo.
         if let Some(cb) = progress {
             cb(&format!("Cloning {}...", url), "", 0, 0);
         }
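For orientation (not part of the diff): a minimal sketch of the two paths through the new `get` signature, mirroring the call sites elsewhere in this commit. The argument order is (package, version, series, pocket, ppa, dist, cwd, progress).

// Illustrative only; hypothetical call sites under the new signature.
async fn example() -> Result<(), Box<dyn std::error::Error>> {
    // Explicit series, as before:
    get("hello", "", Some("noble"), "", "", None, None, None).await?;
    // No series given: use the dist (or the lsb_release auto-detection) and
    // search its series newest-first, starting from the development release:
    get("hello", "", None, "", "", Some("debian"), None, None).await?;
    Ok(())
}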
@@ -261,12 +287,13 @@ pub async fn get(package: &str, _version: &str, series: &str, pocket: &str, _ppa
         if let Some(cb) = progress {
             cb("Fetching orig tarball...", "", 0, 0);
         }
-        fetch_orig_tarball(&info, Some(&package_dir), progress).await?;
+        fetch_orig_tarball(&package_info, Some(&package_dir), progress).await?;
     } else {
         // Fallback to archive fetching
         if let Some(cb) = progress {
             cb("Downloading from archive...", "", 0, 0);
         }
-        fetch_archive_sources(&info, Some(cwd.unwrap_or(Path::new("."))), progress).await?;
+        fetch_archive_sources(&package_info, Some(cwd.unwrap_or(Path::new("."))), progress).await?;
     }
+
     Ok(())
@@ -276,7 +303,7 @@ pub async fn get(package: &str, _version: &str, series: &str, pocket: &str, _ppa
 mod tests {
     use super::*;
 
-    async fn test_get_package_end_to_end(package: &str, series: &str) {
+    async fn test_get_package_end_to_end(package: &str, series: Option<&str>) {
         // This test verifies that 'pkh get' clones the repo and fetches the tarball.
 
         // Use a temp directory as working directory
@@ -284,10 +311,10 @@ mod tests {
         let cwd = temp_dir.path();
 
         // Main 'get' command: the one we want to test
-        get(package, "", series, "", "", Some(cwd), None).await.unwrap();
+        get(package, "", series, "", "", None, Some(cwd), None).await.unwrap();
 
         let package_dir = cwd.join(package);
-        assert!(package_dir.exists(), "Package directory not created");
+        assert!(package_dir.exists());
         let package_source_dir = package_dir.join(package);
         assert!(package_source_dir.exists(), "Package git repo directory not created");
         assert!(package_source_dir.join("debian").exists(), "debian directory not present");
@@ -308,24 +335,29 @@ mod tests {
 
     #[tokio::test]
     async fn test_get_hello_ubuntu_end_to_end() {
-        test_get_package_end_to_end("hello", "noble").await;
+        test_get_package_end_to_end("hello", Some("noble")).await;
     }
     #[tokio::test]
     async fn test_get_hello_debian_end_to_end() {
-        test_get_package_end_to_end("hello", "bookworm").await;
+        test_get_package_end_to_end("hello", Some("bookworm")).await;
     }
 
     #[tokio::test]
     async fn test_get_2048_universe_ubuntu_end_to_end() {
-        test_get_package_end_to_end("2048", "noble").await;
+        test_get_package_end_to_end("2048", Some("noble")).await;
     }
     #[tokio::test]
     async fn test_get_1oom_contrib_debian_end_to_end() {
-        test_get_package_end_to_end("1oom", "trixie").await;
+        test_get_package_end_to_end("1oom", Some("trixie")).await;
     }
 
     #[tokio::test]
     async fn test_get_agg_svn_fallback_ok() {
-        test_get_package_end_to_end("agg", "trixie").await;
+        test_get_package_end_to_end("agg", Some("trixie")).await;
     }
+
+    #[tokio::test]
+    async fn test_get_hello_latest_end_to_end() {
+        test_get_package_end_to_end("hello", None).await;
+    }
 }
@@ -40,6 +40,10 @@ fn main() {
             arg!(-s --series <series> "Target package distribution series")
                 .required(false)
         )
+        .arg(
+            arg!(-d --dist <dist> "Target package distribution (debian, ubuntu)")
+                .required(false)
+        )
         .arg(
             arg!(-v --version <version> "Target package version")
                 .required(false)
@@ -63,14 +67,15 @@ fn main() {
     match matches.subcommand() {
         Some(("get", sub_matches)) => {
             let package = sub_matches.get_one::<String>("package").expect("required");
-            let series = sub_matches.get_one::<String>("series").map(|s| s.as_str()).unwrap_or("");
+            let series = sub_matches.get_one::<String>("series").map(|s| s.as_str());
+            let dist = sub_matches.get_one::<String>("dist").map(|s| s.as_str());
            let version = sub_matches.get_one::<String>("version").map(|s| s.as_str()).unwrap_or("");
            let ppa = sub_matches.get_one::<String>("ppa").map(|s| s.as_str()).unwrap_or("");
 
             // Since get is async, we need to block on it
             let (pb, mut progress_callback) = ui::create_progress_bar(&multi);
 
-            if let Err(e) = rt.block_on(get(package, version, series, "", ppa, None, Some(&mut progress_callback))) {
+            if let Err(e) = rt.block_on(get(package, version, series, "", ppa, dist, None, Some(&mut progress_callback))) {
                 pb.finish_and_clear();
                 error!("{}", e);
                 std::process::exit(1);
@@ -3,8 +3,10 @@ use std::io::Read;
 use std::collections::HashMap;
 use std::error::Error;
 use std::path::Path;
+use chrono::NaiveDate;
 
-use log::debug;
+use log::{debug, warn};
+use crate::ProgressCallback;
 
 const BASE_URL_UBUNTU: &str = "http://archive.ubuntu.com/ubuntu";
 const BASE_URL_DEBIAN: &str = "http://deb.debian.org/debian";
@@ -23,41 +25,60 @@ async fn check_launchpad_repo(package: &str) -> Result<Option<String>, Box<dyn E
     }
 }
 
-async fn get_series_from_url(url: &str) -> Result<Vec<String>, Box<dyn Error>> {
-    let content = reqwest::get(url).await?.text().await?;
+fn parse_series_csv(content: &str) -> Result<Vec<String>, Box<dyn Error>> {
     let mut rdr = csv::ReaderBuilder::new()
         .flexible(true)
         .from_reader(content.as_bytes());
 
     let headers = rdr.headers()?.clone();
     let series_idx = headers.iter().position(|h| h == "series").ok_or("Column 'series' not found")?;
+    let created_idx = headers.iter().position(|h| h == "created").ok_or("Column 'created' not found")?;
 
-    let mut series = Vec::new();
+    let mut entries = Vec::new();
     for result in rdr.records() {
         let record = result?;
-        if let Some(s) = record.get(series_idx) {
-            series.push(s.to_string());
+        if let (Some(s), Some(c)) = (record.get(series_idx), record.get(created_idx)) {
+            if let Ok(date) = NaiveDate::parse_from_str(c, "%Y-%m-%d") {
+                entries.push((s.to_string(), date));
+            }
         }
     }
+
+    // Sort by date descending (newest first)
+    entries.sort_by(|a, b| b.1.cmp(&a.1));
+
+    Ok(entries.into_iter().map(|(s, _)| s).collect())
+}
+
+async fn get_ordered_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
+    let content = if Path::new(format!("/usr/share/distro-info/{dist}.csv").as_str()).exists() {
+        std::fs::read_to_string(format!("/usr/share/distro-info/{dist}.csv"))?
+    } else {
+        reqwest::get(format!("https://salsa.debian.org/debian/distro-info-data/-/raw/main/{dist}.csv").as_str()).await?.text().await?
+    };
+
+    let mut series = parse_series_csv(&content)?;
+
+    // For Debian, ensure 'sid' is first if it's not (it usually doesn't have a date or is very old/new depending on file)
+    // Actually in the file sid has 1993 date.
+    // But we want to try 'sid' (unstable) first for Debian.
+    if dist == "debian" {
+        series.retain(|s| s != "sid");
+        series.insert(0, "sid".to_string());
+    }
+
     Ok(series)
 }
 
+// Keep existing functions for compatibility or refactor them to use get_ordered_series
+async fn get_series_from_url(url: &str) -> Result<Vec<String>, Box<dyn Error>> {
+    let content = reqwest::get(url).await?.text().await?;
+    parse_series_csv(&content)
+}
+
 fn get_series_from_file(path: &str) -> Result<Vec<String>, Box<dyn Error>> {
-    let mut rdr = csv::ReaderBuilder::new()
-        .flexible(true)
-        .from_path(path)?;
-
-    let headers = rdr.headers()?.clone();
-    let series_idx = headers.iter().position(|h| h == "series").ok_or("Column 'series' not found")?;
-
-    let mut series = Vec::new();
-    for result in rdr.records() {
-        let record = result?;
-        if let Some(s) = record.get(series_idx) {
-            series.push(s.to_string());
-        }
-    }
-    Ok(series)
+    let content = std::fs::read_to_string(path)?;
+    parse_series_csv(&content)
 }
 
 pub async fn get_dist_series(dist: &str) -> Result<Vec<String>, Box<dyn Error>> {
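For orientation (not part of the diff): a minimal sketch of parse_series_csv's ordering contract, using a made-up distro-info-style CSV. The column names and the %Y-%m-%d date format are what the function looks for; the rows themselves are illustrative, not real distro-info data.

// Hypothetical test sketch; entries are sorted by the 'created' date, newest first.
#[test]
fn parse_series_csv_orders_newest_first() {
    let csv = "version,codename,series,created\n\
               12,Older,older,2021-01-01\n\
               13,Newer,newer,2023-01-01\n";
    let series = parse_series_csv(csv).unwrap();
    assert_eq!(series, vec!["newer".to_string(), "older".to_string()]);
}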
@@ -101,6 +122,7 @@ pub struct PackageStanza {
 #[derive(Debug)]
 pub struct PackageInfo {
     pub dist: String,
+    pub series: String,
     pub stanza: PackageStanza,
     pub preferred_vcs: Option<String>,
     pub archive_url: String
@@ -151,7 +173,7 @@ async fn get_components(base_url: &str, series: &str, pocket: &str) -> Result<Ve
  * Parse a 'Sources.gz' debian package file data, to look for a target package and
  * return the data for that package stanza
  */
-fn parse_sources(data: &[u8], target_package: &str) -> Result<Option<PackageStanza>, Box<dyn Error>> {
+fn parse_sources(data: &[u8], target_package: &str, target_version: Option<&str>) -> Result<Option<PackageStanza>, Box<dyn Error>> {
     let mut d = GzDecoder::new(data);
     let mut s = String::new();
     d.read_to_string(&mut s)?;
@@ -177,6 +199,17 @@ fn parse_sources(data: &[u8], target_package: &str) -> Result<Option<PackageStan
 
         if let Some(pkg) = fields.get("Package") {
             if pkg == target_package {
+                // Check version if requested
+                if let Some(ver) = target_version {
+                    if let Some(pkg_ver) = fields.get("Version") {
+                        if pkg_ver != ver {
+                            continue;
+                        }
+                    } else {
+                        continue;
+                    }
+                }
+
                 let mut files = Vec::new();
                 if let Some(checksums) = fields.get("Checksums-Sha256") {
                     for line in checksums.lines() {
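For orientation (not part of the diff): the effect of the new target_version filter, sketched as assertions against the gzipped `compressed` fixture already used by the parse_sources test further down; illustrative only.

// The fixture's stanza carries Version "2.10-2".
assert!(parse_sources(&compressed, "hello", Some("2.10-2")).unwrap().is_some());
// A non-matching version skips the stanza, as if the package were absent.
assert!(parse_sources(&compressed, "hello", Some("0.0-0")).unwrap().is_none());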
@@ -206,7 +239,7 @@ fn parse_sources(data: &[u8], target_package: &str) -> Result<Option<PackageStan
     Ok(None)
 }
 
-pub async fn get(package_name: &str, series: &str, pocket: &str) -> Result<Option<PackageInfo>, Box<dyn Error>> {
+pub async fn get(package_name: &str, series: &str, pocket: &str, version: Option<&str>) -> Result<PackageInfo, Box<dyn Error>> {
     let dist = get_dist_from_series(series).await?;
 
     // Handle Ubuntu case: Vcs-Git does not usually point to Launchpad but Salsa
@@ -246,7 +279,7 @@ pub async fn get(package_name: &str, series: &str, pocket: &str) -> Result<Optio
 
         debug!("Downloaded Sources.gz for {}/{}/{}", dist, series, component);
 
-        if let Some(stanza) = parse_sources(&compressed_data, package_name)? {
+        if let Some(stanza) = parse_sources(&compressed_data, package_name, version)? {
             if let Some(vcs) = &stanza.vcs_git {
                 if preferred_vcs.is_none() {
                     preferred_vcs = Some(vcs.clone());
@@ -254,18 +287,45 @@ pub async fn get(package_name: &str, series: &str, pocket: &str) -> Result<Optio
             }
 
             let archive_url = format!("{base_url}/{0}", stanza.directory);
-            return Ok(Some(PackageInfo {
+            return Ok(PackageInfo {
                 dist: dist,
+                series: series.to_string(),
                 stanza: stanza,
                 preferred_vcs: preferred_vcs,
                 archive_url: archive_url,
-            }));
+            });
         }
     }
 
     Err(format!("Package '{}' not found in {}/{}", package_name, dist, series).into())
 }
 
+pub async fn find_package(package_name: &str, dist: &str, pocket: &str, version: Option<&str>, progress: ProgressCallback<'_>) -> Result<PackageInfo, Box<dyn Error>> {
+    let series_list = get_ordered_series(dist).await?;
+
+    for (i, series) in series_list.iter().enumerate() {
+        if let Some(cb) = progress {
+            cb("", &format!("Checking {}...", series), i, series_list.len());
+        }
+
+        match get(package_name, series, pocket, version).await {
+            Ok(info) => {
+                if i > 0 {
+                    warn!("Package '{}' not found in development release. Found in {}/{}.", package_name, dist, series);
+                } else {
+                    debug!("Found package '{}' in {}/{}", package_name, dist, series);
+                }
+                return Ok(info);
+            },
+            Err(_e) => {
+                continue;
+            }
+        }
+    }
+
+    Err(format!("Package '{}' not found.", package_name).into())
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -314,13 +374,29 @@ mod tests {
         encoder.write_all(data.as_bytes()).unwrap();
         let compressed = encoder.finish().unwrap();
 
-        let info = parse_sources(&compressed, "hello").unwrap().unwrap();
+        let info = parse_sources(&compressed, "hello", None).unwrap().unwrap();
         assert_eq!(info.package, "hello");
         assert_eq!(info.version, "2.10-2");
         assert_eq!(info.directory, "pool/main/h/hello");
         assert_eq!(info.vcs_git.unwrap(), "https://salsa.debian.org/debian/hello.git");
 
-        let none = parse_sources(&compressed, "missing").unwrap();
+        let none = parse_sources(&compressed, "missing", None).unwrap();
         assert!(none.is_none());
     }
+
+    #[tokio::test]
+    async fn test_find_package_fallback() {
+        // python2.7 is in bullseye but not above
+        let info = find_package("python2.7", "debian", "", None, None).await.unwrap();
+        assert_eq!(info.stanza.package, "python2.7");
+        assert_eq!(info.series, "bullseye")
+    }
+
+    #[tokio::test]
+    async fn test_find_package_devel() {
+        // hello is in sid
+        let info = find_package("hello", "debian", "", None, None).await.unwrap();
+        assert_eq!(info.stanza.package, "hello");
+        assert_eq!(info.series, "sid")
+    }
 }