From 833af6e8d396c79d2a218fea4fd62f13086ed141 Mon Sep 17 00:00:00 2001 From: Daniel M Date: Sat, 27 Mar 2021 00:29:56 +0100 Subject: [PATCH] Don't preresolve zippy URLs - Zippy URLs are now resolved directly before starting the downloads - Resolve errors are now printed to the top-log and the file is skipped. (untested) - The status reports are now sent after either 5 MB of download or 1 s - Fixes #10, #4 --- src/dlreport.rs | 11 ++++++++- src/download.rs | 12 ++++++---- src/main.rs | 61 +++++++++++++++++++------------------------------ 3 files changed, 41 insertions(+), 43 deletions(-) diff --git a/src/dlreport.rs b/src/dlreport.rs index 4f144a0..82ddceb 100644 --- a/src/dlreport.rs +++ b/src/dlreport.rs @@ -24,7 +24,9 @@ pub enum DlStatus { Done { duration_ms: u64 }, - Message(String) + Message(String), + /// Like Message but triggers a display refresh + MessageNow(String) } #[derive(Clone, Debug)] @@ -161,6 +163,8 @@ pub async fn watch_and_print_reports(mut receiver: mpsc::UnboundedReceiver { msg_queue.push_back(msg); + }, + DlStatus::MessageNow(msg) => { + msg_queue.push_back(msg); + moved_lines = print_accumulated_report(&statuses, &mut msg_queue, moved_lines)?; + t_last = SystemTime::now(); } } diff --git a/src/download.rs b/src/download.rs index 7d6d6a9..85afd74 100644 --- a/src/download.rs +++ b/src/download.rs @@ -144,7 +144,7 @@ pub async fn download_feedback_chunks(url: &str, into_file: &str, rep: DlReporte // This reduces the number of small disk writes and thereby reduces the // io bottleneck that occurs on HDDs with many small writes in different // files and offsets at the same time - if buff.len() >= 4_000_000 { + if buff.len() >= 1_000_000 { // Write the received data into the file ofile.write_all(&buff).await?; @@ -158,9 +158,11 @@ pub async fn download_feedback_chunks(url: &str, into_file: &str, rep: DlReporte // Update the number of bytes downloaded since the last report last_bytecount += datalen; - // Update the reported download speed after 
every 5MB - if last_bytecount > 5_000_000 { - let t_elapsed = t_last_speed.elapsed()?.as_millis(); + let t_elapsed = t_last_speed.elapsed()?.as_millis(); + + // Update the reported download speed after every 5MB or every second + // depending on what happens first + if last_bytecount > 5_000_000 || t_elapsed >= 1000 { // Update rolling average average_speed.add( @@ -274,7 +276,7 @@ pub async fn download_feedback_multi(url: &str, into_file: &str, rep: DlReporter dl_speeds[update.id as usize] = speed_mbps; progresses[update.id as usize] = bytes_curr; - if update_counter == 10 { + if update_counter >= 0 { update_counter = 0; let speed = dl_speeds.iter().sum(); diff --git a/src/main.rs b/src/main.rs index fb6e3d7..bc4b04e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -129,48 +129,17 @@ async fn main() -> ResBE<()> { let ifile = std::fs::File::open(listfile)?; - let mut urls: Vec = std::io::BufReader::new(ifile) + let urls: Vec = std::io::BufReader::new(ifile) .lines() .map(|l| l.unwrap()) .filter(|url| url.len() > 0 && !url.starts_with("#")) .collect(); - if is_zippy { - println!("Pre-resolving zippyshare URLs"); - let mut zippy_urls = Vec::new(); - for url in urls { - zippy_urls.push( - match zippy::resolve_link(&url).await { - Ok(url) => url, - Err(e) => { - println!("Zippyshare link could not be resolved"); - eprintln!("{}", e); - exit(1); - } - } - ) - } - - urls = zippy_urls; - } - - download_multiple(urls, outdir, numparal, boost).await?; + download_multiple(urls, outdir, numparal, boost, is_zippy).await?; } else if arguments.is_present("download") { let url = arguments.value_of("download").unwrap(); - let url = if is_zippy { - match zippy::resolve_link(&url).await { - Ok(url) => url, - Err(e) => { - println!("Zippyshare link could not be resolved"); - eprintln!("{}", e); - exit(1); - } - } - } else { - url.to_string() - }; let numparal = if boost != 1 { boost @@ -178,7 +147,7 @@ async fn main() -> ResBE<()> { numparal }; - download_multiple(vec![url], outdir, 1, 
numparal).await?; + download_multiple(vec![url.to_string()], outdir, 1, numparal, is_zippy).await?; } else if arguments.is_present("resolve") { @@ -203,7 +172,7 @@ async fn main() -> ResBE<()> { Ok(()) } -async fn download_multiple(urls: Vec, outdir: &str, numparal: i32, boost: i32) -> ResBE<()> { +async fn download_multiple(urls: Vec, outdir: &str, numparal: i32, boost: i32, is_zippy: bool) -> ResBE<()> { let outdir = Path::new(outdir); if !outdir.exists() { @@ -237,6 +206,24 @@ async fn download_multiple(urls: Vec, outdir: &str, numparal: i32, boost let rep = DlReporter::new(global_url_index, tx); + let url = if is_zippy { + match zippy::resolve_link(&url).await { + Ok(url) => url, + Err(e) => { + rep.send( + DlStatus::MessageNow(format!("Zippyshare link could not be resolved: {}", url)) + ).unwrap(); + rep.send( + DlStatus::MessageNow(format!("{}", e)) + ).unwrap(); + + continue; + } + } + } else { + url.to_string() + }; + let file_name = download::url_to_filename(&url); let into_file = outdir.join(Path::new(&file_name)) .to_str().unwrap().to_string(); @@ -248,10 +235,10 @@ async fn download_multiple(urls: Vec, outdir: &str, numparal: i32, boost let local_filesize = std::fs::metadata(path_into_file).unwrap().len(); if filesize == local_filesize { - rep.send(DlStatus::Message(format!("Skipping file '{}': already present", &file_name))).unwrap(); + rep.send(DlStatus::MessageNow(format!("Skipping file '{}': already present", &file_name))).unwrap(); continue; } else { - rep.send(DlStatus::Message(format!("Replacing file '{}': present but not completed", &file_name))).unwrap(); + rep.send(DlStatus::MessageNow(format!("Replacing file '{}': present but not completed", &file_name))).unwrap(); } }