Don't preresolve zippy URLs

- Zippy URLs are now resolved right before each individual download starts,
  instead of being pre-resolved up front (rough sketch below)
- Resolve errors now print the error to the top log and skip the
  affected file (untested)
- Status reports are now sent after either 5 MB downloaded or 1 s elapsed,
  whichever comes first
- Fixes #10, #4
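
The gist of the new per-URL flow, as a rough standalone sketch rather than the
repository code (the real change is in the diff below): resolve_link and report
are hypothetical stand-ins for zippy::resolve_link and
DlReporter::send(DlStatus::MessageNow(..)), and the URLs are made up. A failed
resolve now only logs to the top log and skips that one file instead of
aborting the whole run.

// Hedged illustration of the behaviour described above, not the actual code.
fn resolve_link(url: &str) -> Result<String, String> {
    // Placeholder resolver: pretend URLs containing "broken" cannot be resolved.
    if url.contains("broken") {
        Err(format!("could not find a download link on {}", url))
    } else {
        Ok(format!("{}/direct", url))
    }
}

fn report(msg: String) {
    // Stand-in for pushing a message to the top log and refreshing the display.
    println!("[top log] {}", msg);
}

fn main() {
    let urls = [
        "https://www45.zippyshare.com/v/abc/file.html",
        "https://www45.zippyshare.com/v/broken/file.html",
        "https://www45.zippyshare.com/v/xyz/file.html",
    ];

    for url in urls {
        // Resolve right before this download starts instead of up front.
        let direct_url = match resolve_link(url) {
            Ok(u) => u,
            Err(e) => {
                report(format!("Zippyshare link could not be resolved: {}", url));
                report(e);
                continue; // skip only this file, keep going with the rest
            }
        };
        report(format!("starting download of {}", direct_url));
    }
}
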
Daniel M 2021-03-27 00:29:56 +01:00
parent 92f6c2699c
commit 833af6e8d3
3 changed files with 41 additions and 43 deletions


@@ -24,7 +24,9 @@ pub enum DlStatus {
     Done {
         duration_ms: u64
     },
-    Message(String)
+    Message(String),
+    /// Like Message but triggers a display refresh
+    MessageNow(String)
 }
 
 #[derive(Clone, Debug)]
@@ -161,6 +163,8 @@ pub async fn watch_and_print_reports(mut receiver: mpsc::UnboundedReceiver<DlRep
                 msg_queue.push_back(format!("Starting download for file '{}'", &filename));
                 statuses.insert(update.id, InfoHolder::new(filename, bytes_total));
+
+                moved_lines = print_accumulated_report(&statuses, &mut msg_queue, moved_lines)?;
             },
             DlStatus::Update {
                 speed_mbps,
@@ -197,6 +201,11 @@ pub async fn watch_and_print_reports(mut receiver: mpsc::UnboundedReceiver<DlRep
             },
             DlStatus::Message(msg) => {
                 msg_queue.push_back(msg);
+            },
+            DlStatus::MessageNow(msg) => {
+                msg_queue.push_back(msg);
+                moved_lines = print_accumulated_report(&statuses, &mut msg_queue, moved_lines)?;
+                t_last = SystemTime::now();
             }
         }
     }


@@ -144,7 +144,7 @@ pub async fn download_feedback_chunks(url: &str, into_file: &str, rep: DlReporte
         // This reduces the number of small disk writes and thereby reduces the
         // io bottleneck that occurs on HDDs with many small writes in different
         // files and offsets at the same time
-        if buff.len() >= 4_000_000 {
+        if buff.len() >= 1_000_000 {
             // Write the received data into the file
             ofile.write_all(&buff).await?;
@@ -158,9 +158,11 @@ pub async fn download_feedback_chunks(url: &str, into_file: &str, rep: DlReporte
         // Update the number of bytes downloaded since the last report
         last_bytecount += datalen;
 
-        // Update the reported download speed after every 5MB
-        if last_bytecount > 5_000_000 {
-            let t_elapsed = t_last_speed.elapsed()?.as_millis();
+        let t_elapsed = t_last_speed.elapsed()?.as_millis();
+
+        // Update the reported download speed after every 5MB or every second
+        // depending on what happens first
+        if last_bytecount > 5_000_000 || t_elapsed >= 1000 {
 
             // Update rolling average
             average_speed.add(
@@ -274,7 +276,7 @@ pub async fn download_feedback_multi(url: &str, into_file: &str, rep: DlReporter
             dl_speeds[update.id as usize] = speed_mbps;
             progresses[update.id as usize] = bytes_curr;
 
-            if update_counter == 10 {
+            if update_counter >= 0 {
                 update_counter = 0;
                 let speed = dl_speeds.iter().sum();


@@ -129,48 +129,17 @@ async fn main() -> ResBE<()> {
         let ifile = std::fs::File::open(listfile)?;
-        let mut urls: Vec<String> = std::io::BufReader::new(ifile)
+        let urls: Vec<String> = std::io::BufReader::new(ifile)
             .lines()
             .map(|l| l.unwrap())
             .filter(|url| url.len() > 0 && !url.starts_with("#"))
             .collect();
 
-        if is_zippy {
-            println!("Pre-resolving zippyshare URLs");
-            let mut zippy_urls = Vec::new();
-            for url in urls {
-                zippy_urls.push(
-                    match zippy::resolve_link(&url).await {
-                        Ok(url) => url,
-                        Err(e) => {
-                            println!("Zippyshare link could not be resolved");
-                            eprintln!("{}", e);
-                            exit(1);
-                        }
-                    }
-                )
-            }
-            urls = zippy_urls;
-        }
-
-        download_multiple(urls, outdir, numparal, boost).await?;
+        download_multiple(urls, outdir, numparal, boost, is_zippy).await?;
     } else if arguments.is_present("download") {
         let url = arguments.value_of("download").unwrap();
-        let url = if is_zippy {
-            match zippy::resolve_link(&url).await {
-                Ok(url) => url,
-                Err(e) => {
-                    println!("Zippyshare link could not be resolved");
-                    eprintln!("{}", e);
-                    exit(1);
-                }
-            }
-        } else {
-            url.to_string()
-        };
 
         let numparal = if boost != 1 {
             boost
@@ -178,7 +147,7 @@ async fn main() -> ResBE<()> {
             numparal
         };
 
-        download_multiple(vec![url], outdir, 1, numparal).await?;
+        download_multiple(vec![url.to_string()], outdir, 1, numparal, is_zippy).await?;
     } else if arguments.is_present("resolve") {
@@ -203,7 +172,7 @@ async fn main() -> ResBE<()> {
     Ok(())
 }
 
-async fn download_multiple(urls: Vec<String>, outdir: &str, numparal: i32, boost: i32) -> ResBE<()> {
+async fn download_multiple(urls: Vec<String>, outdir: &str, numparal: i32, boost: i32, is_zippy: bool) -> ResBE<()> {
     let outdir = Path::new(outdir);
     if !outdir.exists() {
@@ -237,6 +206,24 @@ async fn download_multiple(urls: Vec<String>, outdir: &str, numparal: i32, boost
         let rep = DlReporter::new(global_url_index, tx);
 
+        let url = if is_zippy {
+            match zippy::resolve_link(&url).await {
+                Ok(url) => url,
+                Err(e) => {
+                    rep.send(
+                        DlStatus::MessageNow(format!("Zippyshare link could not be resolved: {}", url))
+                    ).unwrap();
+                    rep.send(
+                        DlStatus::MessageNow(format!("{}", e))
+                    ).unwrap();
+                    continue;
+                }
+            }
+        } else {
+            url.to_string()
+        };
+
         let file_name = download::url_to_filename(&url);
         let into_file = outdir.join(Path::new(&file_name))
             .to_str().unwrap().to_string();
@@ -248,10 +235,10 @@ async fn download_multiple(urls: Vec<String>, outdir: &str, numparal: i32, boost
             let local_filesize = std::fs::metadata(path_into_file).unwrap().len();
 
             if filesize == local_filesize {
-                rep.send(DlStatus::Message(format!("Skipping file '{}': already present", &file_name))).unwrap();
+                rep.send(DlStatus::MessageNow(format!("Skipping file '{}': already present", &file_name))).unwrap();
                 continue;
             } else {
-                rep.send(DlStatus::Message(format!("Replacing file '{}': present but not completed", &file_name))).unwrap();
+                rep.send(DlStatus::MessageNow(format!("Replacing file '{}': present but not completed", &file_name))).unwrap();
             }
         }