use std::{
    collections::VecDeque,
    io::BufRead,
    path::Path,
    process::exit,
    sync::{Arc, Mutex},
    time::SystemTime,
};

use clap::Parser;
use futures::future::join_all;
use tokio::sync::mpsc;

use crate::{
    args::{CLIAction, CLIArgs},
    dlreport::{DlReport, DlReporter, DlStatus},
    errors::ResBE,
};

mod args;
mod dlreport;
mod download;
mod errors;
mod zippy;

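/// Parses the CLI arguments, decides which action was requested, and runs it.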
#[tokio::main]
async fn main() -> ResBE<()> {
    let args = CLIArgs::parse();

    // Map the mutually exclusive CLI options onto a single action; CLIArgs is
    // expected to enforce (e.g. via a clap arg group) that exactly one is set.
    let action = match (&args.listfile, &args.download, &args.zippy_resolve) {
        (Some(listfile), None, None) => CLIAction::UrlList(listfile.to_string()),
        (None, Some(url), None) => CLIAction::DownloadUrl(url.to_string()),
        (None, None, Some(zippy_url)) => CLIAction::ResolveZippyUrl(zippy_url.to_string()),
        _ => unreachable!(),
    };

    let urls = match action {
        CLIAction::DownloadUrl(url) => vec![url],
        CLIAction::UrlList(listfile) => read_urls_from_listfile(&listfile).await,
        CLIAction::ResolveZippyUrl(url) => resolve_zippy_url(&url).await,
    };

    download_multiple(args, urls).await
}

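/// Resolves a single Zippyshare link, prints the direct URL to stdout, and
/// exits the process; this function never returns.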
async fn resolve_zippy_url(url: &str) -> ! {
    let resolved_url = zippy::resolve_link(url).await.unwrap_or_else(|_| {
        eprintln!("Zippyshare link could not be resolved");
        exit(1);
    });

    println!("{}", resolved_url);
    exit(0);
}

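/// Reads URLs from `listfile`, one per line; empty lines and lines starting
/// with `#` are skipped. Exits the process if the file cannot be read.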
async fn read_urls_from_listfile(listfile: &str) -> Vec<String> {
    let p_listfile = Path::new(listfile);

    if !p_listfile.is_file() {
        eprintln!("Listfile '{}' does not exist!", listfile);
        exit(1);
    }

    let ifile = std::fs::File::open(p_listfile).unwrap_or_else(|e| {
        eprintln!("Could not open listfile '{}': {}", listfile, e);
        exit(1);
    });

    std::io::BufReader::new(ifile)
        .lines()
        .map(|l| l.unwrap())
        .filter(|url| !url.is_empty() && !url.starts_with('#'))
        .collect()
}

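/// Downloads all `raw_urls` into the output directory, fetching up to
/// `file_count` files in parallel with `conn_count` connections per file.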
async fn download_multiple(cli_args: CLIArgs, raw_urls: Vec<String>) -> ResBE<()> {
    let outdir = cli_args.outdir;
    let outdir = Path::new(&outdir);

    let parallel_file_count = cli_args.file_count.get();
    let conn_count = cli_args.conn_count.get();
    let zippy = cli_args.zippy;

    // Shared work queue of (index, url) pairs that the worker tasks pull from.
    let urls: Arc<Mutex<VecDeque<(u32, String)>>> = Arc::new(Mutex::new(
        raw_urls
            .iter()
            .cloned()
            .enumerate()
            .map(|(i, url)| (i as u32, url))
            .collect(),
    ));

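    // Make sure the output directory exists before any worker writes into it.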
    if !outdir.exists() {
        if let Err(e) = std::fs::create_dir_all(outdir) {
            eprintln!(
                "Error creating output directory '{}': {}",
                outdir.display(),
                e
            );
            exit(1);
        }
    }

    let t_start = SystemTime::now();

    let mut joiners = Vec::new();

    // Workers report progress over this channel; the receiving end drives the
    // console output.
    let (tx, rx) = mpsc::unbounded_channel::<DlReport>();

    // Spawn one worker task per parallel download slot.
    for _ in 0..parallel_file_count {
        let tx = tx.clone();
        let outdir = outdir.to_owned();
        let arg_filename = cli_args.into_file.clone();
        let urls = urls.clone();

        joiners.push(tokio::task::spawn(async move {
            loop {
                // Take the next queued URL; the mutex guard is dropped at the
                // end of the statement, so the lock is never held across an
                // .await point.
                let (global_url_index, url) = match urls.lock().unwrap().pop_front() {
                    Some(it) => it,
                    None => break,
                };

                let tx = tx.clone();
                let rep = DlReporter::new(global_url_index, tx);

                // Resolve Zippyshare pages to their direct download link first.
                let url = if zippy {
                    match zippy::resolve_link(&url).await {
                        Ok(url) => url,
                        Err(_e) => {
                            rep.send(DlStatus::Message(format!(
                                "Zippyshare link could not be resolved: {}",
                                url
                            )));
                            continue;
                        }
                    }
                } else {
                    url
                };

                // Use the filename given on the CLI if any, otherwise derive
                // one from the URL.
                let file_name = arg_filename
                    .clone()
                    .unwrap_or_else(|| download::url_to_filename(&url));

                let into_file = outdir
                    .join(&file_name)
                    .to_str()
                    .unwrap()
                    .to_string();
                let path_into_file = Path::new(&into_file);

                // Query the server for the file size and whether it accepts
                // HTTP range requests (required for multi-connection downloads).
                let (filesize, range_supported) =
                    match download::http_get_filesize_and_range_support(&url).await {
                        Ok(res) => res,
                        Err(_e) => {
                            rep.send(DlStatus::Message(format!(
                                "Error while querying metadata: {}",
                                url
                            )));
                            continue;
                        }
                    };

                // If a file with the same name is present locally, compare
                // sizes to decide between skipping and re-downloading.
                if path_into_file.exists() {
                    let local_filesize = std::fs::metadata(path_into_file).unwrap().len();

                    if filesize == local_filesize {
                        rep.send(DlStatus::Message(format!(
                            "Skipping file '{}': already present",
                            &file_name
                        )));
                        rep.send(DlStatus::Skipped);
                        continue;
                    } else {
                        rep.send(DlStatus::Message(format!(
                            "Replacing file '{}': present but not completed",
                            &file_name
                        )));
                    }
                }

                if conn_count == 1 {
                    // Single connection: plain streaming download.
                    if let Err(_e) =
                        download::download_feedback(&url, &into_file, rep.clone(), Some(filesize))
                            .await
                    {
                        rep.send(DlStatus::DoneErr {
                            filename: file_name.to_string(),
                        });
                    }
                } else {
                    // Multiple connections require HTTP range support.
                    if !range_supported {
                        rep.send(DlStatus::Message(format!(
                            "Error: server does not support range requests: {}",
                            url
                        )));
                        continue;
                    }

                    if let Err(_e) = download::download_feedback_multi(
                        &url,
                        &into_file,
                        rep.clone(),
                        conn_count,
                        Some(filesize),
                    )
                    .await
                    {
                        rep.send(DlStatus::DoneErr {
                            filename: file_name.to_string(),
                        });
                    }
                }
            }
        }));
    }

    // Drop the original sender so the report channel closes once every worker
    // (each holding a clone of `tx`) has finished.
    drop(tx);

    dlreport::watch_and_print_reports(rx, raw_urls.len() as i32).await?;

    join_all(joiners).await;

    println!("Total time: {}s", t_start.elapsed()?.as_secs());

    Ok(())
}