ffdl/src/main.rs

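//! ffdl: download a list of URLs in parallel, using multiple connections per
//! file where the server supports range requests, skipping files that are
//! already fully downloaded, and resolving Zippyshare links to direct URLs.
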
use std::{
    collections::VecDeque,
    path::{Path, PathBuf},
    process::exit,
    sync::Arc,
    time::SystemTime,
};

use anyhow::Result;
use clap::Parser;
use futures::future::join_all;
use tokio::{
    fs::create_dir_all,
    sync::{
        mpsc::{unbounded_channel, UnboundedSender},
        Mutex,
    },
};

use crate::{
    args::CLIArgs,
    dlreport::{watch_and_print_reports, DlReport, DlReporter, DlStatus},
    download::{download_feedback, download_feedback_multi, http_get_filesize_and_range_support},
    zippy::is_zippyshare_url,
};

mod args;
mod dlreport;
mod download;
mod errors;
mod zippy;
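
/// A single download request: the URL to fetch and its position in the input list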
struct DlRequest {
    id: usize,
    url: String,
}
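
/// Work queue shared between the download workers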
type SyncQueue = Arc<Mutex<VecDeque<DlRequest>>>;

#[tokio::main]
async fn main() -> Result<()> {
    let args = CLIArgs::parse();
    // Combine all URLs taken from list files with the ones provided on the command line
    let mut urls = args.download.clone();
    for file in args.listfile.iter() {
        match urls_from_listfile(file).await {
            Ok(listfile_urls) => urls.extend(listfile_urls),
            Err(_) => {
                eprintln!("Failed to read URLs from file: {}", file.display());
                exit(1);
            }
        }
    }
    if urls.is_empty() {
        eprintln!("No URLs provided");
        return Ok(());
    }
    download_multiple(args, urls).await
}

/// Parse a list file and return all URLs found in it, skipping blank lines and `#` comments
async fn urls_from_listfile(listfile: &Path) -> Result<Vec<String>> {
    let text = tokio::fs::read_to_string(listfile).await?;
    let urls = text
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty() && !line.starts_with('#'))
        .map(str::to_string)
        .collect();
    Ok(urls)
}

/// Download all files in parallel according to the provided CLI arguments
async fn download_multiple(args: CLIArgs, raw_urls: Vec<String>) -> Result<()> {
    let num_urls = raw_urls.len();
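    // Fill the shared queue, tagging each URL with its position in the input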
    let urls: SyncQueue = Default::default();
    let enumerated_urls = raw_urls
        .into_iter()
        .enumerate()
        .map(|(id, url)| DlRequest { id, url });
    urls.lock().await.extend(enumerated_urls);
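    // Create the output directory up front so every worker can assume it exists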
    if !args.outdir.exists() {
        if let Err(e) = create_dir_all(&args.outdir).await {
            eprintln!(
                "Error creating output directory '{}': {e}",
                args.outdir.display()
            );
            exit(1);
        }
    }
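    // Channel over which the workers report download progress to the printer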
    let (tx, rx) = unbounded_channel::<DlReport>();
    let t_start = SystemTime::now();
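    // Spawn one worker task per requested concurrent file download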
    let jobs = (0..args.file_count.get())
        .map(|_| tokio::task::spawn(download_job(urls.clone(), tx.clone(), args.clone())))
        .collect::<Vec<_>>();
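    // Drop the original sender so the report channel closes once all workers are done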
    drop(tx);
    watch_and_print_reports(rx, num_urls as i32).await?;
    join_all(jobs).await;
    println!("Total time: {}s", t_start.elapsed()?.as_secs());
    Ok(())
}
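
/// Worker task: pop requests off the shared queue and download them until the queue is empty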
async fn download_job(urls: SyncQueue, reporter: UnboundedSender<DlReport>, cli_args: CLIArgs) {
    loop {
        // Pop the next request with an explicit `let else` so the queue's mutex
        // guard is released immediately; `while let Some(..) = urls.lock().await.pop_front()`
        // would keep the lock held for the whole loop body and serialize the workers
        let Some(dlreq) = urls.lock().await.pop_front() else {
            break;
        };
        let reporter = DlReporter::new(dlreq.id as u32, reporter.clone());
        // Resolve the Zippyshare URL to the direct download URL if necessary
        let url = if is_zippyshare_url(&dlreq.url) {
            match zippy::resolve_link(&dlreq.url).await {
                Ok(url) => url,
                Err(_e) => {
                    reporter.send(DlStatus::Message(format!(
                        "Zippyshare link could not be resolved, skipping: {}",
                        dlreq.url
                    )));
                    continue;
                }
            }
        } else {
            dlreq.url
        };
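        // Ask the server for the file size and whether it supports HTTP range requests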
        let info = match http_get_filesize_and_range_support(&url).await {
            Ok(it) => it,
            Err(_e) => {
                reporter.send(DlStatus::Message(format!(
                    "Error while querying metadata: {url}"
                )));
                continue;
            }
        };
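        // Build the local target path inside the output directory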
        let into_file: PathBuf = cli_args.outdir.join(&info.filename);
        // If a file with the same name is already present locally, compare sizes:
        // a matching size means the download already completed
        if into_file.exists() {
            let local_filesize = std::fs::metadata(&into_file).unwrap().len();
            if info.filesize == local_filesize {
                reporter.send(DlStatus::Message(format!(
                    "Skipping file '{}': already present",
                    info.filename
                )));
                reporter.send(DlStatus::Skipped);
                continue;
            } else {
                reporter.send(DlStatus::Message(format!(
                    "Replacing file '{}': present but not completed",
                    info.filename
                )));
            }
        }
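        // Pick the download strategy: use multiple connections only when more
        // than one was requested and the server accepts range requests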
        let dl_status = if cli_args.conn_count.get() == 1 {
            download_feedback(&url, &into_file, reporter.clone(), Some(info.filesize)).await
        } else if !info.range_support {
            reporter.send(DlStatus::Message(format!(
                "Server does not support range headers. Downloading with single connection: {}",
                url
            )));
            download_feedback(&url, &into_file, reporter.clone(), Some(info.filesize)).await
        } else {
            download_feedback_multi(
                &url,
                &into_file,
                reporter.clone(),
                cli_args.conn_count.get(),
                Some(info.filesize),
            )
            .await
        };
        if dl_status.is_err() {
            reporter.send(DlStatus::DoneErr {
                filename: info.filename,
            });
        }
    }
}