use std::collections::VecDeque;
use std::path::Path;
use std::process::exit;
use std::sync::Arc;
use std::time::SystemTime;

use anyhow::Result;
use clap::Parser;
use futures::future::join_all;
use tokio::fs::create_dir_all;
use tokio::sync::mpsc::{unbounded_channel, UnboundedSender};
use tokio::sync::Mutex;

use crate::args::CLIArgs;
use crate::clireporter::cli_print_reports;
use crate::dlreport::{DlReport, DlReporter};
use crate::download::{download_feedback, download_feedback_multi, http_file_info};
use crate::integrations::{is_integrated_url, resolve_integrated_url};

mod args;
mod clireporter;
mod dlreport;
mod download;
mod errors;
mod integrations;
mod misc;

/// A single queued download: the URL and its position in the original list.
struct DlRequest {
    id: usize,
    url: String,
}

/// Work queue shared between the download jobs.
type SyncQueue = Arc<Mutex<VecDeque<DlRequest>>>;

#[tokio::main]
async fn main() -> Result<()> {
    let args = CLIArgs::parse();

    // Combine all urls taken from list files with the ones provided on the command line
    let mut urls = args.download.clone();
    for file in args.listfile.iter() {
        match urls_from_listfile(file).await {
            Ok(listfile_urls) => urls.extend(listfile_urls),
            Err(_) => {
                eprintln!("Failed to read urls from file: {}", file.display());
                exit(1);
            }
        }
    }

    if urls.is_empty() {
        eprintln!("No URLs provided");
        return Ok(());
    }

    download_multiple(args, urls).await
}

/// Parse a listfile and return all urls found in it,
/// skipping blank lines and comment lines starting with '#'
async fn urls_from_listfile(listfile: &Path) -> Result<Vec<String>> {
    let text = tokio::fs::read_to_string(listfile).await?;
    let urls = text
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty() && !line.starts_with('#'))
        .map(str::to_string)
        .collect();
    Ok(urls)
}

/// Download all files in parallel according to the provided CLI arguments
async fn download_multiple(args: CLIArgs, raw_urls: Vec<String>) -> Result<()> {
    let num_urls = raw_urls.len();

    // Build the shared work queue, using the original ordering as the request id
    let urls: SyncQueue = Default::default();
    let enumerated_urls = raw_urls
        .into_iter()
        .enumerate()
        .map(|(id, url)| DlRequest { id, url });
    urls.lock().await.extend(enumerated_urls);

    if !args.outdir.exists() {
        if let Err(_e) = create_dir_all(&args.outdir).await {
            eprintln!(
                "Error creating output directory '{}'",
                args.outdir.display()
            );
            exit(1);
        }
    }

    let (tx, rx) = unbounded_channel::<DlReport>();
    let t_start = SystemTime::now();

    // Spawn one download job per concurrently downloaded file
    let jobs = (0..args.file_count.get())
        .map(|_| tokio::task::spawn(download_job(Arc::clone(&urls), tx.clone(), args.clone())))
        .collect::<Vec<_>>();
    // Drop the original sender so the report channel closes once all jobs have finished
    drop(tx);

    cli_print_reports(rx, num_urls as i32).await?;
    join_all(jobs).await;

    println!("Total time: {}s", t_start.elapsed()?.as_secs());
    Ok(())
}

/// Pull requests off the shared queue and download them until the queue is empty
async fn download_job(urls: SyncQueue, reporter: UnboundedSender<DlReport>, cli_args: CLIArgs) {
    // The mutex access must be in its own scope to ensure that the lock is dropped
    // before the download starts
    while let Some(dlreq) = {
        let mut urls = urls.lock().await;
        urls.pop_front()
    } {
        println!("Downloading {}", dlreq.url);
        let reporter = DlReporter::new(dlreq.id as u32, reporter.clone());

        // Resolve urls of integrated services (e.g. Zippyshare) to the direct download url
        let url = match is_integrated_url(&dlreq.url) {
            Some(service) => match resolve_integrated_url(&dlreq.url, service).await {
                Ok(url) => url,
                Err(_e) => {
                    report_msg!(
                        reporter,
                        "Zippyshare link could not be resolved, skipping: {}",
                        dlreq.url
                    );
                    continue;
                }
            },
            None => dlreq.url,
        };

        let info = match http_file_info(&url).await {
            Ok(it) => it,
            Err(_e) => {
                report_msg!(reporter, "Error while querying metadata: {url}");
                continue;
            }
        };

        let into_file = cli_args.outdir.join(Path::new(&info.filename));

        // If a file with the same name is present locally, compare file sizes to decide
        // whether the download can be skipped
        if into_file.exists() {
            let local_filesize = std::fs::metadata(&into_file).unwrap().len();
            if info.filesize == local_filesize {
                report_msg!(
                    reporter,
                    "Skipping file '{}': already present",
                    info.filename
                );
                reporter.skipped();
                continue;
            } else {
                report_msg!(
                    reporter,
                    "Replacing file '{}': present but not completed",
                    &info.filename
                );
            }
        }

        // Use a single connection if requested or if the server does not support range requests
        let dl_status = if cli_args.conn_count.get() == 1 {
            download_feedback(&url, &into_file, reporter.clone(), info.filesize).await
        } else if !info.range_support {
            report_msg!(
                reporter,
                "Server does not support range headers. Downloading with single connection: {url}"
            );
            download_feedback(&url, &into_file, reporter.clone(), info.filesize).await
        } else {
            download_feedback_multi(
                &url,
                &into_file,
                reporter.clone(),
                cli_args.conn_count.get(),
                info.filesize,
            )
            .await
        };

        if dl_status.is_err() {
            reporter.done_err(info.filename);
        }
    }
}
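
// A minimal test sketch for the listfile parsing above (not part of the original file):
// it assumes tokio's "macros", "rt" and "fs" features are enabled, which the rest of
// this file already relies on, and writes a throwaway file to the system temp directory
// purely for illustration.
#[cfg(test)]
mod tests {
    use super::urls_from_listfile;

    #[tokio::test]
    async fn listfile_skips_blank_and_comment_lines() {
        let path = std::env::temp_dir().join("urls_from_listfile_test.txt");
        tokio::fs::write(
            &path,
            "# a comment\n\nhttps://example.com/a.bin\n  https://example.com/b.bin  \n",
        )
        .await
        .unwrap();

        let urls = urls_from_listfile(&path).await.unwrap();
        assert_eq!(
            urls,
            vec![
                "https://example.com/a.bin".to_string(),
                "https://example.com/b.bin".to_string(),
            ]
        );

        let _ = tokio::fs::remove_file(&path).await;
    }
}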