use std::io::BufRead;
use std::path::Path;
use std::process::exit;
use std::time::SystemTime;

use clap::{App, Arg, ArgGroup, crate_version};
use futures::future::join_all;
use tokio::sync::mpsc;

use dlreport::{DlReport, DlStatus, DlReporter};
use errors::ResBE;

mod zippy;
mod download;
mod errors;
mod dlreport;

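/// The action requested on the command line. The three real variants are
/// mutually exclusive; `None` is only a fallback that should never occur.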
#[derive(Clone, Debug)]
enum CLIAction {
    DownloadUrl(String),
    ResolveZippyUrl(String),
    UrlList(String),
    None
}

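/// Parsed and validated command line arguments, plus the list of URLs to
/// download once the selected action has been evaluated.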
#[derive(Clone, Debug)]
struct CLIArguments {
    outdir: String,
    into_file: Option<String>,
    file_count: u32,
    conn_count: u32,
    zippy: bool,
    action: CLIAction,
    urls: Vec<String>
}

#[tokio::main]
async fn main() -> ResBE<()> {

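    // Define the command line interface. The "action" ArgGroup is required,
    // so clap enforces that exactly one of --listfile, --download or
    // --zippy-resolve is given.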
    let arguments = App::new("FFDL - Fast File Downloader")
        .version(crate_version!())
        .about("Download files fast")
        .arg(
            Arg::with_name("outdir")
                .short("o")
                .long("outdir")
                .value_name("OUTPUT DIR")
                .takes_value(true)
                .help("Set the output directory. The directory will be created \
                       if it doesn't exist yet")
        )
        .arg(
            Arg::with_name("into_file")
                .short("i")
                .long("into-file")
                .value_name("FILENAME")
                .takes_value(true)
                .requires("download")
                .help("Force the filename. This only works for single file downloads")
        )
        .arg(
            Arg::with_name("file_count")
                .short("n")
                .long("num-files")
                .value_name("NUMBER OF CONCURRENT FILE DOWNLOADS")
                .takes_value(true)
                .help("Specify the number of concurrent file downloads")
        )
        .arg(
            Arg::with_name("conn_count")
                .short("c")
                .long("connections")
                .value_name("NUMBER OF CONNECTIONS")
                .takes_value(true)
                .help("The number of concurrent connections per file download. \
                       Downloads might fail when the number of connections is \
                       too high. Files started with multiple connections can't \
                       be continued. NOTE: This will likely cause IO \
                       bottlenecks on HDDs")
        )
        .arg(
            Arg::with_name("zippyshare")
                .short("z")
                .long("zippy")
                .takes_value(false)
                .help("The provided URLs are zippyshare URLs and need to be resolved")
        )
        .group(
            ArgGroup::with_name("action")
                .required(true)
        )
        .arg(
            Arg::with_name("listfile")
                .short("l")
                .long("listfile")
                .value_name("URL LIST")
                .takes_value(true)
                .group("action")
                .help("Download all files from the specified URL list")
        )
        .arg(
            Arg::with_name("download")
                .short("d")
                .long("download")
                .value_name("URL")
                .takes_value(true)
                .group("action")
                .help("Download only the specified URL")
        )
        .arg(
            Arg::with_name("zippy-resolve")
                .long("zippy-resolve")
                .value_name("ZIPPYSHARE URL")
                .takes_value(true)
                .group("action")
                .help("Resolve the zippyshare URL to the real download URL")
        )
        .get_matches();

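    // Extract the options; the numeric ones default to 1 and must parse to a
    // positive integer.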
    let outdir = arguments.value_of("outdir").unwrap_or("./").to_string();

    let into_file = arguments.value_of("into_file").map(String::from);

    let file_count = arguments.value_of("file_count").unwrap_or("1");

    let file_count: u32 = file_count.parse().unwrap_or_else(|_| {
        eprintln!("Invalid value for num-files: {}", file_count);
        exit(1);
    });

    if file_count == 0 {
        eprintln!("Invalid value for num-files: {}", file_count);
        exit(1);
    }

    let conn_count = arguments.value_of("conn_count").unwrap_or("1");

    let conn_count: u32 = conn_count.parse().unwrap_or_else(|_| {
        eprintln!("Invalid value for connections: {}", conn_count);
        exit(1);
    });

    if conn_count == 0 {
        eprintln!("Invalid value for connections: {}", conn_count);
        exit(1);
    }

    let is_zippy = arguments.is_present("zippyshare");

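    // Map whichever action flag was supplied to a CLIAction variant; clap has
    // already guaranteed that exactly one of them is present.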
    let action =
        if let Some(listfile) = arguments.value_of("listfile") {
            CLIAction::UrlList(listfile.to_string())
        } else if let Some(download_url) = arguments.value_of("download") {
            CLIAction::DownloadUrl(download_url.to_string())
        } else if let Some(resolve_url) = arguments.value_of("zippy-resolve") {
            CLIAction::ResolveZippyUrl(resolve_url.to_string())
        } else {
            CLIAction::None
        };

    let mut cli_args = CLIArguments {
        outdir,
        into_file,
        file_count,
        conn_count,
        zippy: is_zippy,
        action,
        urls: Vec::new()
    };

    // Evaluate and execute the requested action. The three actions are
    // mutually exclusive, so only one of the arms below runs.
    match &cli_args.action {

        CLIAction::UrlList(listfile) => {

            let p_listfile = Path::new(listfile);

            if !p_listfile.is_file() {
                eprintln!("Listfile '{}' does not exist!", &listfile);
                exit(1);
            }

            let ifile = std::fs::File::open(p_listfile)?;

            // Read one URL per line, skipping empty lines and '#' comments
            cli_args.urls = std::io::BufReader::new(ifile)
                .lines()
                .map(|l| l.unwrap())
                .filter(|url| !url.is_empty() && !url.starts_with('#'))
                .collect();
        },

        CLIAction::DownloadUrl(url) => {
            cli_args.urls = vec![url.clone()];
        }

        CLIAction::ResolveZippyUrl(url) => {
            let resolved_url = zippy::resolve_link(url).await.unwrap_or_else(|_| {
                eprintln!("Zippyshare link could not be resolved");
                exit(1);
            });

            println!("{}", resolved_url);
        },

        CLIAction::None => {
            eprintln!("No action selected. This should not happen");
            exit(1);
        }
    }

    download_multiple(cli_args).await
}

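/// Download all URLs in `cli_args.urls`, running up to `file_count` file
/// downloads concurrently with `conn_count` connections each. Progress is
/// reported through an mpsc channel and printed by `watch_and_print_reports`.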
async fn download_multiple(cli_args: CLIArguments) -> ResBE<()> {
    let outdir = cli_args.outdir;
    let outdir = Path::new(&outdir);

    let file_count = cli_args.file_count;
    let zippy = cli_args.zippy;
    let conn_count = cli_args.conn_count;

    if !outdir.exists() {
        if let Err(_e) = std::fs::create_dir_all(outdir) {
            eprintln!("Error creating output directory '{}'", outdir.display());
            exit(1);
        }
    }

    let t_start = SystemTime::now();

    let mut joiners = Vec::new();

    // Every worker task gets a clone of the sender; all progress reports are
    // printed from the single receiver.
    let (tx, rx) = mpsc::unbounded_channel::<DlReport>();

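    // Spawn one worker task per concurrent file slot. The URLs are striped
    // across the tasks round-robin: task `offset` handles every
    // `file_count`-th URL starting at index `offset`.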
    for offset in 0..file_count {

        let urls: Vec<String> = cli_args.urls
            .iter()
            .enumerate()
            .filter(|(index, _)| index % file_count as usize == offset as usize)
            .map(|(_, v)| v.to_owned())
            .collect();

        let tx = tx.clone();
        let outdir = outdir.to_owned();
        let arg_filename = cli_args.into_file.clone();

        joiners.push(tokio::task::spawn(async move {

            for (i, url) in urls.iter().enumerate() {

                let tx = tx.clone();

                // Index of this URL in the original url vector, used as the report id
                let global_url_index = i as u32 * file_count + offset;

                let rep = DlReporter::new(global_url_index, tx);

                // Zippyshare links first need to be resolved to the real download URL
                let url = if zippy {
                    match zippy::resolve_link(&url).await {
                        Ok(url) => url,
                        Err(_e) => {
                            rep.send(
                                DlStatus::Message(format!("Zippyshare link could not be resolved: {}", url))
                            );
                            continue;
                        }
                    }
                } else {
                    url.to_string()
                };

                let file_name = arg_filename.clone().unwrap_or_else(|| download::url_to_filename(&url));

                let into_file = outdir.join(Path::new(&file_name))
                    .to_str().unwrap().to_string();
                let path_into_file = Path::new(&into_file);

                let (filesize, range_supported) = match download::http_get_filesize_and_range_support(&url).await {
                    Ok((filesize, range_supported)) => (filesize, range_supported),
                    Err(_e) => {
                        rep.send(
                            DlStatus::Message(format!("Error while querying metadata: {}", url))
                        );
                        continue;
                    }
                };

                // If a file with the same name is present locally, compare filesizes:
                // skip complete files, redownload incomplete ones
                if path_into_file.exists() {

                    let local_filesize = std::fs::metadata(path_into_file).unwrap().len();

                    if filesize == local_filesize {
                        rep.send(DlStatus::Message(format!("Skipping file '{}': already present", &file_name)));
                        continue;
                    } else {
                        rep.send(DlStatus::Message(format!("Replacing file '{}': present but not completed", &file_name)));
                    }
                }

                if conn_count == 1 {
                    if let Err(_e) = download::download_feedback(&url, &into_file, rep.clone(), Some(filesize)).await {
                        rep.send(DlStatus::DoneErr {
                            filename: file_name.to_string()
                        });
                    }
                } else {

                    // Multi-connection downloads require HTTP range request support
                    if !range_supported {
                        rep.send(
                            DlStatus::Message(format!("Error: server does not support the Range header: {}", url))
                        );
                        continue;
                    }

                    if let Err(_e) = download::download_feedback_multi(&url, &into_file, rep.clone(), conn_count, Some(filesize)).await {
                        rep.send(DlStatus::DoneErr {
                            filename: file_name.to_string()
                        });
                    }
                }
            }
        }));
    }

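    // Drop the original sender: the report channel only closes once every
    // sender is gone, and each worker task holds its own clone.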
    drop(tx);

    dlreport::watch_and_print_reports(rx).await?;

    join_all(joiners).await;

    println!("Total time: {}s", t_start.elapsed()?.as_secs());

    Ok(())
}