Compare commits

...

7 Commits

Author  SHA1  Message  Date
Hachi-R  91bb371e0b  fix: reduce connections limit for http downloads  2025-04-14 09:34:00 -03:00
Hachi-R  e507676088  fix: adjust chunk and buffer sizes  2025-04-14 09:29:30 -03:00
Hachi-R  f3c7010930  fix: increase chunk size for better download performance  2025-04-13 20:39:04 -03:00
Hachi-R  66d40c566b  fix: download speed in resume  2025-04-13 20:05:09 -03:00
Hachi-R  2452a3a51a  fix: remove unnecessary comment in downloader  2025-04-13 17:25:03 -03:00
Hachi-R  4520f6bb20  fix: move final progress log after header removal  2025-04-13 16:44:39 -03:00
Hachi-R  d7b5bb5940  fix: optimize file handling in resume manager  2025-04-13 16:34:13 -03:00
3 changed files with 49 additions and 33 deletions

View File

@@ -14,8 +14,8 @@ class HttpDownloader:
         cmd.append(url)
         cmd.extend([
-            "--chunk-size", "10",
-            "--buffer-size", "16",
+            "--chunk-size", "64",
+            "--buffer-size", "20",
             "--force-download",
             "--log",
             "--silent"

View File

@@ -36,7 +36,7 @@ if start_download_payload:
     http_downloader = HttpDownloader(hydra_httpdl_bin)
     downloads[initial_download['game_id']] = http_downloader
     try:
-        http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get('allow_multiple_connections', False), initial_download.get('connections_limit', 24))
+        http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get('allow_multiple_connections', False), initial_download.get('connections_limit', 8))
     except Exception as e:
         print("Error starting http download", e)
@@ -148,11 +148,11 @@ def action():
             torrent_downloader.start_download(url, data['save_path'])
         else:
             if existing_downloader and isinstance(existing_downloader, HttpDownloader):
-                existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
+                existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 8))
             else:
                 http_downloader = HttpDownloader(hydra_httpdl_bin)
                 downloads[game_id] = http_downloader
-                http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
+                http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 8))
             downloading_game_id = game_id

View File

@@ -27,7 +27,7 @@ const DEFAULT_FORCE_DOWNLOAD: bool = false;
 const HEADER_SIZE: usize = 4096;
 const MAGIC_NUMBER: &[u8; 5] = b"HYDRA";
 const FORMAT_VERSION: u8 = 1;
-const FINALIZE_BUFFER_SIZE: usize = 1024 * 1024;
+// const FINALIZE_BUFFER_SIZE: usize = 1024 * 1024;
 
 #[derive(Parser)]
 #[command(name = "hydra-httpdl")]
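The #[derive(Parser)] / #[command(name = "hydra-httpdl")] attributes above mean this binary parses the flags that the Python diff earlier passes on its command line (--chunk-size, --buffer-size, --force-download, --log, --silent). As a hedged sketch only: the struct below shows how such flags could be declared with clap's derive API; the field types, units and default values here are assumptions, not the actual hydra-httpdl argument definition.

// Hedged sketch; assumes the clap crate with the "derive" feature enabled.
use clap::Parser;

#[derive(Parser)]
#[command(name = "hydra-httpdl")]
struct Args {
    /// Download URL (the Python caller appends it before the flags).
    url: String,
    /// Hypothetical chunk size; the unit (e.g. MiB) is an assumption.
    #[arg(long, default_value_t = 64)]
    chunk_size: u64,
    /// Hypothetical buffer size; the unit is an assumption.
    #[arg(long, default_value_t = 20)]
    buffer_size: u64,
    #[arg(long)]
    force_download: bool,
    #[arg(long)]
    log: bool,
    #[arg(long)]
    silent: bool,
}

fn main() {
    let args = Args::parse();
    println!("chunk-size={} buffer-size={}", args.chunk_size, args.buffer_size);
}

Invoked the way the Python side builds the command, this would look like: hydra-httpdl <url> --chunk-size 64 --buffer-size 20 --force-download --log --silent.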
@@ -364,7 +364,8 @@ impl Downloader {
             if let Some(pb) = &progress.bar {
                 let downloaded = file_size * completed_chunks as u64 / total_chunks as u64;
-                pb.inc(downloaded);
+                pb.set_position(downloaded);
+                pb.reset_elapsed();
             }
         }
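A note on the hunk above, as a minimal sketch assuming the indicatif crate (which the pb / inc / set_position / reset_elapsed calls suggest, though the import is not shown in this diff): inc adds a delta to the current position, so feeding it an absolute byte count recomputed from completed chunks inflates the bar after a resume, while set_position pins the bar to that absolute value, and reset_elapsed restarts the timer so the speed estimate covers only the active session.

// Minimal sketch, assuming indicatif; the numbers are illustrative.
use indicatif::ProgressBar;

fn main() {
    let file_size: u64 = 1_000;
    let total_chunks: u64 = 10;
    let completed_chunks: u64 = 4; // e.g. chunks already on disk after a resume

    let pb = ProgressBar::new(file_size);

    // Absolute position derived from completed chunks, as in the hunk above.
    let downloaded = file_size * completed_chunks / total_chunks;
    pb.set_position(downloaded); // safe to re-apply the same absolute value
    pb.reset_elapsed();          // speed/ETA now measure only the active session
    // pb.inc(downloaded) would instead ADD `downloaded` on top of the current
    // position each time, overshooting the bar when the count is re-applied.

    pb.finish();
}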
@@ -527,6 +528,19 @@ impl Downloader {
         progress.finish();
 
         if let Some((log_handle, log_cancel_tx)) = log_progress {
+            let _ = log_cancel_tx.send(());
+            let _ = log_handle.await;
+        }
+
+        let manager = resume_manager.lock().await;
+        if manager.is_download_complete() {
+            if self.config.should_log() {
+                println!("Download complete, finalizing file...");
+            }
+            manager.finalize_download()?;
+        }
+
+        tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
+
         if self.config.should_log_stats() {
             let json_output = json!({
                 "progress": 1.0,
@@ -539,17 +553,6 @@ impl Downloader {
             });
             println!("{}", json_output);
         }
-            let _ = log_cancel_tx.send(());
-            let _ = log_handle.await;
-        }
-
-        let manager = resume_manager.lock().await;
-        if manager.is_download_complete() {
-            if self.config.should_log() {
-                println!("Download complete, finalizing file...");
-            }
-            manager.finalize_download()?;
-        }
 
         Ok(())
     }
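The log_cancel_tx.send(()) / log_handle.await pair above is a shutdown handshake with the background progress logger; the channel type is not visible in this hunk, so the sketch below assumes a tokio oneshot channel and a spawned logging loop, which is one common way to wire it up.

// Hedged sketch of the cancellation handshake; assumes tokio with the
// "macros", "rt-multi-thread", "sync" and "time" features enabled.
use tokio::sync::oneshot;
use tokio::time::{interval, Duration};

#[tokio::main]
async fn main() {
    let (log_cancel_tx, mut log_cancel_rx) = oneshot::channel::<()>();

    // Stand-in for the periodic progress logger.
    let log_handle = tokio::spawn(async move {
        let mut tick = interval(Duration::from_millis(500));
        loop {
            tokio::select! {
                _ = tick.tick() => println!("progress ..."),
                _ = &mut log_cancel_rx => break, // stop once cancellation arrives
            }
        }
    });

    // ... download work finishes ...
    let _ = log_cancel_tx.send(()); // ask the logger to stop
    let _ = log_handle.await;       // wait for it to exit before finalizing
}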
@@ -891,27 +894,40 @@ impl ResumeManager {
             anyhow::bail!("Download is not complete");
         }
 
-        let temp_path = format!("{}.tmp", self.file_path);
-        let source = File::open(&self.file_path)?;
-        let dest = File::create(&temp_path)?;
-
-        let mut reader = BufReader::with_capacity(FINALIZE_BUFFER_SIZE, source);
-        let mut writer = BufWriter::with_capacity(FINALIZE_BUFFER_SIZE, dest);
-
-        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
-
-        std::io::copy(&mut reader, &mut writer)?;
-        writer.flush()?;
-        drop(writer);
-
-        match std::fs::rename(&temp_path, &self.file_path) {
-            Ok(_) => Ok(()),
-            Err(_) => {
-                let _ = std::fs::remove_file(&self.file_path);
-                std::fs::rename(&temp_path, &self.file_path)?;
-                Ok(())
-            }
-        }
+        let file = OpenOptions::new()
+            .read(true)
+            .write(true)
+            .open(&self.file_path)?;
+
+        let file_size = self.header.file_size;
+
+        let buffer_size = 64 * 1024 * 1024;
+        let mut buffer = vec![0u8; buffer_size.min(file_size as usize)];
+
+        let mut file = BufReader::new(file);
+        let mut write_pos = 0;
+        let mut read_pos = HEADER_SIZE as u64;
+
+        while read_pos < (HEADER_SIZE as u64 + file_size) {
+            file.seek(SeekFrom::Start(read_pos))?;
+
+            let bytes_read = file.read(&mut buffer)?;
+            if bytes_read == 0 {
+                break;
+            }
+
+            file.get_mut().seek(SeekFrom::Start(write_pos))?;
+            file.get_mut().write_all(&buffer[..bytes_read])?;
+
+            read_pos += bytes_read as u64;
+            write_pos += bytes_read as u64;
+        }
+
+        file.get_mut().set_len(file_size)?;
+        file.get_mut().flush()?;
+
+        Ok(())
     }
 }
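To make the intent of the rewritten finalize_download above clearer: instead of streaming the payload into a .tmp file and renaming it over the original, the new code shifts the payload left over its own HYDRA header inside the same file handle and then truncates the tail. The standalone sketch below demonstrates that in-place technique under simplified assumptions (a bare 4 KiB header, an illustrative file name and buffer size); it is not the hydra-httpdl implementation itself.

// Hedged, self-contained sketch of in-place header stripping.
use std::fs::OpenOptions;
use std::io::{Read, Seek, SeekFrom, Write};

const HEADER_SIZE: u64 = 4096; // same header size as the constant in the diff

fn strip_header_in_place(path: &str, payload_size: u64) -> std::io::Result<()> {
    let mut file = OpenOptions::new().read(true).write(true).open(path)?;
    let mut buffer = vec![0u8; 64 * 1024]; // illustrative buffer size

    let mut read_pos = HEADER_SIZE;
    let mut write_pos = 0u64;

    while read_pos < HEADER_SIZE + payload_size {
        // Read a block from after the header...
        file.seek(SeekFrom::Start(read_pos))?;
        let bytes_read = file.read(&mut buffer)?;
        if bytes_read == 0 {
            break;
        }

        // ...and write it back HEADER_SIZE bytes earlier. The write never
        // goes past data that was just read, so a single handle suffices.
        file.seek(SeekFrom::Start(write_pos))?;
        file.write_all(&buffer[..bytes_read])?;

        read_pos += bytes_read as u64;
        write_pos += bytes_read as u64;
    }

    file.set_len(payload_size)?; // drop the now-duplicated tail
    file.flush()?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    // Build a throwaway test file: 4 KiB of header bytes followed by a payload.
    let path = "finalize_demo.bin";
    let payload = b"payload bytes";
    std::fs::write(path, [vec![0u8; HEADER_SIZE as usize], payload.to_vec()].concat())?;

    strip_header_in_place(path, payload.len() as u64)?;
    assert_eq!(std::fs::read(path)?, payload);
    std::fs::remove_file(path)
}

Compared with the old copy-and-rename approach, this avoids temporarily needing twice the file's disk space, at the cost of not being atomic if the process dies mid-shift.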