Compare commits


7 Commits

Author  SHA1  Message  Date
Hachi-R  91bb371e0b  fix: reduce connections limit for http downloads  2025-04-14 09:34:00 -03:00
Hachi-R  e507676088  fix: adjust chunk and buffer sizes  2025-04-14 09:29:30 -03:00
Hachi-R  f3c7010930  fix: increase chunk size for better download performance  2025-04-13 20:39:04 -03:00
Hachi-R  66d40c566b  fix: download speed in resume  2025-04-13 20:05:09 -03:00
Hachi-R  2452a3a51a  fix: remove unnecessary comment in downloader  2025-04-13 17:25:03 -03:00
Hachi-R  4520f6bb20  fix: move final progress log after header removal  2025-04-13 16:44:39 -03:00
Hachi-R  d7b5bb5940  fix: optimize file handling in resume manager  2025-04-13 16:34:13 -03:00
3 changed files with 49 additions and 33 deletions

View File

@@ -14,8 +14,8 @@ class HttpDownloader:
         cmd.append(url)
         cmd.extend([
-            "--chunk-size", "10",
-            "--buffer-size", "16",
+            "--chunk-size", "64",
+            "--buffer-size", "20",
             "--force-download",
             "--log",
             "--silent"

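The two values above are handed straight to the hydra-httpdl CLI by this Python wrapper. For orientation, here is a minimal sketch of how flags like these could be declared on the Rust side with clap's derive API, matching the #[derive(Parser)] / #[command(name = "hydra-httpdl")] attributes visible later in this diff. The field types, defaults, and help text are assumptions for illustration, and the units of --chunk-size / --buffer-size are not stated anywhere in the diff:

// Sketch only: assumed declaration of the tuned flags, not the actual hydra-httpdl source.
use clap::Parser;

#[derive(Parser)]
#[command(name = "hydra-httpdl")]
struct Args {
    /// Download chunk size (passed as "--chunk-size 64" above; unit assumed, not stated).
    #[arg(long, default_value_t = 64)]
    chunk_size: u64,

    /// Write buffer size (passed as "--buffer-size 20" above; unit assumed, not stated).
    #[arg(long, default_value_t = 20)]
    buffer_size: u64,

    /// Overwrite an existing file instead of resuming (assumed meaning).
    #[arg(long)]
    force_download: bool,

    /// Emit progress log lines (assumed meaning).
    #[arg(long)]
    log: bool,

    /// Suppress the interactive progress bar (assumed meaning).
    #[arg(long)]
    silent: bool,
}

fn main() {
    let args = Args::parse();
    println!("chunk-size={} buffer-size={}", args.chunk_size, args.buffer_size);
}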
View File

@@ -36,7 +36,7 @@ if start_download_payload:
     http_downloader = HttpDownloader(hydra_httpdl_bin)
     downloads[initial_download['game_id']] = http_downloader
 
     try:
-        http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get('allow_multiple_connections', False), initial_download.get('connections_limit', 24))
+        http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get('allow_multiple_connections', False), initial_download.get('connections_limit', 8))
     except Exception as e:
         print("Error starting http download", e)
@@ -148,11 +148,11 @@ def action():
                 torrent_downloader.start_download(url, data['save_path'])
             else:
                 if existing_downloader and isinstance(existing_downloader, HttpDownloader):
-                    existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
+                    existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 8))
                 else:
                     http_downloader = HttpDownloader(hydra_httpdl_bin)
                     downloads[game_id] = http_downloader
-                    http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
+                    http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 8))
                 downloading_game_id = game_id

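Both call sites above lower the default connections_limit from 24 to 8, matching the "reduce connections limit" commit. As a generic illustration of what such a limit usually governs, here is a small sketch of capping concurrent chunk requests with a tokio semaphore; this is not claimed to be hydra-httpdl's actual implementation, and the function and variable names are invented for the example:

// Sketch: at most `connections_limit` chunk requests run at once.
use std::sync::Arc;
use tokio::sync::Semaphore;

async fn download_chunks(chunks: Vec<u64>, connections_limit: usize) {
    let permits = Arc::new(Semaphore::new(connections_limit));
    let mut handles = Vec::new();

    for chunk_id in chunks {
        let permits = Arc::clone(&permits);
        handles.push(tokio::spawn(async move {
            // Each task holds a permit while its request is in flight,
            // so concurrency never exceeds `connections_limit`.
            let _permit = permits.acquire_owned().await.expect("semaphore closed");
            // ... perform the ranged HTTP request for `chunk_id` here ...
            println!("downloading chunk {chunk_id}");
        }));
    }

    for handle in handles {
        let _ = handle.await;
    }
}

#[tokio::main]
async fn main() {
    download_chunks((0..32).collect(), 8).await;
}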
View File

@@ -27,7 +27,7 @@ const DEFAULT_FORCE_DOWNLOAD: bool = false;
 const HEADER_SIZE: usize = 4096;
 const MAGIC_NUMBER: &[u8; 5] = b"HYDRA";
 const FORMAT_VERSION: u8 = 1;
-const FINALIZE_BUFFER_SIZE: usize = 1024 * 1024;
+// const FINALIZE_BUFFER_SIZE: usize = 1024 * 1024;
 
 #[derive(Parser)]
 #[command(name = "hydra-httpdl")]
@@ -364,7 +364,8 @@ impl Downloader {
         if let Some(pb) = &progress.bar {
             let downloaded = file_size * completed_chunks as u64 / total_chunks as u64;
-            pb.inc(downloaded);
+            pb.set_position(downloaded);
+            pb.reset_elapsed();
         }
     }
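
The hunk above seeds the progress bar from already-completed chunks with set_position() plus reset_elapsed() instead of inc(). A small sketch of the distinction, assuming the ProgressBar here comes from the indicatif crate (the import is not shown in the diff): inc() adds a delta on top of the bar's current position, while set_position() writes an absolute offset, and reset_elapsed() restarts the elapsed clock so the timing shown after a resume reflects only the current session, which appears to be the intent of the "download speed in resume" fix. The function and values below are illustrative:

// Sketch: seeding a resumed download's progress bar with indicatif.
use indicatif::ProgressBar;

fn seed_resume_progress(pb: &ProgressBar, file_size: u64, completed_chunks: u64, total_chunks: u64) {
    let downloaded = file_size * completed_chunks / total_chunks;
    pb.set_position(downloaded); // absolute position, not an increment
    pb.reset_elapsed();          // elapsed time restarts at the resume point
}

fn main() {
    let pb = ProgressBar::new(1_000_000);
    seed_resume_progress(&pb, 1_000_000, 250, 1000);
    pb.inc(4096); // subsequent chunk completions still use inc()
    pb.finish();
}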
@@ -527,6 +528,19 @@ impl Downloader {
         progress.finish();
 
+        if let Some((log_handle, log_cancel_tx)) = log_progress {
+            let _ = log_cancel_tx.send(());
+            let _ = log_handle.await;
+        }
+
+        let manager = resume_manager.lock().await;
+        if manager.is_download_complete() {
+            if self.config.should_log() {
+                println!("Download complete, finalizing file...");
+            }
+            manager.finalize_download()?;
+        }
+        tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;
 
         if self.config.should_log_stats() {
             let json_output = json!({
                 "progress": 1.0,
@@ -539,17 +553,6 @@ impl Downloader {
             });
             println!("{}", json_output);
         }
-
-            let _ = log_cancel_tx.send(());
-            let _ = log_handle.await;
-        }
-
-        let manager = resume_manager.lock().await;
-        if manager.is_download_complete() {
-            if self.config.should_log() {
-                println!("Download complete, finalizing file...");
-            }
-            manager.finalize_download()?;
-        }
 
         Ok(())
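
The two hunks above move the log-task shutdown and the finalization step ahead of the final stats JSON, with a one-second pause before it, so the last progress line is only printed after the header has been removed. For context, a self-contained sketch of the shutdown pattern involved: a background logging task is told to stop through a cancellation channel, then its JoinHandle is awaited before printing the final line. The oneshot channel, task body, and messages here are assumptions for illustration, not hydra-httpdl's actual types:

// Sketch: cancel-then-await shutdown of a periodic progress logger.
use tokio::sync::oneshot;
use tokio::time::{sleep, Duration};

#[tokio::main]
async fn main() {
    let (log_cancel_tx, mut log_cancel_rx) = oneshot::channel::<()>();

    let log_handle = tokio::spawn(async move {
        loop {
            tokio::select! {
                _ = &mut log_cancel_rx => break, // stop requested
                _ = sleep(Duration::from_millis(500)) => {
                    println!("periodic progress tick"); // stand-in for the JSON progress line
                }
            }
        }
    });

    // ... download work would happen here ...

    let _ = log_cancel_tx.send(()); // ask the logger to stop
    let _ = log_handle.await;       // wait until it has actually exited
    println!("final progress line"); // now safe to print the last status
}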
@@ -891,27 +894,40 @@ impl ResumeManager {
             anyhow::bail!("Download is not complete");
         }
 
-        let temp_path = format!("{}.tmp", self.file_path);
-        let source = File::open(&self.file_path)?;
-        let dest = File::create(&temp_path)?;
+        let file = OpenOptions::new()
+            .read(true)
+            .write(true)
+            .open(&self.file_path)?;
 
-        let mut reader = BufReader::with_capacity(FINALIZE_BUFFER_SIZE, source);
-        let mut writer = BufWriter::with_capacity(FINALIZE_BUFFER_SIZE, dest);
         let file_size = self.header.file_size;
 
-        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
+        let buffer_size = 64 * 1024 * 1024;
+        let mut buffer = vec![0u8; buffer_size.min(file_size as usize)];
 
-        std::io::copy(&mut reader, &mut writer)?;
-        writer.flush()?;
-        drop(writer);
+        let mut file = BufReader::new(file);
+        let mut write_pos = 0;
+        let mut read_pos = HEADER_SIZE as u64;
 
-        match std::fs::rename(&temp_path, &self.file_path) {
-            Ok(_) => Ok(()),
-            Err(_) => {
-                let _ = std::fs::remove_file(&self.file_path);
-                std::fs::rename(&temp_path, &self.file_path)?;
-                Ok(())
-            }
-        }
+        while read_pos < (HEADER_SIZE as u64 + file_size) {
+            file.seek(SeekFrom::Start(read_pos))?;
+            let bytes_read = file.read(&mut buffer)?;
+            if bytes_read == 0 {
+                break;
+            }
+
+            file.get_mut().seek(SeekFrom::Start(write_pos))?;
+            file.get_mut().write_all(&buffer[..bytes_read])?;
+
+            read_pos += bytes_read as u64;
+            write_pos += bytes_read as u64;
+        }
+
+        file.get_mut().set_len(file_size)?;
+        file.get_mut().flush()?;
+
+        Ok(())
     }
 }
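
The rewritten finalize_download above drops the temp-file copy-and-rename in favor of sliding the payload backward over the 4096-byte header in place, in blocks of up to 64 MiB, then truncating the file to its payload size. A standalone sketch of that in-place pattern using only std I/O; the function name and signature are invented for the example, and unlike the real code it does not wrap the file in a BufReader:

// Sketch: strip a fixed-size header from the front of a file in place.
use std::fs::OpenOptions;
use std::io::{Read, Seek, SeekFrom, Write};

const HEADER_SIZE: u64 = 4096;

fn strip_header_in_place(path: &str, payload_size: u64) -> std::io::Result<()> {
    let mut file = OpenOptions::new().read(true).write(true).open(path)?;

    // Copy in large blocks; the read cursor always stays HEADER_SIZE ahead
    // of the write cursor, so a block never overwrites data it has not read yet.
    let block = 64 * 1024 * 1024usize;
    let mut buffer = vec![0u8; block.min(payload_size as usize).max(1)];

    let mut read_pos = HEADER_SIZE; // payload starts right after the header
    let mut write_pos = 0u64;       // destination: start of the file

    while read_pos < HEADER_SIZE + payload_size {
        file.seek(SeekFrom::Start(read_pos))?;
        let n = file.read(&mut buffer)?;
        if n == 0 {
            break;
        }
        file.seek(SeekFrom::Start(write_pos))?;
        file.write_all(&buffer[..n])?;
        read_pos += n as u64;
        write_pos += n as u64;
    }

    file.set_len(payload_size)?; // drop the now-unused tail
    file.flush()?;
    Ok(())
}

Compared with the old copy-to-temp approach, this needs no extra disk space and no rename fallback, at the cost of not being atomic: if the process dies mid-shift, the file is left partially rewritten rather than untouched.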