feat: importing sources on auth

This commit is contained in:
Chubby Granny Chaser
2025-04-12 21:42:22 +01:00
23 changed files with 3211 additions and 168 deletions

View File

@@ -10,7 +10,8 @@ jobs:
build:
strategy:
matrix:
os: [windows-latest, ubuntu-latest]
# os: [windows-latest, ubuntu-latest]
os: [windows-latest]
runs-on: ${{ matrix.os }}
@@ -31,6 +32,16 @@ jobs:
with:
python-version: 3.9
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
components: rustfmt
- name: Build Rust
run: cargo build --release
working-directory: ./rust_rpc
- name: Install dependencies
run: pip install -r requirements.txt

4
.gitignore vendored
View File

@@ -9,10 +9,12 @@ out
.vite
ludusavi/
hydra-python-rpc/
aria2/
.python-version
# Sentry Config File
.env.sentry-build-plugin
*storybook.log
target/

View File

@@ -3,7 +3,6 @@ productName: Hydra
directories:
buildResources: build
extraResources:
- aria2
- ludusavi
- hydra-python-rpc
- seeds
@@ -23,6 +22,7 @@ win:
extraResources:
- from: binaries/7z.exe
- from: binaries/7z.dll
- from: rust_rpc/target/release/hydra-httpdl.exe
target:
- nsis
- portable
@@ -40,6 +40,7 @@ mac:
entitlementsInherit: build/entitlements.mac.plist
extraResources:
- from: binaries/7zz
- from: rust_rpc/target/release/hydra-httpdl
extendInfo:
- NSCameraUsageDescription: Application requests access to the device's camera.
- NSMicrophoneUsageDescription: Application requests access to the device's microphone.
@@ -51,6 +52,7 @@ dmg:
linux:
extraResources:
- from: binaries/7zzs
- from: rust_rpc/target/release/hydra-httpdl
target:
- AppImage
- snap

View File

@@ -21,7 +21,7 @@
"typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
"typecheck": "npm run typecheck:node && npm run typecheck:web",
"start": "electron-vite preview",
"dev": "electron-vite dev",
"dev": "cargo build --manifest-path=rust_rpc/Cargo.toml && electron-vite dev",
"build": "npm run typecheck && electron-vite build",
"postinstall": "electron-builder install-app-deps && node ./scripts/postinstall.cjs",
"build:unpack": "npm run build && electron-builder --dir",

View File

@@ -1,48 +1,94 @@
import aria2p
import os
import subprocess
import json
class HttpDownloader:
def __init__(self):
self.download = None
self.aria2 = aria2p.API(
aria2p.Client(
host="http://localhost",
port=6800,
secret=""
)
)
def __init__(self, hydra_httpdl_bin: str):
self.hydra_exe = hydra_httpdl_bin
self.process = None
self.last_status = None
def start_download(self, url: str, save_path: str, header: str, out: str = None):
if self.download:
self.aria2.resume([self.download])
def start_download(self, url: str, save_path: str, header: str = None, out: str = None, allow_multiple_connections: bool = False, connections_limit: int = 1):
cmd = [self.hydra_exe]
cmd.append(url)
cmd.extend([
"--chunk-size", "10",
"--buffer-size", "16",
"--force-download",
"--log",
"--silent"
])
if header:
cmd.extend(["--header", header])
if allow_multiple_connections:
cmd.extend(["--connections", str(connections_limit)])
else:
downloads = self.aria2.add(url, options={"header": header, "dir": save_path, "out": out})
self.download = downloads[0]
def pause_download(self):
if self.download:
self.aria2.pause([self.download])
def cancel_download(self):
if self.download:
self.aria2.remove([self.download])
self.download = None
def get_download_status(self):
if self.download == None:
cmd.extend(["--connections", "1"])
print(f"running hydra-httpdl: {' '.join(cmd)}")
try:
self.process = subprocess.Popen(
cmd,
cwd=save_path,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
except Exception as e:
print(f"error running hydra-httpdl: {e}")
return None
download = self.aria2.get_download(self.download.gid)
response = {
'folderName': download.name,
'fileSize': download.total_length,
'progress': download.completed_length / download.total_length if download.total_length else 0,
'downloadSpeed': download.download_speed,
'numPeers': 0,
'numSeeds': 0,
'status': download.status,
'bytesDownloaded': download.completed_length,
}
return response
def get_download_status(self):
if not self.process:
return None
try:
line = self.process.stdout.readline()
if line:
status = json.loads(line.strip())
self.last_status = status
elif self.last_status:
status = self.last_status
else:
return None
response = {
"status": "active",
"progress": status["progress"],
"downloadSpeed": status["speed_bps"],
"numPeers": 0,
"numSeeds": 0,
"bytesDownloaded": status["downloaded_bytes"],
"fileSize": status["total_bytes"],
"folderName": status["filename"]
}
if status["progress"] == 1:
response["status"] = "complete"
return response
except Exception as e:
print(f"error getting download status: {e}")
return None
def stop_download(self):
if self.process:
self.process.terminate()
self.process = None
self.last_status = None
def pause_download(self):
self.stop_download()
def cancel_download(self):
self.stop_download()

View File

@@ -13,6 +13,7 @@ http_port = sys.argv[2]
rpc_password = sys.argv[3]
start_download_payload = sys.argv[4]
start_seeding_payload = sys.argv[5]
hydra_httpdl_bin = sys.argv[6]
downloads = {}
# This can be streamed down from Node
@@ -32,7 +33,7 @@ if start_download_payload:
except Exception as e:
print("Error starting torrent download", e)
else:
http_downloader = HttpDownloader()
http_downloader = HttpDownloader(hydra_httpdl_bin)
downloads[initial_download['game_id']] = http_downloader
try:
http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
@@ -147,11 +148,11 @@ def action():
torrent_downloader.start_download(url, data['save_path'])
else:
if existing_downloader and isinstance(existing_downloader, HttpDownloader):
existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
else:
http_downloader = HttpDownloader()
http_downloader = HttpDownloader(hydra_httpdl_bin)
downloads[game_id] = http_downloader
http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'))
http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
downloading_game_id = game_id

View File

@@ -5,4 +5,3 @@ pywin32; sys_platform == 'win32'
psutil
Pillow
flask
aria2p

2040
rust_rpc/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

25
rust_rpc/Cargo.toml Normal file
View File

@@ -0,0 +1,25 @@
[package]
name = "hydra-httpdl"
version = "0.1.0"
edition = "2021"
[dependencies]
tokio = { version = "1", features = ["full", "macros", "rt-multi-thread"] }
reqwest = { version = "0.12.5", features = ["stream"] }
futures = "0.3"
bytes = "1.4"
indicatif = "0.17"
anyhow = "1.0"
async-trait = "0.1"
tokio-util = { version = "0.7", features = ["io"] }
clap = { version = "4.4", features = ["derive"] }
urlencoding = "2.1"
serde_json = "1.0"
bitvec = "1.0"
sha2 = "0.10"
[profile.release]
opt-level = 3
lto = "fat"
codegen-units = 1
panic = "abort"
strip = true

968
rust_rpc/src/main.rs Normal file
View File

@@ -0,0 +1,968 @@
use anyhow::Result;
use bitvec::prelude::*;
use clap::Parser;
use futures::stream::{FuturesUnordered, StreamExt};
use indicatif::{ProgressBar, ProgressStyle};
use reqwest::{Client, StatusCode, Url};
use serde_json::json;
use sha2::{Digest, Sha256};
use std::fs::{File, OpenOptions};
use std::io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::path::Path;
use std::sync::Arc;
use tokio::sync::Mutex;
// ---- retry policy for failed chunk requests ----
const DEFAULT_MAX_RETRIES: usize = 3;
// Base backoff delay; doubled on each retry (exponential backoff).
const RETRY_BACKOFF_MS: u64 = 500;
// ---- CLI defaults (mirrored by the clap arg definitions below) ----
const DEFAULT_OUTPUT_FILENAME: &str = "output.bin";
const DEFAULT_CONNECTIONS: usize = 16;
const DEFAULT_CHUNK_SIZE_MB: usize = 5;
const DEFAULT_BUFFER_SIZE_MB: usize = 8;
const DEFAULT_VERBOSE: bool = false;
const DEFAULT_SILENT: bool = false;
const DEFAULT_LOG: bool = false;
const DEFAULT_FORCE_NEW: bool = false;
const DEFAULT_RESUME_ONLY: bool = false;
const DEFAULT_FORCE_DOWNLOAD: bool = false;
// ---- on-disk resume-header format ----
// Fixed-size region reserved at the start of the partial file; the real
// payload starts at this offset and the header is stripped on finalize.
const HEADER_SIZE: usize = 4096;
// Magic bytes and format version checked when resuming a partial file.
const MAGIC_NUMBER: &[u8; 5] = b"HYDRA";
const FORMAT_VERSION: u8 = 1;
// Copy-buffer size used when stripping the header during finalization.
const FINALIZE_BUFFER_SIZE: usize = 1024 * 1024;
/// Command-line interface for the downloader, parsed with clap's derive API.
// NOTE(review): version "0.2.0" here disagrees with Cargo.toml's "0.1.0" —
// confirm which is canonical.
#[derive(Parser)]
#[command(name = "hydra-httpdl")]
#[command(author = "los-broxas")]
#[command(version = "0.2.0")]
#[command(about = "high speed and low resource usage http downloader with resume capability", long_about = None)]
struct CliArgs {
/// file url to download
#[arg(required = true)]
url: String,
/// output file path (or directory to save with original filename)
#[arg(default_value = DEFAULT_OUTPUT_FILENAME)]
output: String,
/// number of concurrent connections for parallel download
#[arg(short = 'c', long, default_value_t = DEFAULT_CONNECTIONS)]
connections: usize,
/// chunk size in MB for each connection
#[arg(short = 'k', long, default_value_t = DEFAULT_CHUNK_SIZE_MB)]
chunk_size: usize,
/// buffer size in MB for file writing
#[arg(short, long, default_value_t = DEFAULT_BUFFER_SIZE_MB)]
buffer_size: usize,
/// show detailed progress information
#[arg(short = 'v', long, default_value_t = DEFAULT_VERBOSE)]
verbose: bool,
/// suppress progress bar
#[arg(short = 's', long, default_value_t = DEFAULT_SILENT)]
silent: bool,
/// log download statistics in JSON format every second
#[arg(short = 'l', long, default_value_t = DEFAULT_LOG)]
log: bool,
/// force new download, ignore existing partial files
#[arg(short = 'f', long, default_value_t = DEFAULT_FORCE_NEW)]
force_new: bool,
/// only resume existing download, exit if no partial file exists
#[arg(short = 'r', long, default_value_t = DEFAULT_RESUME_ONLY)]
resume_only: bool,
/// force download, ignore some verification checks
#[arg(short = 'F', long, default_value_t = DEFAULT_FORCE_DOWNLOAD)]
force_download: bool,
/// HTTP headers to send with request (format: "Key: Value")
#[arg(short = 'H', long)]
header: Vec<String>,
}
/// Resolved runtime configuration, derived from `CliArgs` in `main`
/// (sizes already converted from MB to bytes there).
struct DownloadConfig {
url: String,
output_path: String,
num_connections: usize,
// Chunk size in BYTES (CLI value multiplied by 1024*1024 in main).
chunk_size: usize,
// Write-buffer size in BYTES for the output BufWriter.
buffer_size: usize,
verbose: bool,
silent: bool,
// When true, a JSON stats line is printed every second.
log: bool,
force_new: bool,
resume_only: bool,
// Raw "Key: Value" header strings from the CLI, parsed at request time.
headers: Vec<String>,
force_download: bool,
}
impl DownloadConfig {
    /// True when human-readable progress messages should be printed:
    /// verbose mode, unless silenced.
    fn should_log(&self) -> bool {
        !self.silent && self.verbose
    }

    /// True when the once-per-second JSON stats feed is enabled.
    fn should_log_stats(&self) -> bool {
        self.log
    }
}
/// Snapshot of download progress, serialized to JSON for the `--log` feed.
struct DownloadStats {
// Fraction in [0.0, 1.0] (bytes downloaded / total bytes).
progress_percent: f64,
bytes_downloaded: u64,
total_size: u64,
speed_bytes_per_sec: f64,
eta_seconds: u64,
elapsed_seconds: u64,
}
/// Resume metadata stored in the first `HEADER_SIZE` bytes of a partial
/// download file; stripped from the file on finalization.
struct HydraHeader {
magic: [u8; 5], // "HYDRA" identifier
version: u8, // header version
file_size: u64, // file size
etag: [u8; 32], // etag hash
url_hash: [u8; 32], // url hash
chunk_size: u32, // chunk size
chunk_count: u32, // chunk count
chunks_bitmap: BitVec<u8>, // chunks bitmap
}
impl HydraHeader {
    /// Byte length of the fixed fields that precede the chunk bitmap:
    /// magic (5) + version (1) + file_size (8) + etag (32) + url_hash (32)
    /// + chunk_size (4) + chunk_count (4) = 86.
    const FIXED_FIELDS_LEN: usize = 5 + 1 + 8 + 32 + 32 + 4 + 4;

    /// Builds a fresh header for a download of `file_size` bytes split into
    /// `chunk_size`-byte chunks. The ETag and URL are stored as SHA-256
    /// hashes so a later run can verify it is resuming the same download.
    fn new(file_size: u64, etag: &str, url: &str, chunk_size: u32) -> Self {
        let chunk_count = ((file_size as f64) / (chunk_size as f64)).ceil() as u32;
        let chunks_bitmap = bitvec![u8, Lsb0; 0; chunk_count as usize];
        let mut etag_hash = [0u8; 32];
        etag_hash.copy_from_slice(&Sha256::digest(etag.as_bytes())[..]);
        let mut url_hash = [0u8; 32];
        url_hash.copy_from_slice(&Sha256::digest(url.as_bytes())[..]);
        Self {
            magic: *MAGIC_NUMBER,
            version: FORMAT_VERSION,
            file_size,
            etag: etag_hash,
            url_hash,
            chunk_size,
            chunk_count,
            chunks_bitmap,
        }
    }

    /// Serializes the header, zero-padded to exactly `HEADER_SIZE` bytes.
    ///
    /// # Errors
    /// Fails on I/O errors, or if the chunk bitmap does not fit in the
    /// fixed-size header region. (The original unchecked subtraction
    /// `HEADER_SIZE - header_size` would panic on underflow for very large
    /// files with small chunks.)
    fn write_to_file<W: Write + Seek>(&self, writer: &mut W) -> Result<()> {
        writer.write_all(&self.magic)?;
        writer.write_all(&[self.version])?;
        writer.write_all(&self.file_size.to_le_bytes())?;
        writer.write_all(&self.etag)?;
        writer.write_all(&self.url_hash)?;
        writer.write_all(&self.chunk_size.to_le_bytes())?;
        writer.write_all(&self.chunk_count.to_le_bytes())?;
        let bitmap_bytes = self.chunks_bitmap.as_raw_slice();
        writer.write_all(bitmap_bytes)?;
        let header_size = Self::FIXED_FIELDS_LEN + bitmap_bytes.len();
        let padding_size = HEADER_SIZE.checked_sub(header_size).ok_or_else(|| {
            anyhow::anyhow!(
                "chunk bitmap ({} bytes) does not fit in the {}-byte header; use a larger chunk size",
                bitmap_bytes.len(),
                HEADER_SIZE
            )
        })?;
        writer.write_all(&vec![0u8; padding_size])?;
        Ok(())
    }

    /// Reads and validates a header, leaving the reader positioned at the
    /// start of the payload (`HEADER_SIZE`).
    ///
    /// # Errors
    /// Fails on I/O errors, wrong magic, incompatible version, or a
    /// declared bitmap too large for the header region (corrupt file).
    fn read_from_file<R: Read + Seek>(reader: &mut R) -> Result<Self> {
        let mut magic = [0u8; 5];
        reader.read_exact(&mut magic)?;
        if magic != *MAGIC_NUMBER {
            anyhow::bail!("Not a valid Hydra download file");
        }
        let mut version = [0u8; 1];
        reader.read_exact(&mut version)?;
        if version[0] != FORMAT_VERSION {
            anyhow::bail!("Incompatible format version");
        }
        let mut file_size_bytes = [0u8; 8];
        reader.read_exact(&mut file_size_bytes)?;
        let file_size = u64::from_le_bytes(file_size_bytes);
        let mut etag = [0u8; 32];
        reader.read_exact(&mut etag)?;
        let mut url_hash = [0u8; 32];
        reader.read_exact(&mut url_hash)?;
        let mut chunk_size_bytes = [0u8; 4];
        reader.read_exact(&mut chunk_size_bytes)?;
        let chunk_size = u32::from_le_bytes(chunk_size_bytes);
        let mut chunk_count_bytes = [0u8; 4];
        reader.read_exact(&mut chunk_count_bytes)?;
        let chunk_count = u32::from_le_bytes(chunk_count_bytes);
        let bitmap_bytes_len = (chunk_count as usize + 7) / 8;
        // Guard against a corrupt chunk_count: without this we would read
        // file payload as bitmap data.
        if Self::FIXED_FIELDS_LEN + bitmap_bytes_len > HEADER_SIZE {
            anyhow::bail!("Corrupt header: chunk bitmap larger than header region");
        }
        let mut bitmap_bytes = vec![0u8; bitmap_bytes_len];
        reader.read_exact(&mut bitmap_bytes)?;
        let chunks_bitmap = BitVec::<u8, Lsb0>::from_vec(bitmap_bytes);
        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
        Ok(Self {
            magic,
            version: version[0],
            file_size,
            etag,
            url_hash,
            chunk_size,
            chunk_count,
            chunks_bitmap,
        })
    }

    /// Marks chunk `chunk_index` as downloaded.
    fn set_chunk_complete(&mut self, chunk_index: usize) -> Result<()> {
        if chunk_index >= self.chunk_count as usize {
            anyhow::bail!("Chunk index out of bounds");
        }
        self.chunks_bitmap.set(chunk_index, true);
        Ok(())
    }

    /// True when `chunk_index` is already downloaded; out-of-range indices
    /// read as incomplete.
    fn is_chunk_complete(&self, chunk_index: usize) -> bool {
        if chunk_index >= self.chunk_count as usize {
            return false;
        }
        self.chunks_bitmap[chunk_index]
    }

    /// Inclusive byte ranges `(start, end)` of every chunk still missing.
    fn get_incomplete_chunks(&self) -> Vec<(u64, u64)> {
        let incomplete_count = self.chunk_count as usize - self.chunks_bitmap.count_ones();
        let mut chunks = Vec::with_capacity(incomplete_count);
        let chunk_size = self.chunk_size as u64;
        for i in 0..self.chunk_count as usize {
            if !self.is_chunk_complete(i) {
                let start = i as u64 * chunk_size;
                // The last chunk may be shorter than chunk_size.
                let end = std::cmp::min((i as u64 + 1) * chunk_size - 1, self.file_size - 1);
                chunks.push((start, end));
            }
        }
        chunks
    }

    /// True when every chunk has been marked complete.
    fn is_download_complete(&self) -> bool {
        self.chunks_bitmap.count_ones() == self.chunk_count as usize
    }
}
/// Thin wrapper over an optional indicatif progress bar; `bar` is `None`
/// when running silent with JSON stats disabled, and all methods no-op then.
struct ProgressTracker {
bar: Option<ProgressBar>,
}
impl ProgressTracker {
    /// Builds a tracker. A bar exists whenever it is needed for display
    /// (`!silent`) or as the data source for the JSON stats feed
    /// (`enable_stats`); in silent mode the bar is kept but hidden.
    fn new(file_size: u64, silent: bool, enable_stats: bool) -> Result<Self> {
        // Equivalent to the original `!silent || enable_stats` gate.
        if silent && !enable_stats {
            return Ok(Self { bar: None });
        }
        let pb = ProgressBar::new(file_size);
        pb.set_style(ProgressStyle::default_bar().template(
            "[{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})",
        )?);
        if silent {
            pb.set_draw_target(indicatif::ProgressDrawTarget::hidden());
        }
        Ok(Self { bar: Some(pb) })
    }

    /// Advances the bar by `delta` bytes (no-op without a bar).
    fn increment(&self, delta: u64) {
        if let Some(pb) = &self.bar {
            pb.inc(delta);
        }
    }

    /// Finishes the bar with a completion message (no-op without a bar).
    fn finish(&self) {
        if let Some(pb) = &self.bar {
            pb.finish_with_message("Download complete");
        }
    }

    /// Derives a stats snapshot from the bar, or `None` without one.
    fn get_stats(&self) -> Option<DownloadStats> {
        let pb = self.bar.as_ref()?;
        let done = pb.position();
        let total = pb.length().unwrap_or(1);
        Some(DownloadStats {
            progress_percent: done as f64 / total as f64,
            bytes_downloaded: done,
            total_size: total,
            speed_bytes_per_sec: pb.per_sec(),
            eta_seconds: pb.eta().as_secs(),
            elapsed_seconds: pb.elapsed().as_secs(),
        })
    }
}
/// Ties an HTTP client to a resolved configuration; owns the whole
/// probe → resume → parallel-chunk download flow.
struct Downloader {
client: Client,
config: DownloadConfig,
}
impl Downloader {
    /// Applies the user-supplied `"Key: Value"` header strings to a request
    /// builder. Entries without a `':'` are silently skipped.
    ///
    /// Shared by the HEAD probe, the Range-support probe and the chunk
    /// requests so header handling stays consistent (the original code
    /// duplicated this loop and omitted it from the Range probe entirely,
    /// which broke the probe against auth-protected URLs).
    fn apply_headers(
        mut req: reqwest::RequestBuilder,
        headers: &[String],
    ) -> reqwest::RequestBuilder {
        for header in headers {
            if let Some(idx) = header.find(':') {
                let (name, value) = header.split_at(idx);
                // `value` still starts with ':'; drop it, then trim padding.
                req = req.header(name.trim(), value[1..].trim());
            }
        }
        req
    }

    /// Top-level flow: probe the server, set up (or resume) the on-disk
    /// state, then fan out chunk downloads and finalize.
    async fn download(&self) -> Result<()> {
        let (file_size, filename, etag) = self.get_file_info().await?;
        let output_path = self.determine_output_path(filename);
        if self.config.should_log() {
            println!("Detected filename: {}", output_path);
        }
        let resume_manager = ResumeManager::try_from_file(
            &output_path,
            file_size,
            &etag,
            &self.config.url,
            self.config.chunk_size as u32,
            self.config.force_new,
            self.config.resume_only,
        )?;
        let file = self.prepare_output_file(&output_path, file_size)?;
        let progress = ProgressTracker::new(file_size, self.config.silent, self.config.log)?;
        let chunks = if resume_manager.is_download_complete() {
            if self.config.should_log() {
                println!("File is already fully downloaded, finalizing...");
            }
            resume_manager.finalize_download()?;
            return Ok(());
        } else {
            let completed_chunks = resume_manager.header.chunks_bitmap.count_ones() as u32;
            let total_chunks = resume_manager.header.chunk_count;
            if completed_chunks > 0 {
                if self.config.should_log() {
                    let percent_done = (completed_chunks as f64 / total_chunks as f64) * 100.0;
                    println!("Resuming download: {:.1}% already downloaded", percent_done);
                }
                if let Some(pb) = &progress.bar {
                    // Approximate resumed bytes from the completed-chunk ratio.
                    let downloaded = file_size * completed_chunks as u64 / total_chunks as u64;
                    pb.inc(downloaded);
                }
            }
            resume_manager.get_incomplete_chunks()
        };
        if self.config.should_log() {
            println!(
                "Downloading {} chunks of total {}",
                chunks.len(),
                resume_manager.header.chunk_count
            );
        }
        self.process_chunks_with_resume(
            chunks,
            file,
            file_size,
            progress,
            output_path.clone(),
            resume_manager,
        )
        .await?;
        Ok(())
    }

    /// Uses the server-provided filename only when the user kept the
    /// default output name; an explicit output path always wins.
    fn determine_output_path(&self, filename: Option<String>) -> String {
        if Path::new(&self.config.output_path)
            .file_name()
            .unwrap_or_default()
            == DEFAULT_OUTPUT_FILENAME
            && filename.is_some()
        {
            filename.unwrap()
        } else {
            self.config.output_path.clone()
        }
    }

    /// Opens (or creates and pre-sizes to header + payload) the output file,
    /// wrapped in a shared buffered writer for the chunk tasks.
    fn prepare_output_file(&self, path: &str, size: u64) -> Result<Arc<Mutex<BufWriter<File>>>> {
        let file = if Path::new(path).exists() {
            OpenOptions::new().read(true).write(true).open(path)?
        } else {
            let file = File::create(path)?;
            file.set_len(HEADER_SIZE as u64 + size)?;
            file
        };
        Ok(Arc::new(Mutex::new(BufWriter::with_capacity(
            self.config.buffer_size,
            file,
        ))))
    }

    /// Downloads every range in `chunks` with up to `num_connections`
    /// concurrent tasks, persisting per-chunk completion through
    /// `resume_manager`, then finalizes the file once the bitmap is full.
    async fn process_chunks_with_resume(
        &self,
        chunks: Vec<(u64, u64)>,
        file: Arc<Mutex<BufWriter<File>>>,
        file_size: u64,
        progress: ProgressTracker,
        real_filename: String,
        resume_manager: ResumeManager,
    ) -> Result<()> {
        let mut tasks = FuturesUnordered::new();
        // Optional once-per-second JSON stats feed on stdout; the oneshot
        // channel lets us stop it cleanly after the final summary line.
        let log_progress = if self.config.should_log_stats() {
            let progress_clone = progress.bar.clone();
            let filename = real_filename.clone();
            let (log_cancel_tx, mut log_cancel_rx) = tokio::sync::oneshot::channel();
            let log_task = tokio::spawn(async move {
                let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(1));
                let tracker = ProgressTracker { bar: progress_clone };
                loop {
                    tokio::select! {
                        _ = interval.tick() => {
                            if let Some(stats) = tracker.get_stats() {
                                let json_output = json!({
                                    "progress": stats.progress_percent,
                                    "speed_bps": stats.speed_bytes_per_sec,
                                    "downloaded_bytes": stats.bytes_downloaded,
                                    "total_bytes": stats.total_size,
                                    "eta_seconds": stats.eta_seconds,
                                    "elapsed_seconds": stats.elapsed_seconds,
                                    "filename": filename
                                });
                                println!("{}", json_output);
                            }
                        }
                        _ = &mut log_cancel_rx => {
                            break;
                        }
                    }
                }
            });
            Some((log_task, log_cancel_tx))
        } else {
            None
        };
        let resume_manager = Arc::new(Mutex::new(resume_manager));
        for (start, end) in chunks {
            let client = self.client.clone();
            let url = self.config.url.clone();
            let file_clone = Arc::clone(&file);
            let pb_clone = progress.bar.clone();
            let manager_clone = Arc::clone(&resume_manager);
            let headers = self.config.headers.clone();
            let force_download = self.config.force_download;
            let should_log = self.config.should_log();
            let chunk_size = self.config.chunk_size as u64;
            let chunk_index = (start / chunk_size) as usize;
            tasks.push(tokio::spawn(async move {
                let result = Self::download_chunk_with_retry(
                    client,
                    url,
                    start,
                    end,
                    file_clone,
                    pb_clone,
                    DEFAULT_MAX_RETRIES,
                    &headers,
                    force_download,
                    should_log,
                )
                .await;
                if result.is_ok() {
                    // Persist completion only after the chunk landed on disk.
                    let mut manager = manager_clone.lock().await;
                    manager.set_chunk_complete(chunk_index)?;
                }
                result
            }));
            // Simple backpressure: once the pool is full, wait for one task.
            if tasks.len() >= self.config.num_connections {
                if let Some(result) = tasks.next().await {
                    result??;
                }
            }
        }
        while let Some(result) = tasks.next().await {
            result??;
        }
        {
            let mut writer = file.lock().await;
            writer.flush()?;
        }
        progress.finish();
        if let Some((log_handle, log_cancel_tx)) = log_progress {
            if self.config.should_log_stats() {
                // Emit a final 100% record so consumers always see completion.
                let json_output = json!({
                    "progress": 1.0,
                    "speed_bps": 0.0,
                    "downloaded_bytes": file_size,
                    "total_bytes": file_size,
                    "eta_seconds": 0,
                    "elapsed_seconds": if let Some(pb) = &progress.bar { pb.elapsed().as_secs() } else { 0 },
                    "filename": real_filename
                });
                println!("{}", json_output);
            }
            let _ = log_cancel_tx.send(());
            let _ = log_handle.await;
        }
        let manager = resume_manager.lock().await;
        if manager.is_download_complete() {
            if self.config.should_log() {
                println!("Download complete, finalizing file...");
            }
            manager.finalize_download()?;
        }
        Ok(())
    }

    /// Retries `download_chunk` up to `max_retries` times with exponential
    /// backoff (`RETRY_BACKOFF_MS * 2^(attempt-1)`); returns the last error.
    async fn download_chunk_with_retry(
        client: Client,
        url: String,
        start: u64,
        end: u64,
        file: Arc<Mutex<BufWriter<File>>>,
        progress_bar: Option<ProgressBar>,
        max_retries: usize,
        headers: &[String],
        force_download: bool,
        should_log: bool,
    ) -> Result<()> {
        let mut retries = 0;
        loop {
            match Self::download_chunk(
                client.clone(),
                url.clone(),
                start,
                end,
                file.clone(),
                progress_bar.clone(),
                headers,
                force_download,
                should_log,
            )
            .await
            {
                Ok(_) => return Ok(()),
                Err(e) => {
                    retries += 1;
                    if retries >= max_retries {
                        return Err(e);
                    }
                    tokio::time::sleep(tokio::time::Duration::from_millis(
                        RETRY_BACKOFF_MS * (2_u64.pow(retries as u32 - 1)),
                    ))
                    .await;
                }
            }
        }
    }

    /// Streams the byte range `[start, end]` into the shared writer at
    /// offset `HEADER_SIZE + start`, advancing the progress bar as it goes.
    /// If the server over-delivers (e.g. ignored the Range header), only the
    /// bytes belonging to this chunk are written.
    async fn download_chunk(
        client: Client,
        url: String,
        start: u64,
        end: u64,
        file: Arc<Mutex<BufWriter<File>>>,
        progress_bar: Option<ProgressBar>,
        headers: &[String],
        force_download: bool,
        should_log: bool,
    ) -> Result<()> {
        let req = Self::apply_headers(
            client
                .get(&url)
                .header("Range", format!("bytes={}-{}", start, end)),
            headers,
        );
        let resp = req.send().await?;
        if resp.status() != StatusCode::PARTIAL_CONTENT && resp.status() != StatusCode::OK {
            if !force_download {
                anyhow::bail!("Server does not support Range requests");
            } else if should_log {
                println!("Server does not support Range requests, ignoring...");
            }
        }
        let mut stream = resp.bytes_stream();
        let mut position = start;
        let mut total_bytes = 0;
        let expected_bytes = end - start + 1;
        while let Some(chunk_result) = stream.next().await {
            let chunk = chunk_result?;
            let chunk_size = chunk.len() as u64;
            total_bytes += chunk_size;
            if total_bytes > expected_bytes {
                // Server sent more than requested: keep only this chunk's
                // share, count it, and stop reading.
                let remaining = expected_bytes - (total_bytes - chunk_size);
                let mut writer = file.lock().await;
                writer.seek(SeekFrom::Start(HEADER_SIZE as u64 + position))?;
                writer.write_all(&chunk[..remaining as usize])?;
                if let Some(pb) = &progress_bar {
                    pb.inc(remaining);
                }
                break;
            }
            let mut writer = file.lock().await;
            writer.seek(SeekFrom::Start(HEADER_SIZE as u64 + position))?;
            writer.write_all(&chunk)?;
            drop(writer);
            position += chunk_size;
            if let Some(pb) = &progress_bar {
                pb.inc(chunk_size);
            }
        }
        Ok(())
    }

    /// HEAD probe: returns (file size, optional server-suggested filename,
    /// ETag). Verifies Range support with a 1-byte probe when the server
    /// does not advertise `accept-ranges: bytes`; the probe now carries the
    /// user headers too, so auth-protected URLs are probed correctly.
    async fn get_file_info(&self) -> Result<(u64, Option<String>, String)> {
        let req = Self::apply_headers(self.client.head(&self.config.url), &self.config.headers);
        let resp = req.send().await?;
        let accepts_ranges = resp
            .headers()
            .get("accept-ranges")
            .and_then(|v| v.to_str().ok())
            .map(|v| v.contains("bytes"))
            .unwrap_or(false);
        if !accepts_ranges {
            let range_check = Self::apply_headers(
                self.client
                    .get(&self.config.url)
                    .header("Range", "bytes=0-0"),
                &self.config.headers,
            )
            .send()
            .await?;
            if range_check.status() != StatusCode::PARTIAL_CONTENT {
                if !self.config.force_download {
                    anyhow::bail!(
                        "Server does not support Range requests, cannot continue with parallel download"
                    );
                } else if self.config.should_log() {
                    println!("Server does not support Range requests, ignoring...");
                }
            }
        }
        let file_size = if let Some(content_length) = resp.headers().get("content-length") {
            content_length.to_str()?.parse()?
        } else {
            anyhow::bail!("Could not determine file size")
        };
        // Servers without an ETag get a time-based pseudo-tag; note this
        // makes every run look like a different download, so resume will
        // start over against such servers.
        let etag = if let Some(etag_header) = resp.headers().get("etag") {
            etag_header.to_str()?.to_string()
        } else {
            format!(
                "no-etag-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap()
                    .as_secs()
            )
        };
        let filename = self.extract_filename_from_response(&resp);
        Ok((file_size, filename, etag))
    }

    /// Filename from `Content-Disposition` when present, else from the URL.
    fn extract_filename_from_response(&self, resp: &reqwest::Response) -> Option<String> {
        if let Some(disposition) = resp.headers().get("content-disposition") {
            if let Ok(disposition_str) = disposition.to_str() {
                if let Some(filename) = Self::parse_content_disposition(disposition_str) {
                    return Some(filename);
                }
            }
        }
        Self::extract_filename_from_url(&self.config.url)
    }

    /// Extracts the `filename=` parameter (quoted or bare) from a
    /// Content-Disposition header value.
    fn parse_content_disposition(disposition: &str) -> Option<String> {
        if let Some(idx) = disposition.find("filename=") {
            let start = idx + 9; // len("filename=")
            let mut end = disposition.len();
            if disposition.as_bytes().get(start) == Some(&b'"') {
                let quoted_name = &disposition[start + 1..];
                if let Some(quote_end) = quoted_name.find('"') {
                    return Some(quoted_name[..quote_end].to_string());
                }
            } else {
                // Bare value runs until the next ';' parameter separator.
                if let Some(semicolon) = disposition[start..].find(';') {
                    end = start + semicolon;
                }
                return Some(disposition[start..end].to_string());
            }
        }
        None
    }

    /// Percent-decoded last path segment of the URL, if any.
    fn extract_filename_from_url(url: &str) -> Option<String> {
        if let Ok(parsed_url) = Url::parse(url) {
            let path = parsed_url.path();
            if let Some(path_filename) = Path::new(path).file_name() {
                if let Some(filename_str) = path_filename.to_str() {
                    if !filename_str.is_empty() {
                        if let Ok(decoded) = urlencoding::decode(filename_str) {
                            return Some(decoded.to_string());
                        }
                    }
                }
            }
        }
        None
    }
}
/// Owns the resume header for one partial download plus the path of its
/// backing file; persists bitmap updates and strips the header on finalize.
struct ResumeManager {
header: HydraHeader,
file_path: String,
}
impl ResumeManager {
    /// Opens (or creates) the partial-download file at `path`.
    ///
    /// Resume rules:
    /// * `force_new` — any existing file is deleted and a fresh one created;
    /// * otherwise an existing file is reused only when its stored URL hash,
    ///   ETag hash and file size all match the current download; an
    ///   incompatible or unreadable file is deleted and recreated;
    /// * `resume_only` — errors out instead of deleting/creating.
    fn try_from_file(
        path: &str,
        file_size: u64,
        etag: &str,
        url: &str,
        chunk_size: u32,
        force_new: bool,
        resume_only: bool,
    ) -> Result<Self> {
        if force_new {
            if Path::new(path).exists() {
                std::fs::remove_file(path)?;
            }
            return Self::create_new_file(path, file_size, etag, url, chunk_size);
        }
        match File::open(path) {
            Ok(existing) => {
                let mut reader = BufReader::new(existing);
                match HydraHeader::read_from_file(&mut reader) {
                    Ok(header) => {
                        let same_url = header.url_hash == Sha256::digest(url.as_bytes()).as_slice();
                        let same_etag = header.etag == Sha256::digest(etag.as_bytes()).as_slice();
                        if same_url && same_etag && header.file_size == file_size {
                            return Ok(Self {
                                header,
                                file_path: path.to_string(),
                            });
                        }
                        if resume_only {
                            anyhow::bail!(
                                "Existing file is not compatible and resume_only option is active"
                            );
                        }
                        std::fs::remove_file(path)?;
                    }
                    Err(e) => {
                        if resume_only {
                            return Err(anyhow::anyhow!("Could not read file to resume: {}", e));
                        }
                        std::fs::remove_file(path)?;
                    }
                }
            }
            Err(_) => {
                if resume_only {
                    anyhow::bail!("File not found and resume_only option is active");
                }
            }
        }
        Self::create_new_file(path, file_size, etag, url, chunk_size)
    }

    /// Creates a new pre-sized file with a freshly serialized header.
    fn create_new_file(
        path: &str,
        file_size: u64,
        etag: &str,
        url: &str,
        chunk_size: u32,
    ) -> Result<Self> {
        let header = HydraHeader::new(file_size, etag, url, chunk_size);
        let new_file = File::create(path)?;
        new_file.set_len(HEADER_SIZE as u64 + file_size)?;
        let mut writer = BufWriter::new(new_file);
        header.write_to_file(&mut writer)?;
        writer.flush()?;
        Ok(Self {
            header,
            file_path: path.to_string(),
        })
    }

    /// Byte ranges of chunks still missing (delegates to the header).
    fn get_incomplete_chunks(&self) -> Vec<(u64, u64)> {
        self.header.get_incomplete_chunks()
    }

    /// Marks a chunk complete in memory and rewrites the on-disk bitmap so
    /// a crash loses at most the in-flight chunks.
    fn set_chunk_complete(&mut self, chunk_index: usize) -> Result<()> {
        self.header.set_chunk_complete(chunk_index)?;
        let backing = OpenOptions::new().write(true).open(&self.file_path)?;
        let mut writer = BufWriter::new(backing);
        // Fixed header fields before the bitmap: magic + version + size
        // + etag + url hash + chunk size + chunk count.
        let bitmap_offset = 5 + 1 + 8 + 32 + 32 + 4 + 4;
        writer.seek(SeekFrom::Start(bitmap_offset as u64))?;
        writer.write_all(self.header.chunks_bitmap.as_raw_slice())?;
        writer.flush()?;
        Ok(())
    }

    /// True when every chunk has been downloaded.
    fn is_download_complete(&self) -> bool {
        self.header.is_download_complete()
    }

    /// Strips the resume header by copying the payload to a temp file and
    /// renaming it over the original (with a remove-then-rename fallback
    /// for platforms where rename cannot overwrite).
    fn finalize_download(&self) -> Result<()> {
        if !self.is_download_complete() {
            anyhow::bail!("Download is not complete");
        }
        let temp_path = format!("{}.tmp", self.file_path);
        let mut reader = BufReader::with_capacity(FINALIZE_BUFFER_SIZE, File::open(&self.file_path)?);
        let mut writer = BufWriter::with_capacity(FINALIZE_BUFFER_SIZE, File::create(&temp_path)?);
        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
        std::io::copy(&mut reader, &mut writer)?;
        writer.flush()?;
        drop(writer);
        if std::fs::rename(&temp_path, &self.file_path).is_err() {
            let _ = std::fs::remove_file(&self.file_path);
            std::fs::rename(&temp_path, &self.file_path)?;
        }
        Ok(())
    }
}
#[tokio::main]
async fn main() -> Result<()> {
    let args = CliArgs::parse();
    // These two flags are mutually exclusive; reject the combination early.
    if args.force_new && args.resume_only {
        eprintln!("Error: --force-new and --resume-only options cannot be used together");
        std::process::exit(1);
    }
    // Convert CLI megabyte values to bytes while building the config.
    let config = DownloadConfig {
        url: args.url.clone(),
        output_path: args.output,
        num_connections: args.connections,
        chunk_size: args.chunk_size * 1024 * 1024,
        buffer_size: args.buffer_size * 1024 * 1024,
        verbose: args.verbose,
        silent: args.silent,
        log: args.log,
        force_new: args.force_new,
        resume_only: args.resume_only,
        headers: args.header,
        force_download: args.force_download,
    };
    let downloader = Downloader {
        client: Client::new(),
        config,
    };
    if downloader.config.should_log() {
        println!(
            "Starting download with {} connections, chunk size: {}MB, buffer: {}MB",
            downloader.config.num_connections, args.chunk_size, args.buffer_size
        );
        println!("URL: {}", args.url);
        if downloader.config.force_new {
            println!("Forcing new download, ignoring existing files");
        } else if downloader.config.resume_only {
            println!("Only resuming existing download");
        } else {
            println!("Resuming download if possible");
        }
    }
    downloader.download().await?;
    Ok(())
}

View File

@@ -2,7 +2,6 @@ const { default: axios } = require("axios");
const util = require("node:util");
const fs = require("node:fs");
const path = require("node:path");
const { spawnSync } = require("node:child_process");
const exec = util.promisify(require("node:child_process").exec);
@@ -47,79 +46,4 @@ const downloadLudusavi = async () => {
});
};
const downloadAria2WindowsAndLinux = async () => {
const file =
process.platform === "win32"
? "aria2-1.37.0-win-64bit-build1.zip"
: "aria2-1.37.0-1-x86_64.pkg.tar.zst";
const downloadUrl =
process.platform === "win32"
? `https://github.com/aria2/aria2/releases/download/release-1.37.0/${file}`
: "https://archlinux.org/packages/extra/x86_64/aria2/download/";
console.log(`Downloading ${file}...`);
const response = await axios.get(downloadUrl, { responseType: "stream" });
const stream = response.data.pipe(fs.createWriteStream(file));
stream.on("finish", async () => {
console.log(`Downloaded ${file}, extracting...`);
if (process.platform === "win32") {
await exec(`npx extract-zip ${file}`);
console.log("Extracted. Renaming folder...");
fs.mkdirSync("aria2");
fs.copyFileSync(
path.join(file.replace(".zip", ""), "aria2c.exe"),
"aria2/aria2c.exe"
);
fs.rmSync(file.replace(".zip", ""), { recursive: true });
} else {
await exec(`tar --zstd -xvf ${file} usr/bin/aria2c`);
console.log("Extracted. Copying binary file...");
fs.mkdirSync("aria2");
fs.copyFileSync("usr/bin/aria2c", "aria2/aria2c");
fs.rmSync("usr", { recursive: true });
}
console.log(`Extracted ${file}, removing compressed downloaded file...`);
fs.rmSync(file);
});
};
const copyAria2Macos = async () => {
console.log("Checking if aria2 is installed...");
const isAria2Installed = spawnSync("which", ["aria2c"]).status;
if (isAria2Installed != 0) {
console.log("Please install aria2");
console.log("brew install aria2");
return;
}
console.log("Copying aria2 binary...");
fs.mkdirSync("aria2");
await exec(`cp $(which aria2c) aria2/aria2c`);
};
const copyAria2 = () => {
const aria2Path =
process.platform === "win32" ? "aria2/aria2c.exe" : "aria2/aria2c";
if (fs.existsSync(aria2Path)) {
console.log("Aria2 already exists, skipping download...");
return;
}
if (process.platform == "darwin") {
copyAria2Macos();
} else {
downloadAria2WindowsAndLinux();
}
};
copyAria2();
downloadLudusavi();

View File

@@ -354,7 +354,8 @@
"common_redist": "Common redistributables",
"common_redist_description": "Common redistributables are required to run some games. Installing them is recommended to avoid issues.",
"install_common_redist": "Install",
"installing_common_redist": "Installing…"
"installing_common_redist": "Installing…",
"show_download_speed_in_megabytes": "Show download speed in megabytes per second"
},
"notifications": {
"download_complete": "Download complete",

View File

@@ -341,7 +341,8 @@
"common_redist": "Componentes recomendados",
"common_redist_description": "Componentes recomendados são necessários para executar alguns jogos. A instalação deles é recomendada para evitar problemas.",
"install_common_redist": "Instalar",
"installing_common_redist": "Instalando…"
"installing_common_redist": "Instalando…",
"show_download_speed_in_megabytes": "Exibir taxas de download em megabytes por segundo"
},
"notifications": {
"download_complete": "Download concluído",

View File

@@ -8,7 +8,6 @@ import { electronApp, optimizer } from "@electron-toolkit/utils";
import { logger, WindowManager } from "@main/services";
import resources from "@locales";
import { PythonRPC } from "./services/python-rpc";
import { Aria2 } from "./services/aria2";
import { db, levelKeys } from "./level";
import { loadState } from "./main";
@@ -143,7 +142,6 @@ app.on("window-all-closed", () => {
app.on("before-quit", () => {
/* Disconnects libtorrent */
PythonRPC.kill();
Aria2.kill();
});
app.on("activate", () => {

View File

@@ -2,7 +2,6 @@ import { DownloadManager, Ludusavi, startMainLoop } from "./services";
import { RealDebridClient } from "./services/download/real-debrid";
import { HydraApi } from "./services/hydra-api";
import { uploadGamesBatch } from "./services/library-sync";
import { Aria2 } from "./services/aria2";
import { downloadsSublevel } from "./level/sublevels/downloads";
import { sortBy } from "lodash-es";
import { Downloader } from "@shared";
@@ -21,8 +20,6 @@ export const loadState = async () => {
await import("./events");
Aria2.spawn();
if (userPreferences?.realDebridApiToken) {
RealDebridClient.authorize(userPreferences.realDebridApiToken);
}

View File

@@ -1,33 +0,0 @@
import path from "node:path";
import cp from "node:child_process";
import { app } from "electron";
export class Aria2 {
  /** Handle to the running aria2c daemon, if one has been spawned. */
  private static childProcess: cp.ChildProcess | null = null;

  /** aria2c executable: bundled in resources when packaged, repo-local otherwise. */
  private static readonly binaryPath = app.isPackaged
    ? path.join(process.resourcesPath, "aria2", "aria2c")
    : path.join(__dirname, "..", "..", "aria2", "aria2c");

  /** Daemon flags: JSON-RPC enabled on all interfaces, 16 connections, 1M pieces. */
  private static readonly launchArgs = [
    "--enable-rpc",
    "--rpc-listen-all",
    "--file-allocation=none",
    "--allow-overwrite=true",
    "-s",
    "16",
    "-x",
    "16",
    "-k",
    "1M",
  ];

  /** Starts the aria2c daemon, inheriting stdio and hiding the Windows console. */
  public static spawn() {
    Aria2.childProcess = cp.spawn(Aria2.binaryPath, Aria2.launchArgs, {
      stdio: "inherit",
      windowsHide: true,
    });
  }

  /** Terminates the daemon if it is running. */
  public static kill() {
    Aria2.childProcess?.kill();
  }
}

View File

@@ -313,6 +313,8 @@ export class DownloadManager {
url: downloadLink,
save_path: download.downloadPath,
header: `Cookie: accountToken=${token}`,
allow_multiple_connections: true,
connections_limit: 8,
};
}
case Downloader.PixelDrain: {
@@ -371,6 +373,7 @@ export class DownloadManager {
game_id: downloadId,
url: downloadUrl,
save_path: download.downloadPath,
allow_multiple_connections: true,
};
}
case Downloader.TorBox: {
@@ -383,6 +386,7 @@ export class DownloadManager {
url,
save_path: download.downloadPath,
out: name,
allow_multiple_connections: true,
};
}
}

View File

@@ -21,6 +21,12 @@ const binaryNameByPlatform: Partial<Record<NodeJS.Platform, string>> = {
win32: "hydra-python-rpc.exe",
};
// Platform-specific filename of the Rust "hydra-httpdl" helper binary;
// looked up via rustBinaryNameByPlatform[process.platform] when assembling
// the RPC child-process arguments below.
const rustBinaryNameByPlatform: Partial<Record<NodeJS.Platform, string>> = {
  darwin: "hydra-httpdl",
  linux: "hydra-httpdl",
  win32: "hydra-httpdl.exe",
};
export class PythonRPC {
public static readonly BITTORRENT_PORT = "5881";
public static readonly RPC_PORT = "8084";
@@ -52,6 +58,20 @@ export class PythonRPC {
this.RPC_PASSWORD,
initialDownload ? JSON.stringify(initialDownload) : "",
initialSeeding ? JSON.stringify(initialSeeding) : "",
app.isPackaged
? path.join(
process.resourcesPath,
rustBinaryNameByPlatform[process.platform]!
)
: path.join(
__dirname,
"..",
"..",
"rust_rpc",
"target",
"debug",
rustBinaryNameByPlatform[process.platform]!
),
];
if (app.isPackaged) {

View File

@@ -137,6 +137,15 @@ export function App() {
}, [fetchUserDetails, updateUserDetails, dispatch]);
const onSignIn = useCallback(() => {
window.electron.getDownloadSources().then((sources) => {
sources.forEach((source) => {
downloadSourcesWorker.postMessage([
"IMPORT_DOWNLOAD_SOURCE",
source.url,
]);
});
});
fetchUserDetails().then((response) => {
if (response) {
updateUserDetails(response);

View File

@@ -15,12 +15,14 @@ import type {
StartGameDownloadPayload,
} from "@types";
import { useDate } from "./use-date";
import { formatBytes } from "@shared";
import { formatBytes, formatBytesToMbps } from "@shared";
export function useDownload() {
const { updateLibrary } = useLibrary();
const { formatDistance } = useDate();
const userPrefs = useAppSelector((state) => state.userPreferences.value);
const { lastPacket, gamesWithDeletionInProgress } = useAppSelector(
(state) => state.download
);
@@ -99,8 +101,14 @@ export function useDownload() {
return gamesWithDeletionInProgress.includes(objectId);
};
const formatDownloadSpeed = (downloadSpeed: number): string => {
return userPrefs?.showDownloadSpeedInMegabytes
? `${formatBytes(downloadSpeed)}/s`
: formatBytesToMbps(downloadSpeed);
};
return {
downloadSpeed: `${formatBytes(lastPacket?.downloadSpeed ?? 0)}/s`,
downloadSpeed: formatDownloadSpeed(lastPacket?.downloadSpeed ?? 0),
progress: formatDownloadProgress(lastPacket?.progress ?? 0),
lastPacket,
eta: calculateETA(),

View File

@@ -23,6 +23,7 @@ export function SettingsBehavior() {
enableAutoInstall: false,
seedAfterDownloadComplete: false,
showHiddenAchievementsDescription: false,
showDownloadSpeedInMegabytes: false,
});
const { t } = useTranslation("settings");
@@ -40,6 +41,8 @@ export function SettingsBehavior() {
userPreferences.seedAfterDownloadComplete ?? false,
showHiddenAchievementsDescription:
userPreferences.showHiddenAchievementsDescription ?? false,
showDownloadSpeedInMegabytes:
userPreferences.showDownloadSpeedInMegabytes ?? false,
});
}
}, [userPreferences]);
@@ -139,6 +142,16 @@ export function SettingsBehavior() {
})
}
/>
<CheckboxField
label={t("show_download_speed_in_megabytes")}
checked={form.showDownloadSpeedInMegabytes}
onChange={() =>
handleChange({
showDownloadSpeedInMegabytes: !form.showDownloadSpeedInMegabytes,
})
}
/>
</>
);
}

View File

@@ -49,6 +49,12 @@ export const formatBytes = (bytes: number): string => {
return `${Math.trunc(formatedByte * 10) / 10} ${FORMAT[base]}`;
};
/**
 * Formats a rate given in bytes per second as a megabits-per-second string,
 * truncated (not rounded) to one decimal place, e.g. `"1.5 Mbps"`.
 *
 * @param bytesPerSecond - transfer rate in bytes per second
 * @returns the rate rendered as `"<value> Mbps"`
 */
export const formatBytesToMbps = (bytesPerSecond: number): string => {
  const bitsPerSecond = bytesPerSecond * 8;

  // Mbps is an SI unit: 1 Mbps = 1,000,000 bits/s. The previous divisor of
  // 1024 * 1024 computed mebibits and under-reported speeds by ~4.9%.
  const mbps = bitsPerSecond / 1000000;

  // Truncate to one decimal, matching formatBytes above.
  return `${Math.trunc(mbps * 10) / 10} Mbps`;
};
export const pipe =
<T>(...fns: ((arg: T) => any)[]) =>
(arg: T) =>

View File

@@ -85,6 +85,7 @@ export interface UserPreferences {
repackUpdatesNotificationsEnabled?: boolean;
achievementNotificationsEnabled?: boolean;
friendRequestNotificationsEnabled?: boolean;
showDownloadSpeedInMegabytes?: boolean;
}
export interface ScreenState {