Compare commits

..

58 Commits

Author SHA1 Message Date
Chubby Granny Chaser
19613b69cc Merge pull request #1553 from hydralauncher/feat/hydra-debrid
Feat/hydra debrid
2025-04-12 22:42:37 +01:00
Hachi-R
42a768f715 fix: remove etag check from resume validation 2025-04-12 18:41:52 -03:00
Chubby Granny Chaser
456c0ad6ff ci: adding rust build to release 2025-04-12 22:12:11 +01:00
Chubby Granny Chaser
76181342f9 chore: bumping version 2025-04-12 22:10:10 +01:00
Chubby Granny Chaser
8f95fa70d4 chore: merge with main 2025-04-12 21:59:26 +01:00
Chubby Granny Chaser
4743b9f082 Merge pull request #1555 from hydralauncher/fix/download-sources
fix: fixing download sources initial sync
2025-04-12 21:58:48 +01:00
Chubby Granny Chaser
db92ef255d fix: fixing resume 2025-04-12 21:58:07 +01:00
Chubby Granny Chaser
d1cdfc0ba5 fix: improving readability 2025-04-12 21:54:52 +01:00
Chubby Granny Chaser
77a4642b7b feat: importing sources on auth 2025-04-12 21:42:22 +01:00
Chubby Granny Chaser
148e272c4d fix: removing out from start download 2025-04-12 21:34:40 +01:00
Chubby Granny Chaser
3bdd8b90d4 chore: merge with main 2025-04-12 21:27:36 +01:00
Chubby Granny Chaser
6569b66801 chore: merge with main 2025-04-12 21:27:03 +01:00
Chubby Granny Chaser
4a11d741eb feat: limiting nimbus to cloud only 2025-04-12 21:24:06 +01:00
Chubby Granny Chaser
6e8a844a92 Merge branch 'main' of github.com:hydralauncher/hydra into feat/hydra-debrid 2025-04-12 21:19:54 +01:00
Chubby Granny Chaser
3821b9836c Merge pull request #1554 from hydralauncher/ci/rust-rpc
feat: adding rust codebase
2025-04-12 21:19:44 +01:00
Chubby Granny Chaser
57390c814b Merge branch 'ci/rust-rpc' of github.com:hydralauncher/hydra into feat/hydra-debrid 2025-04-12 20:27:21 +01:00
Hachi-R
007fa6f009 fix: add connections limit parameter to http downloader 2025-04-12 16:25:45 -03:00
Chubby Granny Chaser
009cb1d7d7 Merge branch 'ci/rust-rpc' of github.com:hydralauncher/hydra into feat/hydra-debrid 2025-04-12 19:58:27 +01:00
Chubby Granny Chaser
306b49eaf3 fix: merge with ci 2025-04-12 19:58:18 +01:00
Hachi-R
be232d88e4 fix: handle exception in http downloader by returning None 2025-04-12 15:55:59 -03:00
Hachi-R
e3670f5b5a fix: add force download flag in httpdl args 2025-04-12 15:54:45 -03:00
Hachi-R
bd018399fb fix: typo 2025-04-12 15:52:18 -03:00
Hachi-R
975eec96be feat: add force download option to http downloader 2025-04-12 15:42:02 -03:00
Chubby Granny Chaser
44b711f674 fix: fixing download sources initial sync 2025-04-12 18:47:33 +01:00
Chubby Granny Chaser
539ff34b69 fix: fixing download sources initial sync 2025-04-12 18:39:43 +01:00
Hachi-R
f99da1d7bf Merge branch 'ci/rust-rpc' of https://github.com/hydralauncher/hydra into ci/rust-rpc 2025-04-12 14:23:13 -03:00
Hachi-R
75c3bbf858 feat: add option to show download speed in megabits 2025-04-12 14:23:02 -03:00
Chubby Granny Chaser
afa78e4634 feat: removing aria2 2025-04-12 18:16:16 +01:00
Chubby Granny Chaser
ee1dda90d9 ci: building rust on dev 2025-04-12 18:00:20 +01:00
Hachi-R
5b62b9c593 feat: add option to show download speed in megabits 2025-04-11 17:09:33 -03:00
Hachi-R
4d76182f2e feat: add support for custom http headers in downloader 2025-04-11 15:37:51 -03:00
Chubby Granny Chaser
85fb57527a ci: adding artifacts 2025-04-11 18:33:14 +01:00
Hachi-R
9e6b6be0b9 feat: add final log 2025-04-11 14:27:27 -03:00
Hachi-R
3c3f77fc50 fix: adjust chunk size and connection limits in http downloader 2025-04-11 14:19:57 -03:00
Chubby Granny Chaser
614cb8a297 Merge branch 'ci/rust-rpc' of github.com:hydralauncher/hydra into ci/rust-rpc 2025-04-11 18:08:18 +01:00
Chubby Granny Chaser
ba3f010576 ci: adding electron builder for http 2025-04-11 18:06:02 +01:00
Hachi-R
8c442e742a fix: add range request support validation 2025-04-11 14:02:06 -03:00
Hachi-R
555b3dbb1d fix: improve file rename handling 2025-04-11 14:00:22 -03:00
Hachi-R
d2a868b504 fix: update retry backoff 2025-04-11 13:51:32 -03:00
Hachi-R
e27536c6b3 feat: chunks vector allocation 2025-04-11 13:49:16 -03:00
Hachi-R
cd367faec2 fix: oneshot channel 2025-04-11 13:46:41 -03:00
Chubby Granny Chaser
087dd9fb2e feat: adding rust codebase 2025-04-11 17:27:33 +01:00
Hachi-R
c5d8403843 fix: update binary path for hydra-httpdl executable 2025-04-10 17:17:20 -03:00
Hachi-R
8e01142225 feat: pass hydra-httpdl binary path to HttpDownloader 2025-04-10 16:40:40 -03:00
Hachi-R
a0ef59a13c fix: correct path separator for hydra-httpdl executable 2025-04-10 16:34:13 -03:00
Hachi-R
13d5e4469f feat: add hydra-httpdl executable to extra resources 2025-04-10 16:09:55 -03:00
Hachi-R
22e92eb8f6 feat: update download speed formatting to Mbps 2025-04-10 16:09:41 -03:00
Hachi-R
d28bb825a3 feat: add hydra-httpdl executable 2025-04-10 15:44:24 -03:00
Hachi-R
96d59a0fd7 fix: improve game folder deletion logic 2025-04-10 15:43:55 -03:00
Hachi-R
84600ea0dc feat: implement hydra-httpdl for download management 2025-04-10 15:43:38 -03:00
Chubby Granny Chaser
9264fa3664 fix: vibe coding 2025-04-09 17:10:57 +01:00
Chubby Granny Chaser
5b0ea980de fix: vibe coding 2025-04-09 17:07:45 +01:00
Chubby Granny Chaser
622fc393fc fix: vibe coding 2025-04-09 17:06:35 +01:00
Chubby Granny Chaser
f76ba5975d fix: tweaking download options 2025-04-09 16:06:01 +01:00
Chubby Granny Chaser
4da0dac0e6 feat: adding hydra debrid 2025-04-09 16:02:50 +01:00
Hachi-R
7c468ac9bb fix: remove allow_multiple_connections from download method 2025-04-09 11:29:12 -03:00
Hachi-R
2ee3bebfc7 fix: remove allow_multiple_connections from download options 2025-04-09 11:20:42 -03:00
Chubby Granny Chaser
98ed07d6d2 feat: adding hydra debrid 2025-04-09 13:02:22 +01:00
41 changed files with 3491 additions and 287 deletions

View File

@@ -31,6 +31,16 @@ jobs:
with:
python-version: 3.9
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
components: rustfmt
- name: Build Rust
run: cargo build --release
working-directory: ./rust_rpc
- name: Install dependencies
run: pip install -r requirements.txt

View File

@@ -33,6 +33,16 @@ jobs:
with:
python-version: 3.9
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
components: rustfmt
- name: Build Rust
run: cargo build --release
working-directory: ./rust_rpc
- name: Install dependencies
run: pip install -r requirements.txt

4
.gitignore vendored
View File

@@ -9,10 +9,12 @@ out
.vite
ludusavi/
hydra-python-rpc/
aria2/
.python-version
# Sentry Config File
.env.sentry-build-plugin
*storybook.log
target/

BIN
binaries/hydra-httpdl.exe Normal file

Binary file not shown.

View File

@@ -3,7 +3,6 @@ productName: Hydra
directories:
buildResources: build
extraResources:
- aria2
- ludusavi
- hydra-python-rpc
- seeds
@@ -23,6 +22,7 @@ win:
extraResources:
- from: binaries/7z.exe
- from: binaries/7z.dll
- from: rust_rpc/target/release/hydra-httpdl.exe
target:
- nsis
- portable
@@ -40,6 +40,7 @@ mac:
entitlementsInherit: build/entitlements.mac.plist
extraResources:
- from: binaries/7zz
- from: rust_rpc/target/release/hydra-httpdl
extendInfo:
- NSCameraUsageDescription: Application requests access to the device's camera.
- NSMicrophoneUsageDescription: Application requests access to the device's microphone.
@@ -51,6 +52,7 @@ dmg:
linux:
extraResources:
- from: binaries/7zzs
- from: rust_rpc/target/release/hydra-httpdl
target:
- AppImage
- snap

View File

@@ -1,6 +1,6 @@
{
"name": "hydralauncher",
"version": "3.4.0",
"version": "3.4.1",
"description": "Hydra",
"main": "./out/main/index.js",
"author": "Los Broxas",
@@ -21,7 +21,7 @@
"typecheck:web": "tsc --noEmit -p tsconfig.web.json --composite false",
"typecheck": "npm run typecheck:node && npm run typecheck:web",
"start": "electron-vite preview",
"dev": "electron-vite dev",
"dev": "cargo build --manifest-path=rust_rpc/Cargo.toml && electron-vite dev",
"build": "npm run typecheck && electron-vite build",
"postinstall": "electron-builder install-app-deps && node ./scripts/postinstall.cjs",
"build:unpack": "npm run build && electron-builder --dir",

View File

@@ -1,61 +1,94 @@
import aria2p
import os
import subprocess
import json
class HttpDownloader:
def __init__(self):
self.download = None
self.aria2 = aria2p.API(
aria2p.Client(
host="http://localhost",
port=6800,
secret=""
)
)
def __init__(self, hydra_httpdl_bin: str):
self.hydra_exe = hydra_httpdl_bin
self.process = None
self.last_status = None
def start_download(self, url: str, save_path: str, header: str, out: str = None, allow_multiple_connections: bool = False):
if self.download:
self.aria2.resume([self.download])
else:
options = {
"header": header,
"dir": save_path,
"out": out
}
def start_download(self, url: str, save_path: str, header: str = None, allow_multiple_connections: bool = False, connections_limit: int = 1):
cmd = [self.hydra_exe]
cmd.append(url)
cmd.extend([
"--chunk-size", "10",
"--buffer-size", "16",
"--force-download",
"--log",
"--silent"
])
if header:
cmd.extend(["--header", header])
if allow_multiple_connections:
cmd.extend(["--connections", str(connections_limit)])
else:
cmd.extend(["--connections", "1"])
print(f"running hydra-httpdl: {' '.join(cmd)}")
try:
self.process = subprocess.Popen(
cmd,
cwd=save_path,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True
)
except Exception as e:
print(f"error running hydra-httpdl: {e}")
return None
if allow_multiple_connections:
options.update({
"split": "16",
"max-connection-per-server": "16",
"min-split-size": "1M"
})
downloads = self.aria2.add(url, options=options)
def get_download_status(self):
if not self.process:
return None
try:
line = self.process.stdout.readline()
if line:
status = json.loads(line.strip())
self.last_status = status
elif self.last_status:
status = self.last_status
else:
return None
response = {
"status": "active",
"progress": status["progress"],
"downloadSpeed": status["speed_bps"],
"numPeers": 0,
"numSeeds": 0,
"bytesDownloaded": status["downloaded_bytes"],
"fileSize": status["total_bytes"],
"folderName": status["filename"]
}
if status["progress"] == 1:
response["status"] = "complete"
return response
except Exception as e:
print(f"error getting download status: {e}")
return None
self.download = downloads[0]
def pause_download(self):
if self.download:
self.aria2.pause([self.download])
def cancel_download(self):
if self.download:
self.aria2.remove([self.download])
self.download = None
def get_download_status(self):
if self.download == None:
return None
download = self.aria2.get_download(self.download.gid)
response = {
'folderName': download.name,
'fileSize': download.total_length,
'progress': download.completed_length / download.total_length if download.total_length else 0,
'downloadSpeed': download.download_speed,
'numPeers': 0,
'numSeeds': 0,
'status': download.status,
'bytesDownloaded': download.completed_length,
}
return response
def stop_download(self):
if self.process:
self.process.terminate()
self.process = None
self.last_status = None
def pause_download(self):
self.stop_download()
def cancel_download(self):
self.stop_download()

View File

@@ -13,6 +13,7 @@ http_port = sys.argv[2]
rpc_password = sys.argv[3]
start_download_payload = sys.argv[4]
start_seeding_payload = sys.argv[5]
hydra_httpdl_bin = sys.argv[6]
downloads = {}
# This can be streamed down from Node
@@ -32,10 +33,10 @@ if start_download_payload:
except Exception as e:
print("Error starting torrent download", e)
else:
http_downloader = HttpDownloader()
http_downloader = HttpDownloader(hydra_httpdl_bin)
downloads[initial_download['game_id']] = http_downloader
try:
http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get("out"))
http_downloader.start_download(initial_download['url'], initial_download['save_path'], initial_download.get('header'), initial_download.get('allow_multiple_connections', False), initial_download.get('connections_limit', 24))
except Exception as e:
print("Error starting http download", e)
@@ -147,11 +148,11 @@ def action():
torrent_downloader.start_download(url, data['save_path'])
else:
if existing_downloader and isinstance(existing_downloader, HttpDownloader):
existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'), data.get('allow_multiple_connections', False))
existing_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
else:
http_downloader = HttpDownloader()
http_downloader = HttpDownloader(hydra_httpdl_bin)
downloads[game_id] = http_downloader
http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('out'), data.get('allow_multiple_connections', False))
http_downloader.start_download(url, data['save_path'], data.get('header'), data.get('allow_multiple_connections', False), data.get('connections_limit', 24))
downloading_game_id = game_id

View File

@@ -5,4 +5,3 @@ pywin32; sys_platform == 'win32'
psutil
Pillow
flask
aria2p

2040
rust_rpc/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

25
rust_rpc/Cargo.toml Normal file
View File

@@ -0,0 +1,25 @@
[package]
name = "hydra-httpdl"
version = "0.1.0"
edition = "2021"
[dependencies]
tokio = { version = "1", features = ["full", "macros", "rt-multi-thread"] }
reqwest = { version = "0.12.5", features = ["stream"] }
futures = "0.3"
bytes = "1.4"
indicatif = "0.17"
anyhow = "1.0"
async-trait = "0.1"
tokio-util = { version = "0.7", features = ["io"] }
clap = { version = "4.4", features = ["derive"] }
urlencoding = "2.1"
serde_json = "1.0"
bitvec = "1.0"
sha2 = "0.10"
[profile.release]
opt-level = 3
lto = "fat"
codegen-units = 1
panic = "abort"
strip = true

966
rust_rpc/src/main.rs Normal file
View File

@@ -0,0 +1,966 @@
use anyhow::Result;
use bitvec::prelude::*;
use clap::Parser;
use futures::stream::{FuturesUnordered, StreamExt};
use indicatif::{ProgressBar, ProgressStyle};
use reqwest::{Client, StatusCode, Url};
use serde_json::json;
use sha2::{Digest, Sha256};
use std::fs::{File, OpenOptions};
use std::io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::path::Path;
use std::sync::Arc;
use tokio::sync::Mutex;
// Retry/backoff tuning for failed chunk downloads.
const DEFAULT_MAX_RETRIES: usize = 3;
const RETRY_BACKOFF_MS: u64 = 500; // base delay; doubled per retry (exponential backoff)

// Placeholder output name; replaced by the server-suggested filename when available.
const DEFAULT_OUTPUT_FILENAME: &str = "output.bin";

// Defaults for the CLI flags defined on CliArgs below.
const DEFAULT_CONNECTIONS: usize = 16;
const DEFAULT_CHUNK_SIZE_MB: usize = 5;
const DEFAULT_BUFFER_SIZE_MB: usize = 8;
const DEFAULT_VERBOSE: bool = false;
const DEFAULT_SILENT: bool = false;
const DEFAULT_LOG: bool = false;
const DEFAULT_FORCE_NEW: bool = false;
const DEFAULT_RESUME_ONLY: bool = false;
const DEFAULT_FORCE_DOWNLOAD: bool = false;

// On-disk resume format: a fixed-size header is prepended to the payload,
// carrying magic/version/sizes/hashes plus the completed-chunk bitmap.
const HEADER_SIZE: usize = 4096;
const MAGIC_NUMBER: &[u8; 5] = b"HYDRA";
const FORMAT_VERSION: u8 = 1;

// Buffer size for finalization; presumably used when stripping the resume
// header from the finished file — usage is outside this view, confirm.
const FINALIZE_BUFFER_SIZE: usize = 1024 * 1024;
/// Command-line interface, parsed by clap's derive API.
#[derive(Parser)]
#[command(name = "hydra-httpdl")]
#[command(author = "los-broxas")]
#[command(version = "0.2.0")]
#[command(about = "high speed and low resource usage http downloader with resume capability", long_about = None)]
struct CliArgs {
    /// file url to download
    #[arg(required = true)]
    url: String,
    /// output file path (or directory to save with original filename)
    #[arg(default_value = DEFAULT_OUTPUT_FILENAME)]
    output: String,
    /// number of concurrent connections for parallel download
    #[arg(short = 'c', long, default_value_t = DEFAULT_CONNECTIONS)]
    connections: usize,
    /// chunk size in MB for each connection
    #[arg(short = 'k', long, default_value_t = DEFAULT_CHUNK_SIZE_MB)]
    chunk_size: usize,
    /// buffer size in MB for file writing
    #[arg(short, long, default_value_t = DEFAULT_BUFFER_SIZE_MB)]
    buffer_size: usize,
    /// show detailed progress information
    #[arg(short = 'v', long, default_value_t = DEFAULT_VERBOSE)]
    verbose: bool,
    /// suppress progress bar
    #[arg(short = 's', long, default_value_t = DEFAULT_SILENT)]
    silent: bool,
    /// log download statistics in JSON format every second
    #[arg(short = 'l', long, default_value_t = DEFAULT_LOG)]
    log: bool,
    /// force new download, ignore existing partial files
    #[arg(short = 'f', long, default_value_t = DEFAULT_FORCE_NEW)]
    force_new: bool,
    /// only resume existing download, exit if no partial file exists
    #[arg(short = 'r', long, default_value_t = DEFAULT_RESUME_ONLY)]
    resume_only: bool,
    /// force download, ignore some verification checks
    #[arg(short = 'F', long, default_value_t = DEFAULT_FORCE_DOWNLOAD)]
    force_download: bool,
    /// HTTP headers to send with request (format: "Key: Value")
    #[arg(short = 'H', long)]
    header: Vec<String>,
}
/// Resolved download settings consumed by `Downloader`.
struct DownloadConfig {
    url: String,
    output_path: String,
    num_connections: usize, // max concurrent chunk tasks
    // NOTE(review): this value is used directly as a *byte* count by the
    // chunk-index math in process_chunks_with_resume, while the CLI flag is
    // documented in MB — confirm the MB->bytes conversion happens where this
    // struct is built (construction is not visible in this chunk).
    chunk_size: usize,
    buffer_size: usize, // BufWriter capacity for file writes
    verbose: bool,
    silent: bool,
    log: bool,        // emit per-second JSON stats lines
    force_new: bool,  // discard any existing partial file
    resume_only: bool, // fail instead of starting a fresh download
    headers: Vec<String>, // raw "Key: Value" strings forwarded on each request
    force_download: bool, // proceed even without server Range support
}

impl DownloadConfig {
    /// Human-readable progress messages: verbose and not silenced.
    fn should_log(&self) -> bool {
        self.verbose && !self.silent
    }

    /// Machine-readable JSON stats requested via --log.
    fn should_log_stats(&self) -> bool {
        self.log
    }
}
/// One snapshot of download progress, serialized into the --log JSON output.
struct DownloadStats {
    progress_percent: f64, // fraction complete in [0.0, 1.0]
    bytes_downloaded: u64,
    total_size: u64,
    speed_bytes_per_sec: f64,
    eta_seconds: u64,
    elapsed_seconds: u64,
}
/// On-disk header prepended to partially-downloaded files so a download can
/// be resumed. Layout (little-endian, zero-padded to `HEADER_SIZE` bytes):
/// magic(5) | version(1) | file_size(8) | etag-sha256(32) | url-sha256(32) |
/// chunk_size(4) | chunk_count(4) | chunk bitmap (1 bit per chunk).
struct HydraHeader {
    magic: [u8; 5],            // "HYDRA" identifier
    version: u8,               // header format version
    file_size: u64,            // payload size in bytes
    etag: [u8; 32],            // sha256 of the server ETag (or synthetic fallback)
    url_hash: [u8; 32],        // sha256 of the source URL
    chunk_size: u32,           // bytes per chunk
    chunk_count: u32,          // total number of chunks
    chunks_bitmap: BitVec<u8>, // bit i set => chunk i fully written
}

// Size of the fixed (non-bitmap) header fields: magic + version + file_size
// + etag + url_hash + chunk_size + chunk_count.
const HEADER_FIXED_BYTES: usize = 5 + 1 + 8 + 32 + 32 + 4 + 4;

impl HydraHeader {
    /// Build a fresh header with an all-zero (nothing downloaded) bitmap.
    fn new(file_size: u64, etag: &str, url: &str, chunk_size: u32) -> Self {
        // Integer ceiling division: the previous f64 round-trip loses
        // precision for sizes above 2^53. Guard chunk_size == 0 to avoid a
        // divide-by-zero panic on degenerate input.
        let chunk_count = if chunk_size == 0 {
            0
        } else {
            ((file_size + chunk_size as u64 - 1) / chunk_size as u64) as u32
        };
        let chunks_bitmap = bitvec![u8, Lsb0; 0; chunk_count as usize];
        let mut etag_hash = [0u8; 32];
        let etag_digest = Sha256::digest(etag.as_bytes());
        etag_hash.copy_from_slice(&etag_digest[..]);
        let mut url_hash = [0u8; 32];
        let url_digest = Sha256::digest(url.as_bytes());
        url_hash.copy_from_slice(&url_digest[..]);
        Self {
            magic: *MAGIC_NUMBER,
            version: FORMAT_VERSION,
            file_size,
            etag: etag_hash,
            url_hash,
            chunk_size,
            chunk_count,
            chunks_bitmap,
        }
    }

    /// Serialize the header at the writer's current position, padding the
    /// total written size to exactly `HEADER_SIZE` bytes.
    ///
    /// Returns an error (instead of panicking on usize underflow, as the
    /// previous version did) when the chunk bitmap is too large to fit in
    /// the fixed-size header area.
    fn write_to_file<W: Write + Seek>(&self, writer: &mut W) -> Result<()> {
        writer.write_all(&self.magic)?;
        writer.write_all(&[self.version])?;
        writer.write_all(&self.file_size.to_le_bytes())?;
        writer.write_all(&self.etag)?;
        writer.write_all(&self.url_hash)?;
        writer.write_all(&self.chunk_size.to_le_bytes())?;
        writer.write_all(&self.chunk_count.to_le_bytes())?;
        let bitmap_bytes = self.chunks_bitmap.as_raw_slice();
        writer.write_all(bitmap_bytes)?;
        let header_size = HEADER_FIXED_BYTES + bitmap_bytes.len();
        let padding_size = HEADER_SIZE.checked_sub(header_size).ok_or_else(|| {
            anyhow::anyhow!(
                "chunk bitmap ({} bytes) does not fit in the {}-byte header; use a larger chunk size",
                bitmap_bytes.len(),
                HEADER_SIZE
            )
        })?;
        writer.write_all(&vec![0u8; padding_size])?;
        Ok(())
    }

    /// Deserialize and validate a header, leaving the reader positioned at
    /// the start of the payload (`HEADER_SIZE`).
    fn read_from_file<R: Read + Seek>(reader: &mut R) -> Result<Self> {
        let mut magic = [0u8; 5];
        reader.read_exact(&mut magic)?;
        if magic != *MAGIC_NUMBER {
            anyhow::bail!("Not a valid Hydra download file");
        }
        let mut version = [0u8; 1];
        reader.read_exact(&mut version)?;
        if version[0] != FORMAT_VERSION {
            anyhow::bail!("Incompatible format version");
        }
        let mut file_size_bytes = [0u8; 8];
        reader.read_exact(&mut file_size_bytes)?;
        let file_size = u64::from_le_bytes(file_size_bytes);
        let mut etag = [0u8; 32];
        reader.read_exact(&mut etag)?;
        let mut url_hash = [0u8; 32];
        reader.read_exact(&mut url_hash)?;
        let mut chunk_size_bytes = [0u8; 4];
        reader.read_exact(&mut chunk_size_bytes)?;
        let chunk_size = u32::from_le_bytes(chunk_size_bytes);
        let mut chunk_count_bytes = [0u8; 4];
        reader.read_exact(&mut chunk_count_bytes)?;
        let chunk_count = u32::from_le_bytes(chunk_count_bytes);
        let bitmap_bytes_len = (chunk_count as usize + 7) / 8;
        // Reject counts that write_to_file could never have produced; this
        // also guards against large allocations from a corrupt header.
        if HEADER_FIXED_BYTES + bitmap_bytes_len > HEADER_SIZE {
            anyhow::bail!("corrupt header: chunk bitmap larger than header area");
        }
        let mut bitmap_bytes = vec![0u8; bitmap_bytes_len];
        reader.read_exact(&mut bitmap_bytes)?;
        let chunks_bitmap = BitVec::<u8, Lsb0>::from_vec(bitmap_bytes);
        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
        Ok(Self {
            magic,
            version: version[0],
            file_size,
            etag,
            url_hash,
            chunk_size,
            chunk_count,
            chunks_bitmap,
        })
    }

    /// Mark chunk `chunk_index` as fully written (in memory only).
    fn set_chunk_complete(&mut self, chunk_index: usize) -> Result<()> {
        if chunk_index >= self.chunk_count as usize {
            anyhow::bail!("Chunk index out of bounds");
        }
        self.chunks_bitmap.set(chunk_index, true);
        Ok(())
    }

    /// Whether chunk `chunk_index` is fully written; false when out of range.
    fn is_chunk_complete(&self, chunk_index: usize) -> bool {
        if chunk_index >= self.chunk_count as usize {
            return false;
        }
        self.chunks_bitmap[chunk_index]
    }

    /// Inclusive byte ranges of every chunk still to be downloaded.
    fn get_incomplete_chunks(&self) -> Vec<(u64, u64)> {
        let incomplete_count = self.chunk_count as usize - self.chunks_bitmap.count_ones();
        let mut chunks = Vec::with_capacity(incomplete_count);
        let chunk_size = self.chunk_size as u64;
        for i in 0..self.chunk_count as usize {
            if !self.is_chunk_complete(i) {
                let start = i as u64 * chunk_size;
                // Last chunk is clipped to the end of the file.
                let end = std::cmp::min((i as u64 + 1) * chunk_size - 1, self.file_size - 1);
                chunks.push((start, end));
            }
        }
        chunks
    }

    /// True when every chunk bit is set.
    fn is_download_complete(&self) -> bool {
        self.chunks_bitmap.count_ones() == self.chunk_count as usize
    }
}
/// Thin wrapper around an optional indicatif progress bar; every method is
/// a no-op when the bar is absent.
struct ProgressTracker {
    bar: Option<ProgressBar>,
}

impl ProgressTracker {
    /// Create a tracker. A bar is allocated when not silent OR when JSON
    /// stats are enabled (get_stats reads position/speed/ETA from the bar);
    /// under `silent` the bar is kept but hidden so nothing is drawn.
    fn new(file_size: u64, silent: bool, enable_stats: bool) -> Result<Self> {
        let bar = if !silent || enable_stats {
            let pb = ProgressBar::new(file_size);
            pb.set_style(
                ProgressStyle::default_bar()
                    .template("[{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec}, {eta})")?
            );
            if silent {
                pb.set_draw_target(indicatif::ProgressDrawTarget::hidden());
            }
            Some(pb)
        } else {
            None
        };
        Ok(Self { bar })
    }

    /// Advance the bar by `amount` bytes.
    fn increment(&self, amount: u64) {
        if let Some(pb) = &self.bar {
            pb.inc(amount);
        }
    }

    /// Mark the bar finished.
    fn finish(&self) {
        if let Some(pb) = &self.bar {
            pb.finish_with_message("Download complete");
        }
    }

    /// Snapshot of progress derived from the bar; None when the bar is off.
    fn get_stats(&self) -> Option<DownloadStats> {
        if let Some(pb) = &self.bar {
            let position = pb.position();
            // Fall back to 1 so the division below cannot be by zero.
            let total = pb.length().unwrap_or(1);
            Some(DownloadStats {
                progress_percent: position as f64 / total as f64,
                bytes_downloaded: position,
                total_size: total,
                speed_bytes_per_sec: pb.per_sec(),
                eta_seconds: pb.eta().as_secs(),
                elapsed_seconds: pb.elapsed().as_secs(),
            })
        } else {
            None
        }
    }
}
/// HTTP downloader: a shared reqwest client plus the resolved configuration.
struct Downloader {
    client: Client, // reused for the HEAD probe and all chunk requests
    config: DownloadConfig,
}
impl Downloader {
    /// Top-level download flow: probe the server for size/filename/etag,
    /// resolve the output path, open (or resume) the on-disk state, then
    /// fetch whatever chunks are still missing.
    async fn download(&self) -> Result<()> {
        let (file_size, filename, etag) = self.get_file_info().await?;
        let output_path = self.determine_output_path(filename);
        if self.config.should_log() {
            println!("Detected filename: {}", output_path);
        }
        // Reuses an existing partial file when its header matches this
        // URL and size; otherwise creates a fresh one.
        let resume_manager = ResumeManager::try_from_file(
            &output_path,
            file_size,
            &etag,
            &self.config.url,
            self.config.chunk_size as u32,
            self.config.force_new,
            self.config.resume_only,
        )?;
        let file = self.prepare_output_file(&output_path, file_size)?;
        let progress = ProgressTracker::new(file_size, self.config.silent, self.config.log)?;
        let chunks = if resume_manager.is_download_complete() {
            // Everything was already fetched in a previous run.
            if self.config.should_log() {
                println!("File is already fully downloaded, finalizing...");
            }
            resume_manager.finalize_download()?;
            return Ok(());
        } else {
            let completed_chunks = resume_manager.header.chunks_bitmap.count_ones() as u32;
            let total_chunks = resume_manager.header.chunk_count;
            if completed_chunks > 0 {
                if self.config.should_log() {
                    let percent_done = (completed_chunks as f64 / total_chunks as f64) * 100.0;
                    println!("Resuming download: {:.1}% already downloaded", percent_done);
                }
                // Pre-advance the bar by the (approximate) resumed amount.
                if let Some(pb) = &progress.bar {
                    let downloaded = file_size * completed_chunks as u64 / total_chunks as u64;
                    pb.inc(downloaded);
                }
            }
            resume_manager.get_incomplete_chunks()
        };
        if self.config.should_log() {
            println!(
                "Downloading {} chunks of total {}",
                chunks.len(),
                resume_manager.header.chunk_count
            );
        }
        self.process_chunks_with_resume(
            chunks,
            file,
            file_size,
            progress,
            output_path.clone(),
            resume_manager,
        )
        .await?;
        Ok(())
    }
fn determine_output_path(&self, filename: Option<String>) -> String {
if Path::new(&self.config.output_path)
.file_name()
.unwrap_or_default()
== DEFAULT_OUTPUT_FILENAME
&& filename.is_some()
{
filename.unwrap()
} else {
self.config.output_path.clone()
}
}
    /// Open the output file wrapped in a shared buffered writer. A new file
    /// is pre-sized to header + payload so chunks can be written at
    /// arbitrary offsets.
    // NOTE(review): ResumeManager::try_from_file normally creates this file
    // first, so the create branch looks like a fallback — confirm.
    fn prepare_output_file(&self, path: &str, size: u64) -> Result<Arc<Mutex<BufWriter<File>>>> {
        let file = if Path::new(path).exists() {
            OpenOptions::new().read(true).write(true).open(path)?
        } else {
            let file = File::create(path)?;
            file.set_len(HEADER_SIZE as u64 + size)?;
            file
        };
        Ok(Arc::new(Mutex::new(BufWriter::with_capacity(
            self.config.buffer_size,
            file,
        ))))
    }
    /// Download the given byte ranges with bounded concurrency
    /// (`num_connections` in-flight tasks), persisting the chunk bitmap
    /// after each completed chunk and emitting one JSON stats line per
    /// second when --log is enabled.
    async fn process_chunks_with_resume(
        &self,
        chunks: Vec<(u64, u64)>,
        file: Arc<Mutex<BufWriter<File>>>,
        _file_size: u64,
        progress: ProgressTracker,
        real_filename: String,
        resume_manager: ResumeManager,
    ) -> Result<()> {
        let mut tasks = FuturesUnordered::new();
        // Optional background task printing one JSON stats line per second,
        // stopped via a oneshot channel once all chunks are done.
        let log_progress = if self.config.should_log_stats() {
            let progress_clone = progress.bar.clone();
            let filename = real_filename.clone();
            let (log_cancel_tx, mut log_cancel_rx) = tokio::sync::oneshot::channel();
            let log_task = tokio::spawn(async move {
                let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(1));
                let tracker = ProgressTracker {
                    bar: progress_clone,
                };
                loop {
                    tokio::select! {
                        _ = interval.tick() => {
                            if let Some(stats) = tracker.get_stats() {
                                let json_output = json!({
                                    "progress": stats.progress_percent,
                                    "speed_bps": stats.speed_bytes_per_sec,
                                    "downloaded_bytes": stats.bytes_downloaded,
                                    "total_bytes": stats.total_size,
                                    "eta_seconds": stats.eta_seconds,
                                    "elapsed_seconds": stats.elapsed_seconds,
                                    "filename": filename
                                });
                                println!("{}", json_output);
                            }
                        }
                        _ = &mut log_cancel_rx => {
                            break;
                        }
                    }
                }
            });
            Some((log_task, log_cancel_tx))
        } else {
            None
        };
        let resume_manager = Arc::new(Mutex::new(resume_manager));
        for (start, end) in chunks {
            let client = self.client.clone();
            let url = self.config.url.clone();
            let file_clone = Arc::clone(&file);
            let pb_clone = progress.bar.clone();
            let manager_clone = Arc::clone(&resume_manager);
            let headers = self.config.headers.clone();
            let force_download = self.config.force_download;
            let should_log = self.config.should_log();
            // NOTE(review): chunk_size must be in the same unit (bytes) as
            // the ranges in `chunks` for chunk_index to be right — confirm
            // config.chunk_size is converted from MB before reaching here.
            let chunk_size = self.config.chunk_size as u64;
            let chunk_index = (start / chunk_size) as usize;
            tasks.push(tokio::spawn(async move {
                let result = Self::download_chunk_with_retry(
                    client,
                    url,
                    start,
                    end,
                    file_clone,
                    pb_clone,
                    DEFAULT_MAX_RETRIES,
                    &headers,
                    force_download,
                    should_log,
                )
                .await;
                if result.is_ok() {
                    // Persist resume state only after the chunk fully succeeded.
                    let mut manager = manager_clone.lock().await;
                    manager.set_chunk_complete(chunk_index)?;
                }
                result
            }));
            // Cap in-flight tasks: wait for one to finish before spawning more.
            if tasks.len() >= self.config.num_connections {
                if let Some(result) = tasks.next().await {
                    result??;
                }
            }
        }
        // Drain the remaining tasks; ?? surfaces both join and download errors.
        while let Some(result) = tasks.next().await {
            result??;
        }
        {
            let mut writer = file.lock().await;
            writer.flush()?;
        }
        progress.finish();
        if let Some((log_handle, log_cancel_tx)) = log_progress {
            if self.config.should_log_stats() {
                // Emit a final 100% stats line before stopping the logger.
                let json_output = json!({
                    "progress": 1.0,
                    "speed_bps": 0.0,
                    "downloaded_bytes": _file_size,
                    "total_bytes": _file_size,
                    "eta_seconds": 0,
                    "elapsed_seconds": if let Some(pb) = &progress.bar { pb.elapsed().as_secs() } else { 0 },
                    "filename": real_filename
                });
                println!("{}", json_output);
            }
            let _ = log_cancel_tx.send(());
            let _ = log_handle.await;
        }
        let manager = resume_manager.lock().await;
        if manager.is_download_complete() {
            if self.config.should_log() {
                println!("Download complete, finalizing file...");
            }
            manager.finalize_download()?;
        }
        Ok(())
    }
    /// Download one byte range, retrying on error with exponential backoff
    /// (RETRY_BACKOFF_MS * 2^(attempt-1)) up to `max_retries` attempts.
    async fn download_chunk_with_retry(
        client: Client,
        url: String,
        start: u64,
        end: u64,
        file: Arc<Mutex<BufWriter<File>>>,
        progress_bar: Option<ProgressBar>,
        max_retries: usize,
        headers: &[String],
        force_download: bool,
        should_log: bool,
    ) -> Result<()> {
        let mut retries = 0;
        loop {
            match Self::download_chunk(
                client.clone(),
                url.clone(),
                start,
                end,
                file.clone(),
                progress_bar.clone(),
                headers,
                force_download,
                should_log,
            )
            .await
            {
                Ok(_) => return Ok(()),
                Err(e) => {
                    retries += 1;
                    if retries >= max_retries {
                        // Out of attempts: surface the last error to the caller.
                        return Err(e);
                    }
                    tokio::time::sleep(tokio::time::Duration::from_millis(
                        RETRY_BACKOFF_MS * (2_u64.pow(retries as u32 - 1)),
                    ))
                    .await;
                }
            }
        }
    }
    /// Stream one byte range into the shared file, writing at offset
    /// `start` shifted by HEADER_SIZE (the resume header occupies the
    /// front of the file).
    async fn download_chunk(
        client: Client,
        url: String,
        start: u64,
        end: u64,
        file: Arc<Mutex<BufWriter<File>>>,
        progress_bar: Option<ProgressBar>,
        headers: &[String],
        force_download: bool,
        should_log: bool,
    ) -> Result<()> {
        let mut req = client
            .get(&url)
            .header("Range", format!("bytes={}-{}", start, end));
        // Forward user-supplied "Key: Value" headers.
        for header in headers {
            if let Some(idx) = header.find(':') {
                let (name, value) = header.split_at(idx);
                let value = value[1..].trim();
                req = req.header(name.trim(), value);
            }
        }
        let resp = req.send().await?;
        // NOTE(review): with force_download, a 200 response may carry the
        // whole body rather than the requested range, yet bytes are still
        // written starting at `start` — confirm this is intended.
        if resp.status() != StatusCode::PARTIAL_CONTENT && resp.status() != StatusCode::OK {
            if !force_download {
                anyhow::bail!("Server does not support Range requests");
            } else if should_log {
                println!("Server does not support Range requests, ignoring...");
            }
        }
        let mut stream = resp.bytes_stream();
        let mut position = start;
        let mut total_bytes = 0;
        let expected_bytes = end - start + 1;
        while let Some(chunk_result) = stream.next().await {
            let chunk = chunk_result?;
            let chunk_size = chunk.len() as u64;
            total_bytes += chunk_size;
            if total_bytes > expected_bytes {
                // Server sent more than requested: write only the remainder
                // of this range, then stop reading.
                let remaining = expected_bytes - (total_bytes - chunk_size);
                let mut writer = file.lock().await;
                writer.seek(SeekFrom::Start(HEADER_SIZE as u64 + position))?;
                writer.write_all(&chunk[..remaining as usize])?;
                let tracker = ProgressTracker {
                    bar: progress_bar.clone(),
                };
                tracker.increment(remaining);
                break;
            }
            let mut writer = file.lock().await;
            writer.seek(SeekFrom::Start(HEADER_SIZE as u64 + position))?;
            writer.write_all(&chunk)?;
            drop(writer); // release the file lock before updating progress
            position += chunk_size;
            let tracker = ProgressTracker {
                bar: progress_bar.clone(),
            };
            tracker.increment(chunk_size);
        }
        Ok(())
    }
    /// HEAD the URL to learn (file size, suggested filename, etag). When the
    /// server sends no ETag, a synthetic timestamp-based value is used so
    /// resume headers still have something to hash. Range support is checked
    /// via Accept-Ranges or a 1-byte probe; without it the download aborts
    /// unless force_download is set.
    async fn get_file_info(&self) -> Result<(u64, Option<String>, String)> {
        let mut req = self.client.head(&self.config.url);
        // Forward user-supplied "Key: Value" headers.
        for header in &self.config.headers {
            if let Some(idx) = header.find(':') {
                let (name, value) = header.split_at(idx);
                let value = value[1..].trim();
                req = req.header(name.trim(), value);
            }
        }
        let resp = req.send().await?;
        let accepts_ranges = resp
            .headers()
            .get("accept-ranges")
            .and_then(|v| v.to_str().ok())
            .map(|v| v.contains("bytes"))
            .unwrap_or(false);
        if !accepts_ranges {
            // Some servers omit Accept-Ranges; probe with a 1-byte range GET.
            let range_check = self
                .client
                .get(&self.config.url)
                .header("Range", "bytes=0-0")
                .send()
                .await?;
            if range_check.status() != StatusCode::PARTIAL_CONTENT {
                if !self.config.force_download {
                    anyhow::bail!(
                        "Server does not support Range requests, cannot continue with parallel download"
                    );
                } else if self.config.should_log() {
                    println!("Server does not support Range requests, ignoring...");
                }
            }
        }
        let file_size = if let Some(content_length) = resp.headers().get("content-length") {
            content_length.to_str()?.parse()?
        } else {
            anyhow::bail!("Could not determine file size")
        };
        let etag = if let Some(etag_header) = resp.headers().get("etag") {
            etag_header.to_str()?.to_string()
        } else {
            // Synthetic fallback so the resume header has a stable-ish value.
            format!(
                "no-etag-{}",
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap()
                    .as_secs()
            )
        };
        let filename = self.extract_filename_from_response(&resp);
        Ok((file_size, filename, etag))
    }
    /// Prefer the Content-Disposition filename; fall back to the URL path.
    fn extract_filename_from_response(&self, resp: &reqwest::Response) -> Option<String> {
        if let Some(disposition) = resp.headers().get("content-disposition") {
            if let Ok(disposition_str) = disposition.to_str() {
                if let Some(filename) = Self::parse_content_disposition(disposition_str) {
                    return Some(filename);
                }
            }
        }
        Self::extract_filename_from_url(&self.config.url)
    }
fn parse_content_disposition(disposition: &str) -> Option<String> {
if let Some(idx) = disposition.find("filename=") {
let start = idx + 9;
let mut end = disposition.len();
if disposition.as_bytes().get(start) == Some(&b'"') {
let quoted_name = &disposition[start + 1..];
if let Some(quote_end) = quoted_name.find('"') {
return Some(quoted_name[..quote_end].to_string());
}
} else {
if let Some(semicolon) = disposition[start..].find(';') {
end = start + semicolon;
}
return Some(disposition[start..end].to_string());
}
}
None
}
fn extract_filename_from_url(url: &str) -> Option<String> {
if let Ok(parsed_url) = Url::parse(url) {
let path = parsed_url.path();
if let Some(path_filename) = Path::new(path).file_name() {
if let Some(filename_str) = path_filename.to_str() {
if !filename_str.is_empty() {
if let Ok(decoded) = urlencoding::decode(filename_str) {
return Some(decoded.to_string());
}
}
}
}
}
None
}
}
/// Owns the resume header for one output file and keeps the on-disk copy
/// in sync as chunks complete.
struct ResumeManager {
    header: HydraHeader, // in-memory copy of the file's resume header
    file_path: String,   // path of the download file the header lives in
}
impl ResumeManager {
    /// Opens existing resume state at `path`, or creates a fresh download file.
    ///
    /// An existing file is resumed only when its stored header matches the
    /// current request: same SHA-256 of the URL and same total file size.
    /// (The `etag` is deliberately not part of this check; it is only stored
    /// when a new header is written.) Incompatible or unreadable files are
    /// deleted and recreated, unless `resume_only` forbids that.
    ///
    /// * `force_new` — always discard any existing file and start over.
    /// * `resume_only` — fail instead of creating a new file.
    fn try_from_file(
        path: &str,
        file_size: u64,
        etag: &str,
        url: &str,
        chunk_size: u32,
        force_new: bool,
        resume_only: bool,
    ) -> Result<Self> {
        if force_new {
            if Path::new(path).exists() {
                std::fs::remove_file(path)?;
            }
            return Self::create_new_file(path, file_size, etag, url, chunk_size);
        }
        if let Ok(file) = File::open(path) {
            let mut reader = BufReader::new(file);
            match HydraHeader::read_from_file(&mut reader) {
                Ok(header) => {
                    // Compatibility check: URL hash and expected size must match.
                    let current_url_hash = Sha256::digest(url.as_bytes());
                    let url_matches = header.url_hash == current_url_hash.as_slice();
                    let size_matches = header.file_size == file_size;
                    if url_matches && size_matches {
                        return Ok(Self {
                            header,
                            file_path: path.to_string(),
                        });
                    }
                    if resume_only {
                        anyhow::bail!(
                            "Existing file is not compatible and resume_only option is active"
                        );
                    }
                    std::fs::remove_file(path)?;
                }
                Err(e) => {
                    // Header is corrupt or unreadable; start over unless
                    // resuming is mandatory.
                    if resume_only {
                        return Err(anyhow::anyhow!("Could not read file to resume: {}", e));
                    }
                    std::fs::remove_file(path)?;
                }
            }
        } else if resume_only {
            anyhow::bail!("File not found and resume_only option is active");
        }
        Self::create_new_file(path, file_size, etag, url, chunk_size)
    }

    /// Creates a new download file: pre-allocates space for header + payload
    /// and writes a fresh `HydraHeader` at the start of the file.
    fn create_new_file(
        path: &str,
        file_size: u64,
        etag: &str,
        url: &str,
        chunk_size: u32,
    ) -> Result<Self> {
        let header = HydraHeader::new(file_size, etag, url, chunk_size);
        let file = File::create(path)?;
        // Reserve room for the serialized header followed by the full payload.
        file.set_len(HEADER_SIZE as u64 + file_size)?;
        let mut writer = BufWriter::new(file);
        header.write_to_file(&mut writer)?;
        writer.flush()?;
        Ok(Self {
            header,
            file_path: path.to_string(),
        })
    }

    /// Byte ranges of chunks that still need downloading.
    fn get_incomplete_chunks(&self) -> Vec<(u64, u64)> {
        self.header.get_incomplete_chunks()
    }

    /// Marks a chunk complete in memory and persists the updated bitmap to disk.
    fn set_chunk_complete(&mut self, chunk_index: usize) -> Result<()> {
        self.header.set_chunk_complete(chunk_index)?;
        let file = OpenOptions::new().write(true).open(&self.file_path)?;
        let mut writer = BufWriter::new(file);
        // Offset of the chunk bitmap inside the serialized header.
        // NOTE(review): presumably magic(5) + version(1) + file_size(8) +
        // url_hash(32) + etag_hash(32) + chunk_size(4) + chunk_count(4) —
        // confirm against HydraHeader's serialization layout.
        let bitmap_offset = 5 + 1 + 8 + 32 + 32 + 4 + 4;
        writer.seek(SeekFrom::Start(bitmap_offset as u64))?;
        let bitmap_bytes = self.header.chunks_bitmap.as_raw_slice();
        writer.write_all(bitmap_bytes)?;
        writer.flush()?;
        Ok(())
    }

    /// True when every chunk in the bitmap is marked complete.
    fn is_download_complete(&self) -> bool {
        self.header.is_download_complete()
    }

    /// Strips the resume header, leaving only the payload at `file_path`.
    ///
    /// Copies everything after `HEADER_SIZE` into a `.tmp` sibling file, then
    /// renames it over the original. If the rename fails (NOTE(review):
    /// presumably on platforms where rename will not replace an existing
    /// file — confirm), the original is removed first and the rename retried.
    fn finalize_download(&self) -> Result<()> {
        if !self.is_download_complete() {
            anyhow::bail!("Download is not complete");
        }
        let temp_path = format!("{}.tmp", self.file_path);
        let source = File::open(&self.file_path)?;
        let dest = File::create(&temp_path)?;
        let mut reader = BufReader::with_capacity(FINALIZE_BUFFER_SIZE, source);
        let mut writer = BufWriter::with_capacity(FINALIZE_BUFFER_SIZE, dest);
        // Skip the header so only payload bytes are copied.
        reader.seek(SeekFrom::Start(HEADER_SIZE as u64))?;
        std::io::copy(&mut reader, &mut writer)?;
        writer.flush()?;
        drop(writer);
        match std::fs::rename(&temp_path, &self.file_path) {
            Ok(_) => Ok(()),
            Err(_) => {
                let _ = std::fs::remove_file(&self.file_path);
                std::fs::rename(&temp_path, &self.file_path)?;
                Ok(())
            }
        }
    }
}
#[tokio::main]
async fn main() -> Result<()> {
    let args = CliArgs::parse();

    // The two resume-mode flags are mutually exclusive; reject early.
    if args.force_new && args.resume_only {
        eprintln!("Error: --force-new and --resume-only options cannot be used together");
        std::process::exit(1);
    }

    // Build the downloader directly from the parsed CLI arguments.
    let downloader = Downloader {
        client: Client::new(),
        config: DownloadConfig {
            url: args.url.clone(),
            output_path: args.output,
            num_connections: args.connections,
            chunk_size: args.chunk_size * 1024 * 1024,
            buffer_size: args.buffer_size * 1024 * 1024,
            verbose: args.verbose,
            silent: args.silent,
            log: args.log,
            force_new: args.force_new,
            resume_only: args.resume_only,
            headers: args.header,
            force_download: args.force_download,
        },
    };

    if downloader.config.should_log() {
        println!(
            "Starting download with {} connections, chunk size: {}MB, buffer: {}MB",
            downloader.config.num_connections, args.chunk_size, args.buffer_size
        );
        println!("URL: {}", args.url);
        let mode = if downloader.config.force_new {
            "Forcing new download, ignoring existing files"
        } else if downloader.config.resume_only {
            "Only resuming existing download"
        } else {
            "Resuming download if possible"
        };
        println!("{}", mode);
    }

    downloader.download().await?;
    Ok(())
}

View File

@@ -2,7 +2,6 @@ const { default: axios } = require("axios");
const util = require("node:util");
const fs = require("node:fs");
const path = require("node:path");
const { spawnSync } = require("node:child_process");
const exec = util.promisify(require("node:child_process").exec);
@@ -47,90 +46,11 @@ const downloadLudusavi = async () => {
});
};
const downloadAria2WindowsAndLinux = async () => {
const file =
process.platform === "win32"
? "aria2-1.37.0-win-64bit-build1.zip"
: "aria2-1.37.0-1-x86_64.pkg.tar.zst";
const downloadUrl =
process.platform === "win32"
? `https://github.com/aria2/aria2/releases/download/release-1.37.0/${file}`
: "https://archlinux.org/packages/extra/x86_64/aria2/download/";
console.log(`Downloading ${file}...`);
const response = await axios.get(downloadUrl, { responseType: "stream" });
const stream = response.data.pipe(fs.createWriteStream(file));
stream.on("finish", async () => {
console.log(`Downloaded ${file}, extracting...`);
if (process.platform === "win32") {
await exec(`npx extract-zip ${file}`);
console.log("Extracted. Renaming folder...");
fs.mkdirSync("aria2");
fs.copyFileSync(
path.join(file.replace(".zip", ""), "aria2c.exe"),
"aria2/aria2c.exe"
);
fs.rmSync(file.replace(".zip", ""), { recursive: true });
} else {
await exec(`tar --zstd -xvf ${file} usr/bin/aria2c`);
console.log("Extracted. Copying binary file...");
fs.mkdirSync("aria2");
fs.copyFileSync("usr/bin/aria2c", "aria2/aria2c");
fs.rmSync("usr", { recursive: true });
}
console.log(`Extracted ${file}, removing compressed downloaded file...`);
fs.rmSync(file);
});
};
const copyAria2Macos = async () => {
console.log("Checking if aria2 is installed...");
const isAria2Installed = spawnSync("which", ["aria2c"]).status;
if (isAria2Installed != 0) {
console.log("Please install aria2");
console.log("brew install aria2");
return;
}
console.log("Copying aria2 binary...");
fs.mkdirSync("aria2");
await exec(`cp $(which aria2c) aria2/aria2c`);
};
const copyAria2 = () => {
const aria2Path =
process.platform === "win32" ? "aria2/aria2c.exe" : "aria2/aria2c";
if (fs.existsSync(aria2Path)) {
console.log("Aria2 already exists, skipping download...");
return;
}
if (process.platform == "darwin") {
copyAria2Macos();
} else {
downloadAria2WindowsAndLinux();
}
};
copyAria2();
downloadLudusavi();
if (process.platform !== "win32") {
const binariesPath = path.join(__dirname, "..", "binaries");
if (fs.existsSync(binariesPath)) {
const zzzPath = path.join(binariesPath, "7zz");
const zzzsPath = path.join(binariesPath, "7zzs");
if (fs.existsSync(zzzPath)) fs.chmodSync(zzzPath, 0o755);
if (fs.existsSync(zzzsPath)) fs.chmodSync(zzzsPath, 0o755);
}
fs.chmodSync(path.join(binariesPath, "7zz"), 0o755);
fs.chmodSync(path.join(binariesPath, "7zzs"), 0o755);
}

View File

@@ -354,7 +354,8 @@
"common_redist": "Common redistributables",
"common_redist_description": "Common redistributables are required to run some games. Installing them is recommended to avoid issues.",
"install_common_redist": "Install",
"installing_common_redist": "Installing…"
"installing_common_redist": "Installing…",
"show_download_speed_in_megabytes": "Show download speed in megabytes per second"
},
"notifications": {
"download_complete": "Download complete",
@@ -498,6 +499,7 @@
"animated_profile_banner": "Animated profile banner",
"hydra_cloud": "Hydra Cloud",
"hydra_cloud_feature_found": "You've just discovered a Hydra Cloud feature!",
"learn_more": "Learn More"
"learn_more": "Learn More",
"debrid_description": "Download up to 4x faster with Nimbus"
}
}

View File

@@ -341,7 +341,8 @@
"common_redist": "Componentes recomendados",
"common_redist_description": "Componentes recomendados são necessários para executar alguns jogos. A instalação deles é recomendada para evitar problemas.",
"install_common_redist": "Instalar",
"installing_common_redist": "Instalando…"
"installing_common_redist": "Instalando…",
"show_download_speed_in_megabytes": "Exibir taxas de download em megabytes por segundo"
},
"notifications": {
"download_complete": "Download concluído",
@@ -493,6 +494,7 @@
"animated_profile_banner": "Banner animado no perfil",
"cloud_saving": "Saves de jogos em nuvem",
"hydra_cloud_feature_found": "Você descobriu uma funcionalidade Hydra Cloud!",
"learn_more": "Saiba mais"
"learn_more": "Saiba mais",
"debrid_description": "Baixe até 4x mais rápido com Nimbus"
}
}

View File

@@ -47,6 +47,7 @@ import "./torrenting/resume-game-download";
import "./torrenting/start-game-download";
import "./torrenting/pause-game-seed";
import "./torrenting/resume-game-seed";
import "./torrenting/check-debrid-availability";
import "./user-preferences/get-user-preferences";
import "./user-preferences/update-user-preferences";
import "./user-preferences/auto-launch";

View File

@@ -13,35 +13,42 @@ const deleteGameFolder = async (
objectId: string
): Promise<void> => {
const downloadKey = levelKeys.game(shop, objectId);
const download = await downloadsSublevel.get(downloadKey);
if (!download) return;
if (!download?.folderName) return;
if (download.folderName) {
const folderPath = path.join(
download.downloadPath ?? (await getDownloadsPath()),
download.folderName
);
const folderPath = path.join(
download.downloadPath ?? (await getDownloadsPath()),
download.folderName
);
if (fs.existsSync(folderPath)) {
const metaPath = `${folderPath}.meta`;
const deleteFile = async (filePath: string, isDirectory = false) => {
if (fs.existsSync(filePath)) {
await new Promise<void>((resolve, reject) => {
fs.rm(
folderPath,
{ recursive: true, force: true, maxRetries: 5, retryDelay: 200 },
filePath,
{
recursive: isDirectory,
force: true,
maxRetries: 5,
retryDelay: 200,
},
(error) => {
if (error) {
logger.error(error);
reject();
}
resolve();
}
);
});
}
}
};
await deleteFile(folderPath, true);
await deleteFile(metaPath);
await downloadsSublevel.del(downloadKey);
};

View File

@@ -0,0 +1,11 @@
import { HydraDebridClient } from "@main/services/download/hydra-debrid";
import { registerEvent } from "../register-event";
/**
 * IPC handler: checks which of the given magnet links are available on the
 * Hydra debrid service by delegating to
 * {@link HydraDebridClient.getAvailableMagnets}.
 *
 * @param _event - Electron IPC invoke event (unused).
 * @param magnets - Magnet URIs to check.
 * @returns A record of availability flags (presumably keyed by magnet URI —
 *   confirm against the debrid API contract).
 */
const checkDebridAvailability = async (
  _event: Electron.IpcMainInvokeEvent,
  magnets: string[]
) => {
  return HydraDebridClient.getAvailableMagnets(magnets);
};

// Exposed to the renderer as the "checkDebridAvailability" IPC channel.
registerEvent("checkDebridAvailability", checkDebridAvailability);

View File

@@ -8,7 +8,6 @@ import { electronApp, optimizer } from "@electron-toolkit/utils";
import { logger, WindowManager } from "@main/services";
import resources from "@locales";
import { PythonRPC } from "./services/python-rpc";
import { Aria2 } from "./services/aria2";
import { db, levelKeys } from "./level";
import { loadState } from "./main";
@@ -143,7 +142,6 @@ app.on("window-all-closed", () => {
app.on("before-quit", () => {
/* Disconnects libtorrent */
PythonRPC.kill();
Aria2.kill();
});
app.on("activate", () => {

View File

@@ -2,7 +2,6 @@ import { DownloadManager, Ludusavi, startMainLoop } from "./services";
import { RealDebridClient } from "./services/download/real-debrid";
import { HydraApi } from "./services/hydra-api";
import { uploadGamesBatch } from "./services/library-sync";
import { Aria2 } from "./services/aria2";
import { downloadsSublevel } from "./level/sublevels/downloads";
import { sortBy } from "lodash-es";
import { Downloader } from "@shared";
@@ -21,8 +20,6 @@ export const loadState = async () => {
await import("./events");
Aria2.spawn();
if (userPreferences?.realDebridApiToken) {
RealDebridClient.authorize(userPreferences.realDebridApiToken);
}

View File

@@ -1,27 +0,0 @@
import path from "node:path";
import cp from "node:child_process";
import { app } from "electron";
export class Aria2 {
private static process: cp.ChildProcess | null = null;
private static readonly binaryPath = app.isPackaged
? path.join(process.resourcesPath, "aria2", "aria2c")
: path.join(__dirname, "..", "..", "aria2", "aria2c");
public static spawn() {
this.process = cp.spawn(
this.binaryPath,
[
"--enable-rpc",
"--rpc-listen-all",
"--file-allocation=none",
"--allow-overwrite=true",
],
{ stdio: "inherit", windowsHide: true }
);
}
public static kill() {
this.process?.kill();
}
}

View File

@@ -23,6 +23,7 @@ import { db, downloadsSublevel, gamesSublevel, levelKeys } from "@main/level";
import { sortBy } from "lodash-es";
import { TorBoxClient } from "./torbox";
import { GameFilesManager } from "../game-files-manager";
import { HydraDebridClient } from "./hydra-debrid";
export class DownloadManager {
private static downloadingGameId: string | null = null;
@@ -313,6 +314,8 @@ export class DownloadManager {
url: downloadLink,
save_path: download.downloadPath,
header: `Cookie: accountToken=${token}`,
allow_multiple_connections: true,
connections_limit: 8,
};
}
case Downloader.PixelDrain: {
@@ -387,6 +390,21 @@ export class DownloadManager {
allow_multiple_connections: true,
};
}
case Downloader.Hydra: {
const downloadUrl = await HydraDebridClient.getDownloadUrl(
download.uri
);
if (!downloadUrl) throw new Error(DownloadError.NotCachedInHydra);
return {
action: "start",
game_id: downloadId,
url: downloadUrl,
save_path: download.downloadPath,
allow_multiple_connections: true,
};
}
}
}

View File

@@ -0,0 +1,27 @@
import { HydraApi } from "../hydra-api";
/** Thin client for the Hydra (Nimbus) debrid HTTP API. */
export class HydraDebridClient {
  /**
   * Checks which magnets are available (cached) on the debrid service.
   * Issues an unauthenticated PUT to `/debrid/check-availability`.
   *
   * @param magnets - Magnet URIs to check.
   * @returns A record of availability flags (presumably keyed by magnet
   *   URI — confirm against the API).
   */
  public static getAvailableMagnets(
    magnets: string[]
  ): Promise<Record<string, boolean>> {
    return HydraApi.put(
      "/debrid/check-availability",
      {
        magnets,
      },
      { needsAuth: false }
    );
  }

  /**
   * Requests a direct-download URL for the given magnet.
   *
   * @param magnet - Magnet URI to resolve.
   * @returns The download URL, or `null` when the request fails.
   *   NOTE(review): errors are swallowed silently here — consider logging
   *   before returning null.
   */
  public static async getDownloadUrl(magnet: string) {
    try {
      const response = await HydraApi.post("/debrid/request-file", {
        magnet,
      });

      return response.downloadUrl;
    } catch (error) {
      return null;
    }
  }
}

View File

@@ -21,6 +21,12 @@ const binaryNameByPlatform: Partial<Record<NodeJS.Platform, string>> = {
win32: "hydra-python-rpc.exe",
};
const rustBinaryNameByPlatform: Partial<Record<NodeJS.Platform, string>> = {
darwin: "hydra-httpdl",
linux: "hydra-httpdl",
win32: "hydra-httpdl.exe",
};
export class PythonRPC {
public static readonly BITTORRENT_PORT = "5881";
public static readonly RPC_PORT = "8084";
@@ -52,6 +58,20 @@ export class PythonRPC {
this.RPC_PASSWORD,
initialDownload ? JSON.stringify(initialDownload) : "",
initialSeeding ? JSON.stringify(initialSeeding) : "",
app.isPackaged
? path.join(
process.resourcesPath,
rustBinaryNameByPlatform[process.platform]!
)
: path.join(
__dirname,
"..",
"..",
"rust_rpc",
"target",
"debug",
rustBinaryNameByPlatform[process.platform]!
),
];
if (app.isPackaged) {

View File

@@ -55,6 +55,8 @@ contextBridge.exposeInMainWorld("electron", {
ipcRenderer.on("on-seeding-status", listener);
return () => ipcRenderer.removeListener("on-seeding-status", listener);
},
checkDebridAvailability: (magnets: string[]) =>
ipcRenderer.invoke("checkDebridAvailability", magnets),
/* Catalogue */
searchGames: (payload: CatalogueSearchPayload, take: number, skip: number) =>

View File

@@ -31,7 +31,6 @@ import { HydraCloudModal } from "./pages/shared-modals/hydra-cloud/hydra-cloud-m
import { injectCustomCss } from "./helpers";
import "./app.scss";
import { DownloadSource } from "@types";
export interface AppProps {
children: React.ReactNode;
@@ -137,71 +136,16 @@ export function App() {
});
}, [fetchUserDetails, updateUserDetails, dispatch]);
const syncDownloadSources = useCallback(async () => {
const downloadSources = await window.electron.getDownloadSources();
const existingDownloadSources: DownloadSource[] =
await downloadSourcesTable.toArray();
window.electron.createDownloadSources(
existingDownloadSources.map((source) => source.url)
);
await Promise.allSettled(
downloadSources.map(async (source) => {
return new Promise((resolve) => {
const existingDownloadSource = existingDownloadSources.find(
(downloadSource) => downloadSource.url === source.url
);
if (!existingDownloadSource) {
const channel = new BroadcastChannel(
`download_sources:import:${source.url}`
);
downloadSourcesWorker.postMessage([
"IMPORT_DOWNLOAD_SOURCE",
source.url,
]);
channel.onmessage = () => {
resolve(true);
channel.close();
};
} else {
resolve(true);
}
});
})
);
updateRepacks();
const id = crypto.randomUUID();
const channel = new BroadcastChannel(`download_sources:sync:${id}`);
channel.onmessage = async (event: MessageEvent<number>) => {
const newRepacksCount = event.data;
window.electron.publishNewRepacksNotification(newRepacksCount);
updateRepacks();
const downloadSources = await downloadSourcesTable.toArray();
downloadSources
.filter((source) => !source.fingerprint)
.forEach(async (downloadSource) => {
const { fingerprint } = await window.electron.putDownloadSource(
downloadSource.objectIds
);
downloadSourcesTable.update(downloadSource.id, { fingerprint });
});
};
downloadSourcesWorker.postMessage(["SYNC_DOWNLOAD_SOURCES", id]);
}, [updateRepacks]);
const onSignIn = useCallback(() => {
window.electron.getDownloadSources().then((sources) => {
sources.forEach((source) => {
downloadSourcesWorker.postMessage([
"IMPORT_DOWNLOAD_SOURCE",
source.url,
]);
});
});
fetchUserDetails().then((response) => {
if (response) {
updateUserDetails(response);
@@ -209,15 +153,7 @@ export function App() {
showSuccessToast(t("successfully_signed_in"));
}
});
syncDownloadSources();
}, [
fetchUserDetails,
t,
showSuccessToast,
updateUserDetails,
syncDownloadSources,
]);
}, [fetchUserDetails, t, showSuccessToast, updateUserDetails]);
useEffect(() => {
const unsubscribe = window.electron.onSyncFriendRequests((result) => {
@@ -285,8 +221,41 @@ export function App() {
}, [dispatch, draggingDisabled]);
useEffect(() => {
syncDownloadSources();
}, [syncDownloadSources]);
updateRepacks();
const id = crypto.randomUUID();
const channel = new BroadcastChannel(`download_sources:sync:${id}`);
channel.onmessage = async (event: MessageEvent<number>) => {
const newRepacksCount = event.data;
window.electron.publishNewRepacksNotification(newRepacksCount);
updateRepacks();
const downloadSources = await downloadSourcesTable.toArray();
await Promise.all(
downloadSources
.filter((source) => !source.fingerprint)
.map(async (downloadSource) => {
const { fingerprint } = await window.electron.putDownloadSource(
downloadSource.objectIds
);
return downloadSourcesTable.update(downloadSource.id, {
fingerprint,
});
})
);
channel.close();
};
downloadSourcesWorker.postMessage(["SYNC_DOWNLOAD_SOURCES", id]);
return () => {
channel.close();
};
}, [updateRepacks]);
useEffect(() => {
const loadAndApplyTheme = async () => {

View File

@@ -0,0 +1,24 @@
<svg width="20" height="21" viewBox="0 0 20 21" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="Meteor">
<g id="Vector">
<path d="M10.6242 13.0003C10.6242 13.6184 10.4409 14.2226 10.0975 14.7365C9.75415 15.2504 9.26609 15.6509 8.69507 15.8875C8.12405 16.124 7.49572 16.1859 6.88953 16.0653C6.28334 15.9447 5.72652 15.6471 5.28948 15.2101C4.85244 14.773 4.55481 14.2162 4.43423 13.61C4.31366 13.0038 4.37554 12.3755 4.61206 11.8045C4.84859 11.2334 5.24913 10.7454 5.76303 10.402C6.27693 10.0586 6.88112 9.87535 7.49919 9.87535C8.32799 9.87535 9.12285 10.2046 9.7089 10.7906C10.2949 11.3767 10.6242 12.1715 10.6242 13.0003ZM16.432 10.0582L12.682 13.8082C12.5647 13.9254 12.4988 14.0845 12.4988 14.2503C12.4988 14.4162 12.5647 14.5753 12.682 14.6925C12.7993 14.8098 12.9583 14.8757 13.1242 14.8757C13.29 14.8757 13.4491 14.8098 13.5664 14.6925L17.3164 10.9425C17.3744 10.8845 17.4205 10.8155 17.4519 10.7397C17.4834 10.6638 17.4995 10.5825 17.4995 10.5003C17.4995 10.4182 17.4834 10.3369 17.4519 10.261C17.4205 10.1852 17.3744 10.1162 17.3164 10.0582C17.2583 10.0001 17.1894 9.95403 17.1135 9.9226C17.0376 9.89118 16.9563 9.875 16.8742 9.875C16.7921 9.875 16.7107 9.89118 16.6349 9.9226C16.559 9.95403 16.4901 10.0001 16.432 10.0582ZM14.8164 9.06754C14.8744 9.00947 14.9205 8.94053 14.9519 8.86466C14.9834 8.78879 14.9995 8.70747 14.9995 8.62535C14.9995 8.54323 14.9834 8.46191 14.9519 8.38604C14.9205 8.31017 14.8744 8.24123 14.8164 8.18316C14.7583 8.12509 14.6894 8.07903 14.6135 8.0476C14.5376 8.01617 14.4563 8 14.3742 8C14.2921 8 14.2107 8.01617 14.1349 8.0476C14.059 8.07903 13.9901 8.12509 13.932 8.18316L12.057 10.0582C11.9397 10.1754 11.8738 10.3345 11.8738 10.5003C11.8738 10.6662 11.9397 10.8253 12.057 10.9425C12.1743 11.0598 12.3333 11.1257 12.4992 11.1257C12.665 11.1257 12.8241 11.0598 12.9414 10.9425L14.8164 9.06754ZM17.9414 5.05816C17.8833 5.00005 17.8144 4.95395 17.7385 4.9225C17.6627 4.89105 17.5813 4.87486 17.4992 4.87486C17.4171 4.87486 17.3357 4.89105 17.2599 4.9225C17.184 4.95395 17.115 5.00005 17.057 5.05816L15.807 6.30816C15.6897 6.42544 15.6238 6.5845 15.6238 6.75035C15.6238 6.9162 15.6897 
7.07526 15.807 7.19254C15.9243 7.30981 16.0833 7.37569 16.2492 7.3757C16.415 7.3757 16.5741 7.30981 16.6914 7.19254L17.9414 5.94254C17.9995 5.88449 18.0456 5.81556 18.077 5.73969C18.1085 5.66381 18.1247 5.58248 18.1247 5.50035C18.1247 5.41821 18.1085 5.33688 18.077 5.26101C18.0456 5.18514 17.9995 5.11621 17.9414 5.05816ZM9.557 8.44254C9.61505 8.50065 9.68398 8.54674 9.75985 8.5782C9.83572 8.60965 9.91705 8.62584 9.99919 8.62584C10.0813 8.62584 10.1627 8.60965 10.2385 8.5782C10.3144 8.54674 10.3833 8.50065 10.4414 8.44254L16.0664 2.81754C16.1244 2.75947 16.1705 2.69053 16.2019 2.61466C16.2334 2.53879 16.2495 2.45747 16.2495 2.37535C16.2495 2.29323 16.2334 2.21191 16.2019 2.13604C16.1705 2.06017 16.1244 1.99123 16.0664 1.93316C16.0083 1.87509 15.9394 1.82903 15.8635 1.7976C15.7876 1.76618 15.7063 1.75 15.6242 1.75C15.5421 1.75 15.4607 1.76618 15.3849 1.7976C15.309 1.82903 15.2401 1.87509 15.182 1.93316L9.557 7.55816C9.49889 7.61621 9.45279 7.68514 9.42134 7.76101C9.38989 7.83688 9.3737 7.91821 9.3737 8.00035C9.3737 8.08248 9.38989 8.16381 9.42134 8.23969C9.45279 8.31556 9.49889 8.38449 9.557 8.44254ZM10.5929 16.0941C9.77242 16.9146 8.65957 17.3756 7.49919 17.3756C6.33881 17.3756 5.22595 16.9146 4.40544 16.0941C3.58492 15.2736 3.12396 14.1607 3.12396 13.0003C3.12396 11.84 3.58492 10.7271 4.40544 9.9066L10.8703 3.44253C10.9284 3.38447 10.9744 3.31553 11.0058 3.23966C11.0373 3.16379 11.0534 3.08247 11.0534 3.00035C11.0534 2.91823 11.0373 2.83691 11.0058 2.76104C10.9744 2.68517 10.9284 2.61623 10.8703 2.55816C10.8122 2.50009 10.7433 2.45403 10.6674 2.4226C10.5915 2.39118 10.5102 2.375 10.4281 2.375C10.346 2.375 10.2647 2.39118 10.1888 2.4226C10.1129 2.45403 10.044 2.50009 9.98591 2.55816L3.52184 9.023C2.99253 9.54377 2.57156 10.1642 2.28322 10.8484C1.99488 11.5327 1.84487 12.2673 1.84184 13.0098C1.83882 13.7524 1.98284 14.4882 2.2656 15.1747C2.54836 15.8613 2.96427 16.4852 3.48932 17.0102C4.01438 17.5353 4.63819 17.9512 5.32479 18.2339C6.01138 18.5167 6.74717 18.6607 
7.4897 18.6577C8.23223 18.6547 8.96682 18.5047 9.65109 18.2163C10.3354 17.928 10.9558 17.507 11.4765 16.9777C11.5888 16.8595 11.6505 16.7022 11.6484 16.5392C11.6463 16.3762 11.5806 16.2205 11.4654 16.1053C11.3501 15.99 11.1944 15.9243 11.0314 15.9223C10.8684 15.9202 10.7111 15.9818 10.5929 16.0941Z" fill="black"/>
<path d="M10.6242 13.0003C10.6242 13.6184 10.4409 14.2226 10.0975 14.7365C9.75415 15.2504 9.26609 15.6509 8.69507 15.8875C8.12405 16.124 7.49572 16.1859 6.88953 16.0653C6.28334 15.9447 5.72652 15.6471 5.28948 15.2101C4.85244 14.773 4.55481 14.2162 4.43423 13.61C4.31366 13.0038 4.37554 12.3755 4.61206 11.8045C4.84859 11.2334 5.24913 10.7454 5.76303 10.402C6.27693 10.0586 6.88112 9.87535 7.49919 9.87535C8.32799 9.87535 9.12285 10.2046 9.7089 10.7906C10.2949 11.3767 10.6242 12.1715 10.6242 13.0003ZM16.432 10.0582L12.682 13.8082C12.5647 13.9254 12.4988 14.0845 12.4988 14.2503C12.4988 14.4162 12.5647 14.5753 12.682 14.6925C12.7993 14.8098 12.9583 14.8757 13.1242 14.8757C13.29 14.8757 13.4491 14.8098 13.5664 14.6925L17.3164 10.9425C17.3744 10.8845 17.4205 10.8155 17.4519 10.7397C17.4834 10.6638 17.4995 10.5825 17.4995 10.5003C17.4995 10.4182 17.4834 10.3369 17.4519 10.261C17.4205 10.1852 17.3744 10.1162 17.3164 10.0582C17.2583 10.0001 17.1894 9.95403 17.1135 9.9226C17.0376 9.89118 16.9563 9.875 16.8742 9.875C16.7921 9.875 16.7107 9.89118 16.6349 9.9226C16.559 9.95403 16.4901 10.0001 16.432 10.0582ZM14.8164 9.06754C14.8744 9.00947 14.9205 8.94053 14.9519 8.86466C14.9834 8.78879 14.9995 8.70747 14.9995 8.62535C14.9995 8.54323 14.9834 8.46191 14.9519 8.38604C14.9205 8.31017 14.8744 8.24123 14.8164 8.18316C14.7583 8.12509 14.6894 8.07903 14.6135 8.0476C14.5376 8.01617 14.4563 8 14.3742 8C14.2921 8 14.2107 8.01617 14.1349 8.0476C14.059 8.07903 13.9901 8.12509 13.932 8.18316L12.057 10.0582C11.9397 10.1754 11.8738 10.3345 11.8738 10.5003C11.8738 10.6662 11.9397 10.8253 12.057 10.9425C12.1743 11.0598 12.3333 11.1257 12.4992 11.1257C12.665 11.1257 12.8241 11.0598 12.9414 10.9425L14.8164 9.06754ZM17.9414 5.05816C17.8833 5.00005 17.8144 4.95395 17.7385 4.9225C17.6627 4.89105 17.5813 4.87486 17.4992 4.87486C17.4171 4.87486 17.3357 4.89105 17.2599 4.9225C17.184 4.95395 17.115 5.00005 17.057 5.05816L15.807 6.30816C15.6897 6.42544 15.6238 6.5845 15.6238 6.75035C15.6238 6.9162 15.6897 
7.07526 15.807 7.19254C15.9243 7.30981 16.0833 7.37569 16.2492 7.3757C16.415 7.3757 16.5741 7.30981 16.6914 7.19254L17.9414 5.94254C17.9995 5.88449 18.0456 5.81556 18.077 5.73969C18.1085 5.66381 18.1247 5.58248 18.1247 5.50035C18.1247 5.41821 18.1085 5.33688 18.077 5.26101C18.0456 5.18514 17.9995 5.11621 17.9414 5.05816ZM9.557 8.44254C9.61505 8.50065 9.68398 8.54674 9.75985 8.5782C9.83572 8.60965 9.91705 8.62584 9.99919 8.62584C10.0813 8.62584 10.1627 8.60965 10.2385 8.5782C10.3144 8.54674 10.3833 8.50065 10.4414 8.44254L16.0664 2.81754C16.1244 2.75947 16.1705 2.69053 16.2019 2.61466C16.2334 2.53879 16.2495 2.45747 16.2495 2.37535C16.2495 2.29323 16.2334 2.21191 16.2019 2.13604C16.1705 2.06017 16.1244 1.99123 16.0664 1.93316C16.0083 1.87509 15.9394 1.82903 15.8635 1.7976C15.7876 1.76618 15.7063 1.75 15.6242 1.75C15.5421 1.75 15.4607 1.76618 15.3849 1.7976C15.309 1.82903 15.2401 1.87509 15.182 1.93316L9.557 7.55816C9.49889 7.61621 9.45279 7.68514 9.42134 7.76101C9.38989 7.83688 9.3737 7.91821 9.3737 8.00035C9.3737 8.08248 9.38989 8.16381 9.42134 8.23969C9.45279 8.31556 9.49889 8.38449 9.557 8.44254ZM10.5929 16.0941C9.77242 16.9146 8.65957 17.3756 7.49919 17.3756C6.33881 17.3756 5.22595 16.9146 4.40544 16.0941C3.58492 15.2736 3.12396 14.1607 3.12396 13.0003C3.12396 11.84 3.58492 10.7271 4.40544 9.9066L10.8703 3.44253C10.9284 3.38447 10.9744 3.31553 11.0058 3.23966C11.0373 3.16379 11.0534 3.08247 11.0534 3.00035C11.0534 2.91823 11.0373 2.83691 11.0058 2.76104C10.9744 2.68517 10.9284 2.61623 10.8703 2.55816C10.8122 2.50009 10.7433 2.45403 10.6674 2.4226C10.5915 2.39118 10.5102 2.375 10.4281 2.375C10.346 2.375 10.2647 2.39118 10.1888 2.4226C10.1129 2.45403 10.044 2.50009 9.98591 2.55816L3.52184 9.023C2.99253 9.54377 2.57156 10.1642 2.28322 10.8484C1.99488 11.5327 1.84487 12.2673 1.84184 13.0098C1.83882 13.7524 1.98284 14.4882 2.2656 15.1747C2.54836 15.8613 2.96427 16.4852 3.48932 17.0102C4.01438 17.5353 4.63819 17.9512 5.32479 18.2339C6.01138 18.5167 6.74717 18.6607 
7.4897 18.6577C8.23223 18.6547 8.96682 18.5047 9.65109 18.2163C10.3354 17.928 10.9558 17.507 11.4765 16.9777C11.5888 16.8595 11.6505 16.7022 11.6484 16.5392C11.6463 16.3762 11.5806 16.2205 11.4654 16.1053C11.3501 15.99 11.1944 15.9243 11.0314 15.9223C10.8684 15.9202 10.7111 15.9818 10.5929 16.0941Z" fill="url(#paint0_linear_2850_16638)"/>
<path d="M10.6242 13.0003C10.6242 13.6184 10.4409 14.2226 10.0975 14.7365C9.75415 15.2504 9.26609 15.6509 8.69507 15.8875C8.12405 16.124 7.49572 16.1859 6.88953 16.0653C6.28334 15.9447 5.72652 15.6471 5.28948 15.2101C4.85244 14.773 4.55481 14.2162 4.43423 13.61C4.31366 13.0038 4.37554 12.3755 4.61206 11.8045C4.84859 11.2334 5.24913 10.7454 5.76303 10.402C6.27693 10.0586 6.88112 9.87535 7.49919 9.87535C8.32799 9.87535 9.12285 10.2046 9.7089 10.7906C10.2949 11.3767 10.6242 12.1715 10.6242 13.0003ZM16.432 10.0582L12.682 13.8082C12.5647 13.9254 12.4988 14.0845 12.4988 14.2503C12.4988 14.4162 12.5647 14.5753 12.682 14.6925C12.7993 14.8098 12.9583 14.8757 13.1242 14.8757C13.29 14.8757 13.4491 14.8098 13.5664 14.6925L17.3164 10.9425C17.3744 10.8845 17.4205 10.8155 17.4519 10.7397C17.4834 10.6638 17.4995 10.5825 17.4995 10.5003C17.4995 10.4182 17.4834 10.3369 17.4519 10.261C17.4205 10.1852 17.3744 10.1162 17.3164 10.0582C17.2583 10.0001 17.1894 9.95403 17.1135 9.9226C17.0376 9.89118 16.9563 9.875 16.8742 9.875C16.7921 9.875 16.7107 9.89118 16.6349 9.9226C16.559 9.95403 16.4901 10.0001 16.432 10.0582ZM14.8164 9.06754C14.8744 9.00947 14.9205 8.94053 14.9519 8.86466C14.9834 8.78879 14.9995 8.70747 14.9995 8.62535C14.9995 8.54323 14.9834 8.46191 14.9519 8.38604C14.9205 8.31017 14.8744 8.24123 14.8164 8.18316C14.7583 8.12509 14.6894 8.07903 14.6135 8.0476C14.5376 8.01617 14.4563 8 14.3742 8C14.2921 8 14.2107 8.01617 14.1349 8.0476C14.059 8.07903 13.9901 8.12509 13.932 8.18316L12.057 10.0582C11.9397 10.1754 11.8738 10.3345 11.8738 10.5003C11.8738 10.6662 11.9397 10.8253 12.057 10.9425C12.1743 11.0598 12.3333 11.1257 12.4992 11.1257C12.665 11.1257 12.8241 11.0598 12.9414 10.9425L14.8164 9.06754ZM17.9414 5.05816C17.8833 5.00005 17.8144 4.95395 17.7385 4.9225C17.6627 4.89105 17.5813 4.87486 17.4992 4.87486C17.4171 4.87486 17.3357 4.89105 17.2599 4.9225C17.184 4.95395 17.115 5.00005 17.057 5.05816L15.807 6.30816C15.6897 6.42544 15.6238 6.5845 15.6238 6.75035C15.6238 6.9162 15.6897 
7.07526 15.807 7.19254C15.9243 7.30981 16.0833 7.37569 16.2492 7.3757C16.415 7.3757 16.5741 7.30981 16.6914 7.19254L17.9414 5.94254C17.9995 5.88449 18.0456 5.81556 18.077 5.73969C18.1085 5.66381 18.1247 5.58248 18.1247 5.50035C18.1247 5.41821 18.1085 5.33688 18.077 5.26101C18.0456 5.18514 17.9995 5.11621 17.9414 5.05816ZM9.557 8.44254C9.61505 8.50065 9.68398 8.54674 9.75985 8.5782C9.83572 8.60965 9.91705 8.62584 9.99919 8.62584C10.0813 8.62584 10.1627 8.60965 10.2385 8.5782C10.3144 8.54674 10.3833 8.50065 10.4414 8.44254L16.0664 2.81754C16.1244 2.75947 16.1705 2.69053 16.2019 2.61466C16.2334 2.53879 16.2495 2.45747 16.2495 2.37535C16.2495 2.29323 16.2334 2.21191 16.2019 2.13604C16.1705 2.06017 16.1244 1.99123 16.0664 1.93316C16.0083 1.87509 15.9394 1.82903 15.8635 1.7976C15.7876 1.76618 15.7063 1.75 15.6242 1.75C15.5421 1.75 15.4607 1.76618 15.3849 1.7976C15.309 1.82903 15.2401 1.87509 15.182 1.93316L9.557 7.55816C9.49889 7.61621 9.45279 7.68514 9.42134 7.76101C9.38989 7.83688 9.3737 7.91821 9.3737 8.00035C9.3737 8.08248 9.38989 8.16381 9.42134 8.23969C9.45279 8.31556 9.49889 8.38449 9.557 8.44254ZM10.5929 16.0941C9.77242 16.9146 8.65957 17.3756 7.49919 17.3756C6.33881 17.3756 5.22595 16.9146 4.40544 16.0941C3.58492 15.2736 3.12396 14.1607 3.12396 13.0003C3.12396 11.84 3.58492 10.7271 4.40544 9.9066L10.8703 3.44253C10.9284 3.38447 10.9744 3.31553 11.0058 3.23966C11.0373 3.16379 11.0534 3.08247 11.0534 3.00035C11.0534 2.91823 11.0373 2.83691 11.0058 2.76104C10.9744 2.68517 10.9284 2.61623 10.8703 2.55816C10.8122 2.50009 10.7433 2.45403 10.6674 2.4226C10.5915 2.39118 10.5102 2.375 10.4281 2.375C10.346 2.375 10.2647 2.39118 10.1888 2.4226C10.1129 2.45403 10.044 2.50009 9.98591 2.55816L3.52184 9.023C2.99253 9.54377 2.57156 10.1642 2.28322 10.8484C1.99488 11.5327 1.84487 12.2673 1.84184 13.0098C1.83882 13.7524 1.98284 14.4882 2.2656 15.1747C2.54836 15.8613 2.96427 16.4852 3.48932 17.0102C4.01438 17.5353 4.63819 17.9512 5.32479 18.2339C6.01138 18.5167 6.74717 18.6607 
7.4897 18.6577C8.23223 18.6547 8.96682 18.5047 9.65109 18.2163C10.3354 17.928 10.9558 17.507 11.4765 16.9777C11.5888 16.8595 11.6505 16.7022 11.6484 16.5392C11.6463 16.3762 11.5806 16.2205 11.4654 16.1053C11.3501 15.99 11.1944 15.9243 11.0314 15.9223C10.8684 15.9202 10.7111 15.9818 10.5929 16.0941Z" stroke="url(#paint1_linear_2850_16638)" stroke-width="0.3"/>
</g>
</g>
<defs>
<linearGradient id="paint0_linear_2850_16638" x1="1.95109" y1="1.75" x2="21.5698" y2="11.5208" gradientUnits="userSpaceOnUse">
<stop stop-color="#0CF1CA"/>
<stop offset="0.264423" stop-color="#0BD2B0"/>
<stop offset="0.307692" stop-color="#0CF1CA"/>
<stop offset="0.427885" stop-color="#0CF1CA"/>
<stop offset="0.466346" stop-color="#0FAF94"/>
<stop offset="0.591346" stop-color="#0CA288"/>
<stop offset="1" stop-color="#086253"/>
</linearGradient>
<linearGradient id="paint1_linear_2850_16638" x1="1.8418" y1="2.25694" x2="21.3121" y2="11.25" gradientUnits="userSpaceOnUse">
<stop stop-color="white"/>
<stop offset="1" stop-color="white" stop-opacity="0"/>
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 14 KiB

View File

@@ -0,0 +1,11 @@
// Pill-style badge shown on repacks that are already cached on the debrid
// service; teal rgba(12, 241, 202, …) accent matches the Nimbus branding.
.debrid-badge {
display: flex;
align-items: center;
gap: 8px;
border-radius: 4px;
border: 1px solid rgba(12, 241, 202, 0.3);
background: rgba(12, 241, 202, 0.05);
color: #0cf1ca;
padding: 4px 8px;
font-size: 12px;
}

View File

@@ -0,0 +1,18 @@
import Meteor from "@renderer/assets/meteor.svg?react";
import "./debrid-badge.scss";
import { useTranslation } from "react-i18next";
/** Props for the DebridBadge component. */
export interface DebridBadgeProps {
/** When true, only the meteor icon is rendered; the translated description text is omitted. */
collapsed?: boolean;
}
/**
 * Badge indicating debrid (Nimbus) availability for a repack.
 * Renders the meteor icon, plus the translated description unless collapsed.
 */
export function DebridBadge({ collapsed }: Readonly<DebridBadgeProps>) {
  const { t } = useTranslation("hydra_cloud");

  // Hide the text label in collapsed mode; the icon is always visible.
  const label = collapsed ? null : t("debrid_description");

  return (
    <div className="debrid-badge">
      <Meteor />
      {label}
    </div>
  );
}

View File

@@ -14,3 +14,4 @@ export * from "./toast/toast";
export * from "./badge/badge";
export * from "./confirmation-modal/confirmation-modal";
export * from "./suspense-wrapper/suspense-wrapper";
export * from "./debrid-badge/debrid-badge";

View File

@@ -11,6 +11,7 @@ export const DOWNLOADER_NAME = {
[Downloader.Datanodes]: "Datanodes",
[Downloader.Mediafire]: "Mediafire",
[Downloader.TorBox]: "TorBox",
[Downloader.Hydra]: "Nimbus",
};
export const MAX_MINUTES_TO_SHOW_IN_PLAYTIME = 120;

View File

@@ -59,6 +59,9 @@ declare global {
cb: (value: SeedingStatus[]) => void
) => () => Electron.IpcRenderer;
onHardDelete: (cb: () => void) => () => Electron.IpcRenderer;
checkDebridAvailability: (
magnets: string[]
) => Promise<Record<string, boolean>>;
/* Catalogue */
searchGames: (

View File

@@ -15,12 +15,14 @@ import type {
StartGameDownloadPayload,
} from "@types";
import { useDate } from "./use-date";
import { formatBytes } from "@shared";
import { formatBytes, formatBytesToMbps } from "@shared";
export function useDownload() {
const { updateLibrary } = useLibrary();
const { formatDistance } = useDate();
const userPrefs = useAppSelector((state) => state.userPreferences.value);
const { lastPacket, gamesWithDeletionInProgress } = useAppSelector(
(state) => state.download
);
@@ -99,8 +101,14 @@ export function useDownload() {
return gamesWithDeletionInProgress.includes(objectId);
};
const formatDownloadSpeed = (downloadSpeed: number): string => {
return userPrefs?.showDownloadSpeedInMegabytes
? `${formatBytes(downloadSpeed)}/s`
: formatBytesToMbps(downloadSpeed);
};
return {
downloadSpeed: `${formatBytes(lastPacket?.downloadSpeed ?? 0)}/s`,
downloadSpeed: formatDownloadSpeed(lastPacket?.downloadSpeed ?? 0),
progress: formatDownloadProgress(lastPacket?.progress ?? 0),
lastPacket,
eta: calculateETA(),

View File

@@ -1,8 +1,9 @@
import { useEffect, useState } from "react";
import { useEffect, useState, useCallback } from "react";
enum Feature {
CheckDownloadWritePermission = "CHECK_DOWNLOAD_WRITE_PERMISSION",
Torbox = "TORBOX",
Nimbus = "NIMBUS",
}
export function useFeature() {
@@ -15,14 +16,17 @@ export function useFeature() {
});
}, []);
const isFeatureEnabled = (feature: Feature) => {
if (!features) {
const features = JSON.parse(localStorage.getItem("features") ?? "[]");
return features.includes(feature);
}
const isFeatureEnabled = useCallback(
(feature: Feature) => {
if (!features) {
const features = JSON.parse(localStorage.getItem("features") ?? "[]");
return features.includes(feature);
}
return features.includes(feature);
};
return features.includes(feature);
},
[features]
);
return {
isFeatureEnabled,

View File

@@ -374,6 +374,21 @@ export function DownloadGroup({
</DropdownMenu>
)}
</div>
{game.download?.downloader === Downloader.Hydra && (
<div
style={{
background:
"linear-gradient(90deg, #01483C 0%, #0CF1CA 50%, #01483C 100%)",
boxShadow: "0px 0px 8px 0px rgba(12, 241, 202, 0.15)",
width: "100%",
position: "absolute",
bottom: 0,
height: 2,
zIndex: 1,
}}
/>
)}
</li>
);
})}

View File

@@ -83,6 +83,10 @@ export function DownloadSettingsModal({
const getDefaultDownloader = useCallback(
(availableDownloaders: Downloader[]) => {
if (availableDownloaders.includes(Downloader.Hydra)) {
return Downloader.Hydra;
}
if (availableDownloaders.includes(Downloader.TorBox)) {
return Downloader.TorBox;
}
@@ -110,11 +114,15 @@ export function DownloadSettingsModal({
return userPreferences?.realDebridApiToken;
if (downloader === Downloader.TorBox)
return userPreferences?.torBoxApiToken;
if (downloader === Downloader.Hydra)
return isFeatureEnabled(Feature.Nimbus);
return true;
});
setSelectedDownloader(getDefaultDownloader(filteredDownloaders));
}, [
Feature,
isFeatureEnabled,
getDefaultDownloader,
userPreferences?.downloadsPath,
downloaders,
@@ -181,7 +189,9 @@ export function DownloadSettingsModal({
(downloader === Downloader.RealDebrid &&
!userPreferences?.realDebridApiToken) ||
(downloader === Downloader.TorBox &&
!userPreferences?.torBoxApiToken);
!userPreferences?.torBoxApiToken) ||
(downloader === Downloader.Hydra &&
!isFeatureEnabled(Feature.Nimbus));
return (
<Button

View File

@@ -1,7 +1,13 @@
import { useContext, useEffect, useMemo, useState } from "react";
import { useTranslation } from "react-i18next";
import { Badge, Button, Modal, TextField } from "@renderer/components";
import {
Badge,
Button,
DebridBadge,
Modal,
TextField,
} from "@renderer/components";
import type { GameRepack } from "@types";
import { DownloadSettingsModal } from "./download-settings-modal";
@@ -31,16 +37,52 @@ export function RepacksModal({
const [repack, setRepack] = useState<GameRepack | null>(null);
const [showSelectFolderModal, setShowSelectFolderModal] = useState(false);
const [hashesInDebrid, setHashesInDebrid] = useState<Record<string, boolean>>(
{}
);
const { repacks, game } = useContext(gameDetailsContext);
const { t } = useTranslation("game_details");
const { formatDate } = useDate();
const sortedRepacks = useMemo(() => {
return orderBy(repacks, (repack) => repack.uploadDate, "desc");
// Extracts the BitTorrent info-hash from a magnet URI's "xt=urn:btih:" field,
// normalized to lowercase; returns null for empty/invalid input or no match.
const getHashFromMagnet = (magnet: string) => {
  // Defensive runtime check: callers may hand us a non-string uri.
  if (typeof magnet !== "string" || magnet.length === 0) {
    return null;
  }

  const match = /xt=urn:btih:([a-zA-Z0-9]+)/i.exec(magnet);
  return match === null ? null : match[1].toLowerCase();
};
useEffect(() => {
const magnets = repacks.flatMap((repack) =>
repack.uris.filter((uri) => uri.startsWith("magnet:"))
);
window.electron.checkDebridAvailability(magnets).then((availableHashes) => {
setHashesInDebrid(availableHashes);
});
}, [repacks]);
const sortedRepacks = useMemo(() => {
return orderBy(
repacks,
[
(repack) => {
const magnet = repack.uris.find((uri) => uri.startsWith("magnet:"));
const hash = magnet ? getHashFromMagnet(magnet) : null;
return hash ? (hashesInDebrid[hash] ?? false) : false;
},
(repack) => repack.uploadDate,
],
["desc", "desc"]
);
}, [repacks, hashesInDebrid]);
useEffect(() => {
setFilteredRepacks(sortedRepacks);
}, [sortedRepacks, visible, game]);
@@ -110,6 +152,10 @@ export function RepacksModal({
{repack.fileSize} - {repack.repacker} -{" "}
{repack.uploadDate ? formatDate(repack.uploadDate) : ""}
</p>
{hashesInDebrid[getHashFromMagnet(repack.uris[0]) ?? ""] && (
<DebridBadge />
)}
</Button>
);
})}

View File

@@ -23,6 +23,7 @@ export function SettingsBehavior() {
enableAutoInstall: false,
seedAfterDownloadComplete: false,
showHiddenAchievementsDescription: false,
showDownloadSpeedInMegabytes: false,
});
const { t } = useTranslation("settings");
@@ -40,6 +41,8 @@ export function SettingsBehavior() {
userPreferences.seedAfterDownloadComplete ?? false,
showHiddenAchievementsDescription:
userPreferences.showHiddenAchievementsDescription ?? false,
showDownloadSpeedInMegabytes:
userPreferences.showDownloadSpeedInMegabytes ?? false,
});
}
}, [userPreferences]);
@@ -139,6 +142,16 @@ export function SettingsBehavior() {
})
}
/>
<CheckboxField
label={t("show_download_speed_in_megabytes")}
checked={form.showDownloadSpeedInMegabytes}
onChange={() =>
handleChange({
showDownloadSpeedInMegabytes: !form.showDownloadSpeedInMegabytes,
})
}
/>
</>
);
}

View File

@@ -7,6 +7,7 @@ export enum Downloader {
Datanodes,
Mediafire,
TorBox,
Hydra,
}
export enum DownloadSourceStatus {
@@ -56,6 +57,7 @@ export enum DownloadError {
NotCachedInTorbox = "download_error_not_cached_in_torbox",
GofileQuotaExceeded = "download_error_gofile_quota_exceeded",
RealDebridAccountNotAuthorized = "download_error_real_debrid_account_not_authorized",
NotCachedInHydra = "download_error_not_cached_in_hydra",
}
export const FILE_EXTENSIONS_TO_EXTRACT = [".rar", ".zip", ".7z"];

View File

@@ -49,6 +49,12 @@ export const formatBytes = (bytes: number): string => {
return `${Math.trunc(formatedByte * 10) / 10} ${FORMAT[base]}`;
};
/**
 * Formats a transfer rate given in bytes per second as an "Mbps" string,
 * truncated (not rounded) to one decimal place, mirroring formatBytes' style.
 *
 * NOTE(review): the divisor is 1024*1024, so the value is technically
 * mebibits/s even though the label reads "Mbps" — confirm this is intended.
 */
export const formatBytesToMbps = (bytesPerSecond: number): string => {
  const megabits = (bytesPerSecond * 8) / 1_048_576;
  const truncated = Math.trunc(megabits * 10) / 10;
  return `${truncated} Mbps`;
};
export const pipe =
<T>(...fns: ((arg: T) => any)[]) =>
(arg: T) =>
@@ -111,7 +117,12 @@ export const getDownloadersForUri = (uri: string) => {
return [Downloader.RealDebrid];
if (uri.startsWith("magnet:")) {
return [Downloader.Torrent, Downloader.TorBox, Downloader.RealDebrid];
return [
Downloader.Torrent,
Downloader.Hydra,
Downloader.TorBox,
Downloader.RealDebrid,
];
}
return [];

View File

@@ -81,10 +81,12 @@ export interface UserPreferences {
enableAutoInstall?: boolean;
seedAfterDownloadComplete?: boolean;
showHiddenAchievementsDescription?: boolean;
showDownloadSpeedInMegabits?: boolean;
downloadNotificationsEnabled?: boolean;
repackUpdatesNotificationsEnabled?: boolean;
achievementNotificationsEnabled?: boolean;
friendRequestNotificationsEnabled?: boolean;
showDownloadSpeedInMegabytes?: boolean;
}
export interface ScreenState {